author    mo khan <mo@mokhan.ca>  2025-07-14 16:29:33 -0600
committer mo khan <mo@mokhan.ca>  2025-07-14 16:29:33 -0600
commit    0432cfbbb07f234dd2cd294cfe7dfa065b113182 (patch)
tree      cab9f759b7d656dab92eab48694e5924c54b9644
parent    5a74d3988d8a029f1c879da709db623611aa545a (diff)
parent    e0b38f6ca22b28a0c4fe4192d642fceb48030737 (diff)
Merge branch 'the-spice-must-flow' into 'main'
Add SpiceDB Integration with Service-based Routing

See merge request gitlab-org/software-supply-chain-security/authorization/authzd!9
-rw-r--r--  .runway/runway.yml | 4
-rw-r--r--  Cargo.lock | 39
-rw-r--r--  Cargo.toml | 2
-rw-r--r--  Dockerfile | 4
-rw-r--r--  Makefile | 14
-rw-r--r--  Procfile | 3
-rw-r--r--  README.md | 3
-rwxr-xr-x  bin/envoy | 5
-rwxr-xr-x  bin/spicedb | 8
-rw-r--r--  etc/authzd/policy1.cedar | 7
-rw-r--r--  etc/authzd/spice.schema | 7
-rw-r--r--  etc/envoy/envoy.yaml | 42
-rw-r--r--  share/man/cedar/README.md | 125
-rw-r--r--  share/man/spicedb/README.md | 152
-rw-r--r--  src/authorization/entities.rs | 8
-rw-r--r--  src/authorization/server.rs | 16
-rw-r--r--  src/bin/cli.rs | 2
-rw-r--r--  tests/authorization/cedar_authorizer_test.rs | 18
-rw-r--r--  tests/authorization/server_test.rs | 18
-rw-r--r--  vendor/prost-types-0.12.6/.cargo-checksum.json | 1
-rw-r--r--  vendor/prost-types-0.12.6/Cargo.toml | 42
-rw-r--r--  vendor/prost-types-0.12.6/LICENSE | 201
-rw-r--r--  vendor/prost-types-0.12.6/README.md | 21
-rw-r--r--  vendor/prost-types-0.12.6/src/any.rs | 69
-rw-r--r--  vendor/prost-types-0.12.6/src/compiler.rs | 174
-rw-r--r--  vendor/prost-types-0.12.6/src/datetime.rs | 864
-rw-r--r--  vendor/prost-types-0.12.6/src/duration.rs | 333
-rw-r--r--  vendor/prost-types-0.12.6/src/lib.rs | 55
-rw-r--r--  vendor/prost-types-0.12.6/src/protobuf.rs | 2309
-rw-r--r--  vendor/prost-types-0.12.6/src/timestamp.rs | 416
-rw-r--r--  vendor/prost-types-0.12.6/src/type_url.rs | 70
-rw-r--r--  vendor/prost-types/.cargo-checksum.json | 2
-rw-r--r--  vendor/prost-types/Cargo.lock | 471
-rw-r--r--  vendor/prost-types/Cargo.toml | 32
-rw-r--r--  vendor/prost-types/README.md | 2
-rw-r--r--  vendor/prost-types/src/compiler.rs | 13
-rw-r--r--  vendor/prost-types/src/conversions.rs | 62
-rw-r--r--  vendor/prost-types/src/datetime.rs | 205
-rw-r--r--  vendor/prost-types/src/duration.rs | 228
-rw-r--r--  vendor/prost-types/src/lib.rs | 33
-rw-r--r--  vendor/prost-types/src/protobuf.rs | 224
-rw-r--r--  vendor/prost-types/src/timestamp.rs | 81
-rw-r--r--  vendor/tonic-health/.cargo-checksum.json | 1
-rw-r--r--  vendor/tonic-health/Cargo.lock | 474
-rw-r--r--  vendor/tonic-health/Cargo.toml | 97
-rw-r--r--  vendor/tonic-health/LICENSE | 19
-rw-r--r--  vendor/tonic-health/README.md | 14
-rw-r--r--  vendor/tonic-health/proto/health.proto | 63
-rw-r--r--  vendor/tonic-health/src/generated/grpc_health_v1.rs | 459
-rw-r--r--  vendor/tonic-health/src/generated/grpc_health_v1_fds.rs | 63
-rw-r--r--  vendor/tonic-health/src/lib.rs | 76
-rw-r--r--  vendor/tonic-health/src/server.rs | 353
-rw-r--r--  vendor/tonic-reflection/.cargo-checksum.json | 1
-rw-r--r--  vendor/tonic-reflection/Cargo.lock | 728
-rw-r--r--  vendor/tonic-reflection/Cargo.toml | 128
-rw-r--r--  vendor/tonic-reflection/LICENSE | 19
-rw-r--r--  vendor/tonic-reflection/README.md | 3
-rw-r--r--  vendor/tonic-reflection/proto/reflection_v1.proto | 147
-rw-r--r--  vendor/tonic-reflection/proto/reflection_v1alpha.proto | 136
-rw-r--r--  vendor/tonic-reflection/src/generated/grpc_reflection_v1.rs | 461
-rw-r--r--  vendor/tonic-reflection/src/generated/grpc_reflection_v1alpha.rs | 461
-rw-r--r--  vendor/tonic-reflection/src/generated/reflection_v1_fds.rs | 161
-rw-r--r--  vendor/tonic-reflection/src/generated/reflection_v1alpha1_fds.rs | 153
-rw-r--r--  vendor/tonic-reflection/src/lib.rs | 66
-rw-r--r--  vendor/tonic-reflection/src/server/mod.rs | 326
-rw-r--r--  vendor/tonic-reflection/src/server/v1.rs | 138
-rw-r--r--  vendor/tonic-reflection/src/server/v1alpha.rs | 138
-rw-r--r--  vendor/tonic-reflection/tests/server.rs | 151
-rw-r--r--  vendor/tonic-reflection/tests/versions.rs | 172
69 files changed, 638 insertions, 10755 deletions
diff --git a/.runway/runway.yml b/.runway/runway.yml
index 2824fe1d..88e7608f 100644
--- a/.runway/runway.yml
+++ b/.runway/runway.yml
@@ -13,12 +13,12 @@ spec:
regions:
- us-east1
startup_probe:
- grpc_service: ""
+ path: "/health"
timeout_seconds: 5
period_seconds: 5
failure_threshold: 24 # 2 minutes
liveness_probe:
- grpc_service: ""
+ path: "/health"
scalability:
min_instances: 1
max_instances: 1
diff --git a/Cargo.lock b/Cargo.lock
index d9f7eca9..63ea22e8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -168,8 +168,6 @@ dependencies = [
"tokio-test",
"tonic",
"tonic-build",
- "tonic-health",
- "tonic-reflection",
"tracing",
"tracing-subscriber",
"urlencoding",
@@ -1697,7 +1695,7 @@ dependencies = [
"petgraph 0.6.5",
"prettyplease",
"prost 0.12.6",
- "prost-types 0.12.6",
+ "prost-types",
"regex",
"syn",
"tempfile",
@@ -1739,15 +1737,6 @@ dependencies = [
]
[[package]]
-name = "prost-types"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16"
-dependencies = [
- "prost 0.13.5",
-]
-
-[[package]]
name = "psm"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2428,7 +2417,6 @@ dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
- "tokio-util",
]
[[package]]
@@ -2500,31 +2488,6 @@ dependencies = [
]
[[package]]
-name = "tonic-health"
-version = "0.13.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb87334d340313fefa513b6e60794d44a86d5f039b523229c99c323e4e19ca4b"
-dependencies = [
- "prost 0.13.5",
- "tokio",
- "tokio-stream",
- "tonic",
-]
-
-[[package]]
-name = "tonic-reflection"
-version = "0.13.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9687bd5bfeafebdded2356950f278bba8226f0b32109537c4253406e09aafe1"
-dependencies = [
- "prost 0.13.5",
- "prost-types 0.13.5",
- "tokio",
- "tokio-stream",
- "tonic",
-]
-
-[[package]]
name = "tower"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/Cargo.toml b/Cargo.toml
index c99f5625..0a3f3483 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -22,8 +22,6 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tokio = { version = "1.0.0", features = ["macros", "rt-multi-thread"] }
tonic = "0.13.1"
-tonic-health = "0.13.1"
-tonic-reflection = "0.13.1"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["json"] }
urlencoding = "2.1"
diff --git a/Dockerfile b/Dockerfile
index c088574e..e1c3d7a0 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -20,6 +20,9 @@ RUN apt-get update && apt-get install -y wget && \
wget -O /usr/bin/dumb-init https://github.com/Yelp/dumb-init/releases/download/v1.2.5/dumb-init_1.2.5_x86_64 && \
chmod +x /usr/bin/dumb-init
+# Build stage for getting SpiceDB binary
+FROM authzed/spicedb:latest AS spicedb-binary
+
# Final stage
FROM gcr.io/distroless/base-debian12:nonroot
EXPOSE 20000
@@ -31,6 +34,7 @@ COPY --from=authzd-builder /app/target/x86_64-unknown-linux-musl/release/authzd
COPY --from=envoy-binary /usr/local/bin/envoy /bin/envoy
COPY --from=minit-builder /go/bin/minit /bin/minit
COPY --from=dumb-init-builder /usr/bin/dumb-init /usr/bin/dumb-init
+COPY --from=spicedb-binary /usr/local/bin/spicedb /bin/spicedb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
CMD ["/bin/minit"]
diff --git a/Makefile b/Makefile
index 82d6f5a7..bdc7a3d8 100644
--- a/Makefile
+++ b/Makefile
@@ -42,6 +42,7 @@ lint:
@cargo clippy
@for policy in etc/authzd/*.cedar; do cedar check-parse --policies $$policy; done
@for policy in etc/authzd/*.cedar; do cedar format --policies $$policy --check; done
+ @./bin/envoy --mode validate -c ./etc/envoy/envoy.yaml || echo "Envoy not available, skipping validation"
doc:
@cargo doc --open
@@ -78,3 +79,16 @@ production-entities: $(AUTHZD_BIN) check-gitlab-token
@$(AUTHZD_BIN) generate --project gitlab-org/gitlab --output etc/authzd/gitlab.com/gitlab-org/gitlab/entities.json
@$(AUTHZD_BIN) generate --project gitlab-org/software-supply-chain-security/authorization/authzd --output etc/authzd/gitlab.com/gitlab-org/software-supply-chain-security/authorization/authzd/entities.json
@$(AUTHZD_BIN) generate --project gitlab-org/software-supply-chain-security/authorization/sparkled --output etc/authzd/gitlab.com/gitlab-org/software-supply-chain-security/authorization/sparkled/entities.json
+
+# spice targets
+run-spicedb-setup:
+ @zed --endpoint ":50051" --token "secret" --insecure schema write etc/authzd/spice.schema
+ @zed --endpoint ":50051" --token "secret" --insecure schema read
+ @zed --endpoint ":50051" --token "secret" --insecure relationship create project:1 maintainer user:mokhax
+ @zed --endpoint ":50051" --token "secret" --insecure relationship create project:1 developer user:tanuki
+
+run-spicedb-permission-check:
+ @zed --endpoint ":50051" --token "secret" --insecure permission check project:1 read user:mokhax
+ @zed --endpoint ":50051" --token "secret" --insecure permission check project:1 write user:mokhax
+ @zed --endpoint ":50051" --token "secret" --insecure permission check project:1 read user:tanuki
+ @zed --endpoint ":50051" --token "secret" --insecure permission check project:1 write user:tanuki
diff --git a/Procfile b/Procfile
index 39bbbeaf..7354a07a 100644
--- a/Procfile
+++ b/Procfile
@@ -1,2 +1,3 @@
-authzd: ./bin/authzd server
+authzd: ./bin/authzd server --addr 127.0.0.1:50052
envoy: ./bin/envoy -c ./etc/envoy/envoy.yaml --base-id 1 --log-level warn --component-log-level admin:warn,connection:warn,grpc:warn,http:warn,http2:warn,router:warn,upstream:warn
+spicedb: ./bin/spicedb serve --grpc-preshared-key "secret" --http-addr :8080 --grpc-addr :50051 --datastore-engine memory --log-level warn --log-format json --telemetry-endpoint "" --skip-release-check
diff --git a/README.md b/README.md
index 065aa7ef..6de81523 100644
--- a/README.md
+++ b/README.md
@@ -45,8 +45,9 @@ It integrates with an identity provider (IdP) and uses message queues to stay in
### Prerequisites
-- [mise](https://mise.jdx.dev/)
- [make](https://www.gnu.org/software/make/)
+- [mise](https://mise.jdx.dev/)
+- [spicedb](https://authzed.com/docs/spicedb/getting-started/installing-spicedb)
1. Install tools:
diff --git a/bin/envoy b/bin/envoy
index ede6290c..5f2ea9ca 100755
--- a/bin/envoy
+++ b/bin/envoy
@@ -1,3 +1,8 @@
#!/bin/sh
+if ! command -v envoy >/dev/null 2>&1; then
+ echo "Install envoy: https://www.envoyproxy.io/docs/envoy/latest/start/install"
+ exit 1
+fi
+
exec envoy $@
diff --git a/bin/spicedb b/bin/spicedb
new file mode 100755
index 00000000..5d4cf0bb
--- /dev/null
+++ b/bin/spicedb
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+if ! command -v spicedb >/dev/null 2>&1; then
+ echo "Install spicedb: https://authzed.com/docs/spicedb/getting-started/installing-spicedb"
+ exit 1
+fi
+
+exec spicedb $@
diff --git a/etc/authzd/policy1.cedar b/etc/authzd/policy1.cedar
index 15776ab7..966bbcfb 100644
--- a/etc/authzd/policy1.cedar
+++ b/etc/authzd/policy1.cedar
@@ -28,3 +28,10 @@ when
context.host == "sparkle.staging.runway.gitlab.net" &&
principal has username
};
+
+permit (
+ principal == User::"1",
+ action == Action::"GET",
+ resource == Resource::"/dashboard"
+)
+when { context has host && context.host == "localhost:10000" };
diff --git a/etc/authzd/spice.schema b/etc/authzd/spice.schema
new file mode 100644
index 00000000..0d6a6482
--- /dev/null
+++ b/etc/authzd/spice.schema
@@ -0,0 +1,7 @@
+definition user {}
+definition project {
+ relation developer: user
+ relation maintainer: user
+ permission read = developer + maintainer
+ permission write = maintainer
+}
diff --git a/etc/envoy/envoy.yaml b/etc/envoy/envoy.yaml
index 19df6a4f..bfe2ce16 100644
--- a/etc/envoy/envoy.yaml
+++ b/etc/envoy/envoy.yaml
@@ -34,6 +34,37 @@ static_resources:
address:
socket_address:
address: 127.0.0.1
+ port_value: 50052
+ typed_extension_protocol_options:
+ envoy.extensions.upstreams.http.v3.HttpProtocolOptions:
+ "@type": type.googleapis.com/envoy.extensions.upstreams.http.v3.HttpProtocolOptions
+ explicit_http_config:
+ http2_protocol_options: {}
+ health_checks:
+ - timeout: 3s
+ interval: 5s
+ unhealthy_threshold: 2
+ healthy_threshold: 2
+ grpc_health_check: {}
+ circuit_breakers:
+ thresholds:
+ - priority: DEFAULT
+ max_connections: 1024
+ max_pending_requests: 1024
+ max_requests: 1024
+ max_retries: 3
+ - name: spicedb
+ connect_timeout: 5s
+ type: STATIC
+ lb_policy: ROUND_ROBIN
+ load_assignment:
+ cluster_name: spicedb
+ endpoints:
+ - lb_endpoints:
+ - endpoint:
+ address:
+ socket_address:
+ address: 127.0.0.1
port_value: 50051
typed_extension_protocol_options:
envoy.extensions.upstreams.http.v3.HttpProtocolOptions:
@@ -120,14 +151,21 @@ static_resources:
key: "x-xss-protection"
value: "1; mode=block"
virtual_hosts:
- - name: backend
+ - name: grpc_services
domains: ["*"]
routes:
+ # Route ext_authz to authzd
- match:
- prefix: "/"
+ prefix: "/envoy.service.auth.v3.Authorization/"
route:
cluster: authzd
timeout: 30s
+ # Default route - everything else goes to SpiceDB
+ - match:
+ prefix: "/"
+ route:
+ cluster: spicedb
+ timeout: 30s
retry_policy:
retry_on: "5xx,reset,connect-failure,retriable-status-codes"
num_retries: 3
diff --git a/share/man/cedar/README.md b/share/man/cedar/README.md
new file mode 100644
index 00000000..ec665d69
--- /dev/null
+++ b/share/man/cedar/README.md
@@ -0,0 +1,125 @@
+# Cedar Authorization Guide
+
+Cedar provides policy-based authorization using Amazon's Cedar policy
+language. This service handles request authorization through Envoy's
+`ext_authz` filter.
+
+## Architecture
+
+```
++------------------------------------------------+
+|                 Client Request                  |
++------------------------------------------------+
+                        |
+                        V
++------------------------------------------------+
+| Envoy Proxy (:20000)                            |
+|                                                 |
+| * JWT Filter extracts x-jwt-claim-sub header    |
+| * ext_authz sends CheckRequest to authzd        |
++------------------------------------------------+
+                        | ext_authz
+                        V
+             +---------------------+
+             |   authzd (:50052)   |
+             |                     |
+             | +-----------------+ |
+             | | Cedar Policies  | |
+             | | * Static Assets | |
+             | | * JWT Claims    | |
+             | | * Path Rules    | |
+             | +-----------------+ |
+             +---------------------+
+```
+
+## Authorization Flow
+
+```
+  Client                Envoy                   authzd
+    |                     |                       |
+    | HTTP Request + JWT  |                       |
+    |-------------------->|                       |
+    |                     | Extract JWT claims    |
+    |                     | Add x-jwt-claim-sub   |
+    |                     |                       |
+    |                     | ext_authz CheckRequest|
+    |                     |---------------------->|
+    |                     |                       |
+    |                     |                       | Evaluate
+    |                     |                       | Cedar
+    |                     |                       | policies
+    |                     | Allow/Deny            |
+    |                     |<----------------------|
+    |                     |                       |
+    | Forward request     |                       |
+    | or 403 Forbidden    |                       |
+    |<--------------------|                       |
+```
+
+## Cedar Policies
+
+### Policy Structure
+
+Policies are stored in `etc/authzd/*.cedar` files using Cedar's policy language:
+
+```cedar
+permit (
+ principal == User::"1",
+ action == Action::"GET",
+ resource == Resource::"/sparkle/"
+)
+when
+{
+ context has host &&
+ context.host == "sparkle.staging.runway.gitlab.net" &&
+ principal has username
+};
+```
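+
+To try a policy outside the running service, the `cedar` CLI can evaluate a
+single request against it. The sketch below is illustrative only: the entity
+and context files are hypothetical, the `username` value is made up, and the
+exact flag names may differ between cedar CLI versions.
+
+```bash
+# Hypothetical entities file: principal User::"1" with a username attribute.
+cat > /tmp/entities.json <<'EOF'
+[{"uid": {"type": "User", "id": "1"}, "attrs": {"username": "mokhax"}, "parents": []}]
+EOF
+
+# Hypothetical context file matching the policy's `context.host` condition.
+cat > /tmp/context.json <<'EOF'
+{"host": "sparkle.staging.runway.gitlab.net"}
+EOF
+
+# Ask Cedar for an ALLOW/DENY decision for this single request.
+cedar authorize \
+  --policies etc/authzd/policy1.cedar \
+  --entities /tmp/entities.json \
+  --context /tmp/context.json \
+  --principal 'User::"1"' \
+  --action 'Action::"GET"' \
+  --resource 'Resource::"/sparkle/"'
+```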
+
+## JWT Integration
+
+### JWT Header Extraction
+
+Envoy's JWT filter extracts claims and adds them as headers:
+
+- `x-jwt-claim-sub` - User ID (subject)
+
+## Policy Development
+
+### Adding New Policies
+
+1. Create or edit `.cedar` files in `etc/authzd/`
+2. Use Cedar policy syntax for rules
+3. Test with `make test`
+4. Validate with `make lint`
+
+### Policy Validation
+
+```bash
+# Check policy syntax
+cedar check-parse --policies etc/authzd/policy1.cedar
+
+# Format policies
+cedar format --policies etc/authzd/policy1.cedar --check
+```
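+
+If a Cedar schema is added later, policies can also be validated against it.
+This is a sketch only: `etc/authzd/authzd.cedarschema` is a hypothetical file
+that does not exist in this repository yet, and flag names may vary by cedar
+CLI version.
+
+```bash
+# Validate policies against a (hypothetical) Cedar schema
+cedar validate --schema etc/authzd/authzd.cedarschema --policies etc/authzd/policy1.cedar
+```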
+
+### Testing Policies
+
+```bash
+# Run Cedar authorization tests
+cargo test authorization::cedar_authorizer_test
+
+# Test specific scenarios
+cargo test test_sparkle_homepage
+```
+
+## Make Targets
+
+- `make test` - Run all tests including Cedar policy tests
+- `make lint` - Validate Cedar policy syntax and formatting
+
+## References
+
+- [Cedar Policy Language](https://docs.cedarpolicy.com/)
+- [Cedar Language Guide](https://docs.cedarpolicy.com/policies/syntax.html)
+- [Envoy JWT Authentication](https://www.envoyproxy.io/docs/envoy/latest/configuration/http/http_filters/jwt_authn_filter)
diff --git a/share/man/spicedb/README.md b/share/man/spicedb/README.md
new file mode 100644
index 00000000..f5e2e968
--- /dev/null
+++ b/share/man/spicedb/README.md
@@ -0,0 +1,152 @@
+# SpiceDB Integration Guide
+
+SpiceDB provides relation-based authorization using the Google Zanzibar model.
+This service handles complex permission hierarchies through relationship graphs.
+
+## Architecture
+
+```
++------------------------------------------------+
+|                 Client Request                  |
++------------------------------------------------+
+                        |
+                        V
++------------------------------------------------+
+| Envoy Proxy (:20000)                            |
+|                                                 |
+| Routes /authzed.api.v1.* directly to SpiceDB    |
++------------------------------------------------+
+                        | SpiceDB APIs
+                        V
+             +---------------------+
+             |  SpiceDB (:50051)   |
+             |                     |
+             | +-----------------+ |
+             | | Relations       | |
+             | | * user:mokhax   | |
+             | | * project:1     | |
+             | | * maintainer    | |
+             | | * developer     | |
+             | +-----------------+ |
+             +---------------------+
+```
+
+## Authorization Flow
+
+```
+  Client                Envoy                  SpiceDB
+    |                     |                       |
+    | gRPC PermissionCheck|                       |
+    |-------------------->|                       |
+    |                     | Route by gRPC service |
+    |                     |---------------------->|
+    |                     |                       |
+    |                     |                       | Query
+    |                     |                       | relations
+    |                     |                       | graph
+    |                     | Permission result     |
+    |                     |<----------------------|
+    |                     |                       |
+    | Permission response |                       |
+    |<--------------------|                       |
+```
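+
+Once the Quick Start steps below have loaded the schema and test
+relationships, this flow can be exercised directly against the SpiceDB API
+that Envoy exposes on `:20000`. A sketch, assuming `grpcurl` is installed and
+that SpiceDB's gRPC reflection is reachable through Envoy's default route:
+
+```bash
+grpcurl -plaintext \
+  -H 'authorization: Bearer secret' \
+  -d '{
+        "resource":   {"objectType": "project", "objectId": "1"},
+        "permission": "read",
+        "subject":    {"object": {"objectType": "user", "objectId": "mokhax"}}
+      }' \
+  localhost:20000 authzed.api.v1.PermissionsService/CheckPermission
+```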
+
+## Quick Start
+
+### 1. Start All Services
+
+```bash
+# Start authzd, envoy, and spicedb
+make run
+```
+
+### 2. Setup SpiceDB Schema & Data
+
+```bash
+# Initialize schema and test data
+make run-spicedb-setup
+
+# Test permissions
+make run-spicedb-permission-check
+```
+
+### 3. Test SpiceDB Permissions
+
+```bash
+# Check permissions via zed CLI
+zed --endpoint "localhost:20000" --token "secret" --insecure permission check project:1 read user:mokhax
+```
+
+## SpiceDB Configuration
+
+### Schema Development
+
+1. Update schema in `etc/authzd/spice.schema`
+2. Apply with `zed schema write`
+3. Add relationships with `zed relationship create`
+
+### Schema Example
+
+```zed
+definition user {}
+definition project {
+ relation developer: user
+ relation maintainer: user
+ permission read = developer + maintainer
+ permission write = maintainer
+}
+```
+
+### Creating Relationships
+
+```bash
+# Add user to project as maintainer
+zed relationship create project:1 maintainer user:mokhax
+
+# Add user to project as developer
+zed relationship create project:1 developer user:tanuki
+```
+
+## zed CLI Commands
+
+### Schema Management
+
+```bash
+# Write schema to SpiceDB
+zed --endpoint "localhost:20000" --token "secret" --insecure schema write etc/authzd/spice.schema
+
+# Read current schema
+zed --endpoint "localhost:20000" --token "secret" --insecure schema read
+```
+
+### Relationship Management
+
+```bash
+# Create relationships
+zed --endpoint "localhost:20000" --token "secret" --insecure relationship create project:1 maintainer user:mokhax
+
+# Delete relationships
+zed --endpoint "localhost:20000" --token "secret" --insecure relationship delete project:1 developer user:tanuki
+```
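+
+Relationships can also be read back to confirm what was written. A sketch;
+the exact argument form may vary between zed versions:
+
+```bash
+# List relationships stored for a resource
+zed --endpoint "localhost:20000" --token "secret" --insecure relationship read project:1
+```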
+
+### Permission Checks
+
+```bash
+# Check specific permissions
+zed --endpoint "localhost:20000" --token "secret" --insecure permission check project:1 write user:mokhax
+
+# Bulk permission checks
+zed --endpoint "localhost:20000" --token "secret" --insecure permission check project:1 read user:tanuki
+```
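+
+The Procfile also starts SpiceDB with `--http-addr :8080`, so the same check
+can in principle be issued through SpiceDB's HTTP gateway. A sketch only: the
+gateway may need to be explicitly enabled depending on the SpiceDB version.
+
+```bash
+curl -s http://localhost:8080/v1/permissions/check \
+  -H 'Authorization: Bearer secret' \
+  -H 'Content-Type: application/json' \
+  -d '{
+        "resource":   {"objectType": "project", "objectId": "1"},
+        "permission": "write",
+        "subject":    {"object": {"objectType": "user", "objectId": "tanuki"}}
+      }'
+```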
+
+## Make Targets
+
+- `make run-spicedb-setup` - Initialize schema and test data
+- `make run-spicedb-permission-check` - Test permission queries
+
+## References
+
+- [SpiceDB Documentation](https://authzed.com/docs)
+- [Google Zanzibar Paper](https://authzed.com/blog/what-is-google-zanzibar)
+- [Cedar Policy Language](https://docs.cedarpolicy.com/)
+- [Envoy External Authorization](https://www.envoyproxy.io/docs/envoy/latest/configuration/http/http_filters/ext_authz_filter)
diff --git a/src/authorization/entities.rs b/src/authorization/entities.rs
index ec1a7a1b..050f6f26 100644
--- a/src/authorization/entities.rs
+++ b/src/authorization/entities.rs
@@ -1,6 +1,10 @@
use crate::gitlab::Api;
use serde::Serialize;
use std::collections::HashSet;
+use std::future::Future;
+use std::pin::Pin;
+
+type BoxFuture<'a, T> = Pin<Box<dyn Future<Output = T> + 'a>>;
// Cedar entity structures
// Note: We define custom types instead of using cedar_policy::Entity directly because:
@@ -102,9 +106,7 @@ impl EntitiesRepository {
group_id: u64,
entities: &'a mut Vec<CedarEntity>,
groups: &'a mut HashSet<u64>,
- ) -> std::pin::Pin<
- Box<dyn std::future::Future<Output = Result<(), Box<dyn std::error::Error>>> + 'a>,
- > {
+ ) -> BoxFuture<'a, Result<(), Box<dyn std::error::Error>>> {
Box::pin(async move {
if groups.contains(&group_id) {
return Ok(());
diff --git a/src/authorization/server.rs b/src/authorization/server.rs
index 90d3edf6..31bf2af8 100644
--- a/src/authorization/server.rs
+++ b/src/authorization/server.rs
@@ -9,25 +9,11 @@ pub struct Server {
impl Server {
pub fn new<T: super::Authorizer>(authorizer: T) -> Result<Server, Box<dyn std::error::Error>> {
- let (health_reporter, health_service) = tonic_health::server::health_reporter();
- std::mem::drop(
- health_reporter.set_service_status("", tonic_health::ServingStatus::Serving),
- );
let authorization_service =
AuthorizationServer::new(CheckService::new(Arc::new(authorizer)));
- let reflection_service = tonic_reflection::server::Builder::configure()
- .register_encoded_file_descriptor_set(tonic_health::pb::FILE_DESCRIPTOR_SET)
- .register_encoded_file_descriptor_set(include_bytes!(
- "../../vendor/envoy-types/src/generated/types.bin"
- ))
- .build_v1()?;
-
Ok(Self::new_with(|mut builder| {
- builder
- .add_service(authorization_service)
- .add_service(health_service)
- .add_service(reflection_service)
+ builder.add_service(authorization_service)
}))
}
diff --git a/src/bin/cli.rs b/src/bin/cli.rs
index 837ef80f..78aa1ba1 100644
--- a/src/bin/cli.rs
+++ b/src/bin/cli.rs
@@ -40,7 +40,7 @@ enum Commands {
},
Server {
/// Address to bind to
- #[arg(short, long, env = "BIND_ADDR", default_value = "127.0.0.1:50051")]
+ #[arg(short, long, env = "BIND_ADDR", default_value = "127.0.0.1:50052")]
addr: String,
},
}
diff --git a/tests/authorization/cedar_authorizer_test.rs b/tests/authorization/cedar_authorizer_test.rs
index 7a99a7d9..58563832 100644
--- a/tests/authorization/cedar_authorizer_test.rs
+++ b/tests/authorization/cedar_authorizer_test.rs
@@ -128,4 +128,22 @@ mod tests {
let authorizer = subject();
assert_eq!(authorizer.authorize(request), true);
}
+
+ #[test]
+ fn test_sparkle_dashboard() {
+ let request = build_request(|item: &mut HttpRequest| {
+ item.method = "GET".to_string();
+ item.path = "/dashboard".to_string();
+ item.host = "localhost:10000".to_string();
+ item.headers = build_headers(vec![
+ (String::from("x-jwt-claim-sub"), "1".to_string()),
+ (String::from(":path"), item.path.to_string()),
+ (String::from(":method"), item.method.to_string()),
+ (String::from(":authority"), item.host.to_string()),
+ ]);
+ });
+
+ let authorizer = subject();
+ assert_eq!(authorizer.authorize(request), true);
+ }
}
diff --git a/tests/authorization/server_test.rs b/tests/authorization/server_test.rs
index fe8c8a73..5a92dcff 100644
--- a/tests/authorization/server_test.rs
+++ b/tests/authorization/server_test.rs
@@ -27,21 +27,19 @@ mod tests {
}
#[tokio::test]
- async fn test_health_check_service() {
+ async fn test_health_ext_authz_service() {
let (addr, server) = start_server().await;
- let mut client =
- build_rpc_client(addr, tonic_health::pb::health_client::HealthClient::new).await;
- let request = tonic::Request::new(tonic_health::pb::HealthCheckRequest {
- service: String::new(),
- });
+ let mut client = build_rpc_client(
+ addr,
+ envoy_types::pb::envoy::service::auth::v3::authorization_client::AuthorizationClient::new,
+ )
+ .await;
+
+ let request = tonic::Request::new(envoy_types::ext_authz::v3::pb::CheckRequest::default());
let response = client.check(request).await;
assert!(response.is_ok());
- assert_eq!(
- response.unwrap().into_inner().status(),
- tonic_health::pb::health_check_response::ServingStatus::Serving
- );
server.abort();
}
diff --git a/vendor/prost-types-0.12.6/.cargo-checksum.json b/vendor/prost-types-0.12.6/.cargo-checksum.json
deleted file mode 100644
index 2cb8563d..00000000
--- a/vendor/prost-types-0.12.6/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.toml":"cadf2579e0a1e10bf59134e5341555e9c8557ccccf2f390e4ef2320bb76de718","LICENSE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","README.md":"05bf3eb034345e386d76f957e8ccdb26960cf5f78c050804b074ef3f01f92477","src/any.rs":"663ad6e55a0e15ace05ab66df21555e5fa81258ca5e9624e3cacb1ec56277b72","src/compiler.rs":"cdeb17a1df6f555c358dbfb0270f2a151ad759cae42be4a66af05b686f517d0f","src/datetime.rs":"df4fd7aee4d6fb5e28850d797cbd490ba9446a2e3fd6bbec015baf8a7ccfe4e4","src/duration.rs":"7378442f6ae52b9799fd114b4c6be6edc1bc41834b1f5b56f98e3c0b7037a6f2","src/lib.rs":"e3c05512b314b7a9b64d302f1a240830553cd1f28629b9ad439591f49935af41","src/protobuf.rs":"5d92f618bb6ad3ac3939a182a4ff8c106c90ec6588054738b0e65caaf1e90e76","src/timestamp.rs":"8eaa6dd53633f2a05839e5e5790da7adcb50ed67fb2ceb5358e2440080492be8","src/type_url.rs":"dc69abaa0ebaaaa58ea81dfba6712bc5be00c35bfff5a3da80b5df0c49c7725f"},"package":"9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0"} \ No newline at end of file
diff --git a/vendor/prost-types-0.12.6/Cargo.toml b/vendor/prost-types-0.12.6/Cargo.toml
deleted file mode 100644
index 0c71fd73..00000000
--- a/vendor/prost-types-0.12.6/Cargo.toml
+++ /dev/null
@@ -1,42 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies.
-#
-# If you are reading this file be aware that the original Cargo.toml
-# will likely look very different (and much more reasonable).
-# See Cargo.toml.orig for the original contents.
-
-[package]
-edition = "2021"
-rust-version = "1.70"
-name = "prost-types"
-version = "0.12.6"
-authors = [
- "Dan Burkert <dan@danburkert.com>",
- "Lucio Franco <luciofranco14@gmail.com>",
- "Casper Meijn <casper@meijn.net>",
- "Tokio Contributors <team@tokio.rs>",
-]
-description = "Prost definitions of Protocol Buffers well known types."
-documentation = "https://docs.rs/prost-types"
-readme = "README.md"
-license = "Apache-2.0"
-repository = "https://github.com/tokio-rs/prost"
-
-[lib]
-doctest = false
-
-[dependencies.prost]
-version = "0.12.6"
-features = ["prost-derive"]
-default-features = false
-
-[dev-dependencies.proptest]
-version = "1"
-
-[features]
-default = ["std"]
-std = ["prost/std"]
diff --git a/vendor/prost-types-0.12.6/LICENSE b/vendor/prost-types-0.12.6/LICENSE
deleted file mode 100644
index 16fe87b0..00000000
--- a/vendor/prost-types-0.12.6/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-
-APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
-Copyright [yyyy] [name of copyright owner]
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/vendor/prost-types-0.12.6/README.md b/vendor/prost-types-0.12.6/README.md
deleted file mode 100644
index 8724577b..00000000
--- a/vendor/prost-types-0.12.6/README.md
+++ /dev/null
@@ -1,21 +0,0 @@
-[![Documentation](https://docs.rs/prost-types/badge.svg)](https://docs.rs/prost-types/)
-[![Crate](https://img.shields.io/crates/v/prost-types.svg)](https://crates.io/crates/prost-types)
-
-# `prost-types`
-
-Prost definitions of Protocol Buffers well known types. See the [Protobuf reference][1] for more
-information about well known types.
-
-[1]: https://developers.google.com/protocol-buffers/docs/reference/google.protobuf
-
-## License
-
-`prost-types` is distributed under the terms of the Apache License (Version 2.0).
-`prost-types` includes code imported from the Protocol Buffers projet, which is
-included under its original ([BSD][2]) license.
-
-[2]: https://github.com/google/protobuf/blob/master/LICENSE
-
-See [LICENSE](..LICENSE) for details.
-
-Copyright 2017 Dan Burkert
diff --git a/vendor/prost-types-0.12.6/src/any.rs b/vendor/prost-types-0.12.6/src/any.rs
deleted file mode 100644
index af3e0e4d..00000000
--- a/vendor/prost-types-0.12.6/src/any.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-use super::*;
-
-impl Any {
- /// Serialize the given message type `M` as [`Any`].
- pub fn from_msg<M>(msg: &M) -> Result<Self, EncodeError>
- where
- M: Name,
- {
- let type_url = M::type_url();
- let mut value = Vec::new();
- Message::encode(msg, &mut value)?;
- Ok(Any { type_url, value })
- }
-
- /// Decode the given message type `M` from [`Any`], validating that it has
- /// the expected type URL.
- pub fn to_msg<M>(&self) -> Result<M, DecodeError>
- where
- M: Default + Name + Sized,
- {
- let expected_type_url = M::type_url();
-
- if let (Some(expected), Some(actual)) = (
- TypeUrl::new(&expected_type_url),
- TypeUrl::new(&self.type_url),
- ) {
- if expected == actual {
- return M::decode(self.value.as_slice());
- }
- }
-
- let mut err = DecodeError::new(format!(
- "expected type URL: \"{}\" (got: \"{}\")",
- expected_type_url, &self.type_url
- ));
- err.push("unexpected type URL", "type_url");
- Err(err)
- }
-}
-
-impl Name for Any {
- const PACKAGE: &'static str = PACKAGE;
- const NAME: &'static str = "Any";
-
- fn type_url() -> String {
- type_url_for::<Self>()
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn check_any_serialization() {
- let message = Timestamp::date(2000, 1, 1).unwrap();
- let any = Any::from_msg(&message).unwrap();
- assert_eq!(
- &any.type_url,
- "type.googleapis.com/google.protobuf.Timestamp"
- );
-
- let message2 = any.to_msg::<Timestamp>().unwrap();
- assert_eq!(message, message2);
-
- // Wrong type URL
- assert!(any.to_msg::<Duration>().is_err());
- }
-}
diff --git a/vendor/prost-types-0.12.6/src/compiler.rs b/vendor/prost-types-0.12.6/src/compiler.rs
deleted file mode 100644
index 0a3b4680..00000000
--- a/vendor/prost-types-0.12.6/src/compiler.rs
+++ /dev/null
@@ -1,174 +0,0 @@
-// This file is @generated by prost-build.
-/// The version number of protocol compiler.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Version {
- #[prost(int32, optional, tag = "1")]
- pub major: ::core::option::Option<i32>,
- #[prost(int32, optional, tag = "2")]
- pub minor: ::core::option::Option<i32>,
- #[prost(int32, optional, tag = "3")]
- pub patch: ::core::option::Option<i32>,
- /// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
- /// be empty for mainline stable releases.
- #[prost(string, optional, tag = "4")]
- pub suffix: ::core::option::Option<::prost::alloc::string::String>,
-}
-/// An encoded CodeGeneratorRequest is written to the plugin's stdin.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct CodeGeneratorRequest {
- /// The .proto files that were explicitly listed on the command-line. The
- /// code generator should generate code only for these files. Each file's
- /// descriptor will be included in proto_file, below.
- #[prost(string, repeated, tag = "1")]
- pub file_to_generate: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
- /// The generator parameter passed on the command-line.
- #[prost(string, optional, tag = "2")]
- pub parameter: ::core::option::Option<::prost::alloc::string::String>,
- /// FileDescriptorProtos for all files in files_to_generate and everything
- /// they import. The files will appear in topological order, so each file
- /// appears before any file that imports it.
- ///
- /// protoc guarantees that all proto_files will be written after
- /// the fields above, even though this is not technically guaranteed by the
- /// protobuf wire format. This theoretically could allow a plugin to stream
- /// in the FileDescriptorProtos and handle them one by one rather than read
- /// the entire set into memory at once. However, as of this writing, this
- /// is not similarly optimized on protoc's end -- it will store all fields in
- /// memory at once before sending them to the plugin.
- ///
- /// Type names of fields and extensions in the FileDescriptorProto are always
- /// fully qualified.
- #[prost(message, repeated, tag = "15")]
- pub proto_file: ::prost::alloc::vec::Vec<super::FileDescriptorProto>,
- /// The version number of protocol compiler.
- #[prost(message, optional, tag = "3")]
- pub compiler_version: ::core::option::Option<Version>,
-}
-/// The plugin writes an encoded CodeGeneratorResponse to stdout.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct CodeGeneratorResponse {
- /// Error message. If non-empty, code generation failed. The plugin process
- /// should exit with status code zero even if it reports an error in this way.
- ///
- /// This should be used to indicate errors in .proto files which prevent the
- /// code generator from generating correct code. Errors which indicate a
- /// problem in protoc itself -- such as the input CodeGeneratorRequest being
- /// unparseable -- should be reported by writing a message to stderr and
- /// exiting with a non-zero status code.
- #[prost(string, optional, tag = "1")]
- pub error: ::core::option::Option<::prost::alloc::string::String>,
- /// A bitmask of supported features that the code generator supports.
- /// This is a bitwise "or" of values from the Feature enum.
- #[prost(uint64, optional, tag = "2")]
- pub supported_features: ::core::option::Option<u64>,
- #[prost(message, repeated, tag = "15")]
- pub file: ::prost::alloc::vec::Vec<code_generator_response::File>,
-}
-/// Nested message and enum types in `CodeGeneratorResponse`.
-pub mod code_generator_response {
- /// Represents a single generated file.
- #[allow(clippy::derive_partial_eq_without_eq)]
- #[derive(Clone, PartialEq, ::prost::Message)]
- pub struct File {
- /// The file name, relative to the output directory. The name must not
- /// contain "." or ".." components and must be relative, not be absolute (so,
- /// the file cannot lie outside the output directory). "/" must be used as
- /// the path separator, not "".
- ///
- /// If the name is omitted, the content will be appended to the previous
- /// file. This allows the generator to break large files into small chunks,
- /// and allows the generated text to be streamed back to protoc so that large
- /// files need not reside completely in memory at one time. Note that as of
- /// this writing protoc does not optimize for this -- it will read the entire
- /// CodeGeneratorResponse before writing files to disk.
- #[prost(string, optional, tag = "1")]
- pub name: ::core::option::Option<::prost::alloc::string::String>,
- /// If non-empty, indicates that the named file should already exist, and the
- /// content here is to be inserted into that file at a defined insertion
- /// point. This feature allows a code generator to extend the output
- /// produced by another code generator. The original generator may provide
- /// insertion points by placing special annotations in the file that look
- /// like:
- /// @@protoc_insertion_point(NAME)
- /// The annotation can have arbitrary text before and after it on the line,
- /// which allows it to be placed in a comment. NAME should be replaced with
- /// an identifier naming the point -- this is what other generators will use
- /// as the insertion_point. Code inserted at this point will be placed
- /// immediately above the line containing the insertion point (thus multiple
- /// insertions to the same point will come out in the order they were added).
- /// The double-@ is intended to make it unlikely that the generated code
- /// could contain things that look like insertion points by accident.
- ///
- /// For example, the C++ code generator places the following line in the
- /// .pb.h files that it generates:
- /// // @@protoc_insertion_point(namespace_scope)
- /// This line appears within the scope of the file's package namespace, but
- /// outside of any particular class. Another plugin can then specify the
- /// insertion_point "namespace_scope" to generate additional classes or
- /// other declarations that should be placed in this scope.
- ///
- /// Note that if the line containing the insertion point begins with
- /// whitespace, the same whitespace will be added to every line of the
- /// inserted text. This is useful for languages like Python, where
- /// indentation matters. In these languages, the insertion point comment
- /// should be indented the same amount as any inserted code will need to be
- /// in order to work correctly in that context.
- ///
- /// The code generator that generates the initial file and the one which
- /// inserts into it must both run as part of a single invocation of protoc.
- /// Code generators are executed in the order in which they appear on the
- /// command line.
- ///
- /// If |insertion_point| is present, |name| must also be present.
- #[prost(string, optional, tag = "2")]
- pub insertion_point: ::core::option::Option<::prost::alloc::string::String>,
- /// The file contents.
- #[prost(string, optional, tag = "15")]
- pub content: ::core::option::Option<::prost::alloc::string::String>,
- /// Information describing the file content being inserted. If an insertion
- /// point is used, this information will be appropriately offset and inserted
- /// into the code generation metadata for the generated files.
- #[prost(message, optional, tag = "16")]
- pub generated_code_info: ::core::option::Option<super::super::GeneratedCodeInfo>,
- }
- /// Sync with code_generator.h.
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum Feature {
- None = 0,
- Proto3Optional = 1,
- }
- impl Feature {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- Feature::None => "FEATURE_NONE",
- Feature::Proto3Optional => "FEATURE_PROTO3_OPTIONAL",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "FEATURE_NONE" => Some(Self::None),
- "FEATURE_PROTO3_OPTIONAL" => Some(Self::Proto3Optional),
- _ => None,
- }
- }
- }
-}
diff --git a/vendor/prost-types-0.12.6/src/datetime.rs b/vendor/prost-types-0.12.6/src/datetime.rs
deleted file mode 100644
index 2435ffe7..00000000
--- a/vendor/prost-types-0.12.6/src/datetime.rs
+++ /dev/null
@@ -1,864 +0,0 @@
-//! A date/time type which exists primarily to convert [`Timestamp`]s into an RFC 3339 formatted
-//! string.
-
-use core::fmt;
-
-use crate::Duration;
-use crate::Timestamp;
-
-/// A point in time, represented as a date and time in the UTC timezone.
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
-pub(crate) struct DateTime {
- /// The year.
- pub(crate) year: i64,
- /// The month of the year, from 1 to 12, inclusive.
- pub(crate) month: u8,
- /// The day of the month, from 1 to 31, inclusive.
- pub(crate) day: u8,
- /// The hour of the day, from 0 to 23, inclusive.
- pub(crate) hour: u8,
- /// The minute of the hour, from 0 to 59, inclusive.
- pub(crate) minute: u8,
- /// The second of the minute, from 0 to 59, inclusive.
- pub(crate) second: u8,
- /// The nanoseconds, from 0 to 999_999_999, inclusive.
- pub(crate) nanos: u32,
-}
-
-impl DateTime {
- /// The minimum representable [`Timestamp`] as a `DateTime`.
- pub(crate) const MIN: DateTime = DateTime {
- year: -292_277_022_657,
- month: 1,
- day: 27,
- hour: 8,
- minute: 29,
- second: 52,
- nanos: 0,
- };
-
- /// The maximum representable [`Timestamp`] as a `DateTime`.
- pub(crate) const MAX: DateTime = DateTime {
- year: 292_277_026_596,
- month: 12,
- day: 4,
- hour: 15,
- minute: 30,
- second: 7,
- nanos: 999_999_999,
- };
-
- /// Returns `true` if the `DateTime` is a valid calendar date.
- pub(crate) fn is_valid(&self) -> bool {
- self >= &DateTime::MIN
- && self <= &DateTime::MAX
- && self.month > 0
- && self.month <= 12
- && self.day > 0
- && self.day <= days_in_month(self.year, self.month)
- && self.hour < 24
- && self.minute < 60
- && self.second < 60
- && self.nanos < 1_000_000_000
- }
-}
-
-impl fmt::Display for DateTime {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- // Pad years to at least 4 digits.
- if self.year > 9999 {
- write!(f, "+{}", self.year)?;
- } else if self.year < 0 {
- write!(f, "{:05}", self.year)?;
- } else {
- write!(f, "{:04}", self.year)?;
- };
-
- write!(
- f,
- "-{:02}-{:02}T{:02}:{:02}:{:02}",
- self.month, self.day, self.hour, self.minute, self.second,
- )?;
-
- // Format subseconds to either nothing, millis, micros, or nanos.
- let nanos = self.nanos;
- if nanos == 0 {
- write!(f, "Z")
- } else if nanos % 1_000_000 == 0 {
- write!(f, ".{:03}Z", nanos / 1_000_000)
- } else if nanos % 1_000 == 0 {
- write!(f, ".{:06}Z", nanos / 1_000)
- } else {
- write!(f, ".{:09}Z", nanos)
- }
- }
-}
-
-impl From<Timestamp> for DateTime {
- /// musl's [`__secs_to_tm`][1] converted to Rust via [c2rust][2] and then cleaned up by hand.
- ///
- /// All existing `strftime`-like APIs in Rust are unable to handle the full range of timestamps
- /// representable by `Timestamp`, including `strftime` itself, since tm.tm_year is an int.
- ///
- /// [1]: http://git.musl-libc.org/cgit/musl/tree/src/time/__secs_to_tm.c
- /// [2]: https://c2rust.com/
- fn from(mut timestamp: Timestamp) -> DateTime {
- timestamp.normalize();
-
- let t = timestamp.seconds;
- let nanos = timestamp.nanos;
-
- // 2000-03-01 (mod 400 year, immediately after feb29
- const LEAPOCH: i64 = 946_684_800 + 86400 * (31 + 29);
- const DAYS_PER_400Y: i32 = 365 * 400 + 97;
- const DAYS_PER_100Y: i32 = 365 * 100 + 24;
- const DAYS_PER_4Y: i32 = 365 * 4 + 1;
- const DAYS_IN_MONTH: [u8; 12] = [31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31, 29];
-
- // Note(dcb): this bit is rearranged slightly to avoid integer overflow.
- let mut days: i64 = (t / 86_400) - (LEAPOCH / 86_400);
- let mut remsecs: i32 = (t % 86_400) as i32;
- if remsecs < 0i32 {
- remsecs += 86_400;
- days -= 1
- }
-
- let mut qc_cycles: i32 = (days / i64::from(DAYS_PER_400Y)) as i32;
- let mut remdays: i32 = (days % i64::from(DAYS_PER_400Y)) as i32;
- if remdays < 0 {
- remdays += DAYS_PER_400Y;
- qc_cycles -= 1;
- }
-
- let mut c_cycles: i32 = remdays / DAYS_PER_100Y;
- if c_cycles == 4 {
- c_cycles -= 1;
- }
- remdays -= c_cycles * DAYS_PER_100Y;
-
- let mut q_cycles: i32 = remdays / DAYS_PER_4Y;
- if q_cycles == 25 {
- q_cycles -= 1;
- }
- remdays -= q_cycles * DAYS_PER_4Y;
-
- let mut remyears: i32 = remdays / 365;
- if remyears == 4 {
- remyears -= 1;
- }
- remdays -= remyears * 365;
-
- let mut years: i64 = i64::from(remyears)
- + 4 * i64::from(q_cycles)
- + 100 * i64::from(c_cycles)
- + 400 * i64::from(qc_cycles);
-
- let mut months: i32 = 0;
- while i32::from(DAYS_IN_MONTH[months as usize]) <= remdays {
- remdays -= i32::from(DAYS_IN_MONTH[months as usize]);
- months += 1
- }
-
- if months >= 10 {
- months -= 12;
- years += 1;
- }
-
- let date_time = DateTime {
- year: years + 2000,
- month: (months + 3) as u8,
- day: (remdays + 1) as u8,
- hour: (remsecs / 3600) as u8,
- minute: (remsecs / 60 % 60) as u8,
- second: (remsecs % 60) as u8,
- nanos: nanos as u32,
- };
- debug_assert!(date_time.is_valid());
- date_time
- }
-}
-
-/// Returns the number of days in the month.
-fn days_in_month(year: i64, month: u8) -> u8 {
- const DAYS_IN_MONTH: [u8; 12] = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
- let (_, is_leap) = year_to_seconds(year);
- DAYS_IN_MONTH[usize::from(month - 1)] + u8::from(is_leap && month == 2)
-}
-
-macro_rules! ensure {
- ($expr:expr) => {{
- if !$expr {
- return None;
- }
- }};
-}
-
-/// Parses a date in RFC 3339 format from ASCII string `b`, returning the year, month, day, and
-/// remaining input.
-///
-/// The date is not validated according to a calendar.
-fn parse_date(s: &str) -> Option<(i64, u8, u8, &str)> {
- debug_assert!(s.is_ascii());
-
- // Smallest valid date is YYYY-MM-DD.
- ensure!(s.len() >= 10);
-
- // Parse the year in one of three formats:
- // * +YYYY[Y]+
- // * -[Y]+
- // * YYYY
- let (year, s) = match s.as_bytes()[0] {
- b'+' => {
- let (digits, s) = parse_digits(&s[1..]);
- ensure!(digits.len() >= 5);
- let date: i64 = digits.parse().ok()?;
- (date, s)
- }
- b'-' => {
- let (digits, s) = parse_digits(&s[1..]);
- ensure!(digits.len() >= 4);
- let date: i64 = digits.parse().ok()?;
- (-date, s)
- }
- _ => {
- // Parse a 4 digit numeric.
- let (n1, s) = parse_two_digit_numeric(s)?;
- let (n2, s) = parse_two_digit_numeric(s)?;
- (i64::from(n1) * 100 + i64::from(n2), s)
- }
- };
-
- let s = parse_char(s, b'-')?;
- let (month, s) = parse_two_digit_numeric(s)?;
- let s = parse_char(s, b'-')?;
- let (day, s) = parse_two_digit_numeric(s)?;
- Some((year, month, day, s))
-}
-
-/// Parses a time in RFC 3339 format from ASCII string `s`, returning the hour, minute, second, and
-/// nanos.
-///
-/// The date is not validated according to a calendar.
-fn parse_time(s: &str) -> Option<(u8, u8, u8, u32, &str)> {
- debug_assert!(s.is_ascii());
-
- let (hour, s) = parse_two_digit_numeric(s)?;
- let s = parse_char(s, b':')?;
- let (minute, s) = parse_two_digit_numeric(s)?;
- let s = parse_char(s, b':')?;
- let (second, s) = parse_two_digit_numeric(s)?;
-
- let (nanos, s) = parse_nanos(s)?;
-
- Some((hour, minute, second, nanos, s))
-}
-
-/// Parses an optional nanosecond time from ASCII string `s`, returning the nanos and remaining
-/// string.
-fn parse_nanos(s: &str) -> Option<(u32, &str)> {
- debug_assert!(s.is_ascii());
-
- // Parse the nanoseconds, if present.
- let (nanos, s) = if let Some(s) = parse_char(s, b'.') {
- let (digits, s) = parse_digits(s);
- ensure!(digits.len() <= 9);
- let nanos = 10u32.pow(9 - digits.len() as u32) * digits.parse::<u32>().ok()?;
- (nanos, s)
- } else {
- (0, s)
- };
-
- Some((nanos, s))
-}
-
-/// Parses a timezone offset in RFC 3339 format from ASCII string `s`, returning the offset hour,
-/// offset minute, and remaining input.
-fn parse_offset(s: &str) -> Option<(i8, i8, &str)> {
- debug_assert!(s.is_ascii());
-
- if s.is_empty() {
- // If no timezone specified, assume UTC.
- return Some((0, 0, s));
- }
-
- // Snowflake's timestamp format contains a space separator before the offset.
- let s = parse_char(s, b' ').unwrap_or(s);
-
- if let Some(s) = parse_char_ignore_case(s, b'Z') {
- Some((0, 0, s))
- } else {
- let (is_positive, s) = if let Some(s) = parse_char(s, b'+') {
- (true, s)
- } else if let Some(s) = parse_char(s, b'-') {
- (false, s)
- } else {
- return None;
- };
-
- let (hour, s) = parse_two_digit_numeric(s)?;
-
- let (minute, s) = if s.is_empty() {
- // No offset minutes are specified, e.g. +00 or +07.
- (0, s)
- } else {
- // Optional colon separator between the hour and minute digits.
- let s = parse_char(s, b':').unwrap_or(s);
- let (minute, s) = parse_two_digit_numeric(s)?;
- (minute, s)
- };
-
- // '-00:00' indicates an unknown local offset.
- ensure!(is_positive || hour > 0 || minute > 0);
-
- ensure!(hour < 24 && minute < 60);
-
- let hour = hour as i8;
- let minute = minute as i8;
-
- if is_positive {
- Some((hour, minute, s))
- } else {
- Some((-hour, -minute, s))
- }
- }
-}
-
-/// Parses a two-digit base-10 number from ASCII string `s`, returning the number and the remaining
-/// string.
-fn parse_two_digit_numeric(s: &str) -> Option<(u8, &str)> {
- debug_assert!(s.is_ascii());
-
- let (digits, s) = s.split_at(2);
- Some((digits.parse().ok()?, s))
-}
-
-/// Splits ASCII string `s` at the first occurrence of a non-digit character.
-fn parse_digits(s: &str) -> (&str, &str) {
- debug_assert!(s.is_ascii());
-
- let idx = s
- .as_bytes()
- .iter()
- .position(|c| !c.is_ascii_digit())
- .unwrap_or(s.len());
- s.split_at(idx)
-}
-
-/// Attempts to parse ASCII character `c` from ASCII string `s`, returning the remaining string. If
-/// the character can not be parsed, returns `None`.
-fn parse_char(s: &str, c: u8) -> Option<&str> {
- debug_assert!(s.is_ascii());
-
- ensure!(*s.as_bytes().first()? == c);
- Some(&s[1..])
-}
-
-/// Attempts to parse ASCII character `c` from ASCII string `s`, ignoring ASCII case, returning the
-/// remaining string. If the character can not be parsed, returns `None`.
-fn parse_char_ignore_case(s: &str, c: u8) -> Option<&str> {
- debug_assert!(s.is_ascii());
-
- ensure!(s.as_bytes().first()?.eq_ignore_ascii_case(&c));
- Some(&s[1..])
-}
-
-/// Returns the offset in seconds from the Unix epoch of the date time.
-///
-/// This is musl's [`__tm_to_secs`][1] converted to Rust via [c2rust][2] and then cleaned up by
-/// hand.
-///
-/// [1]: https://git.musl-libc.org/cgit/musl/tree/src/time/__tm_to_secs.c
-/// [2]: https://c2rust.com/
-fn date_time_to_seconds(tm: &DateTime) -> i64 {
- let (start_of_year, is_leap) = year_to_seconds(tm.year);
-
- let seconds_within_year = month_to_seconds(tm.month, is_leap)
- + 86400 * u32::from(tm.day - 1)
- + 3600 * u32::from(tm.hour)
- + 60 * u32::from(tm.minute)
- + u32::from(tm.second);
-
- (start_of_year + i128::from(seconds_within_year)) as i64
-}
-
-/// Returns the number of seconds in the year prior to the start of the provided month.
-///
-/// This is musl's [`__month_to_secs`][1] converted to Rust via c2rust and then cleaned up by hand.
-///
-/// [1]: https://git.musl-libc.org/cgit/musl/tree/src/time/__month_to_secs.c
-fn month_to_seconds(month: u8, is_leap: bool) -> u32 {
- const SECS_THROUGH_MONTH: [u32; 12] = [
- 0,
- 31 * 86400,
- 59 * 86400,
- 90 * 86400,
- 120 * 86400,
- 151 * 86400,
- 181 * 86400,
- 212 * 86400,
- 243 * 86400,
- 273 * 86400,
- 304 * 86400,
- 334 * 86400,
- ];
- let t = SECS_THROUGH_MONTH[usize::from(month - 1)];
- if is_leap && month > 2 {
- t + 86400
- } else {
- t
- }
-}
-
-/// Returns the offset in seconds from the Unix epoch of the start of a year.
-///
-/// musl's [`__year_to_secs`][1] converted to Rust via c2rust and then cleaned up by hand.
-///
-/// Returns an i128 because the start of the earliest supported year underflows i64.
-///
-/// [1]: https://git.musl-libc.org/cgit/musl/tree/src/time/__year_to_secs.c
-pub(crate) fn year_to_seconds(year: i64) -> (i128, bool) {
- let is_leap;
- let year = year - 1900;
-
- // Fast path for years 1900 - 2038.
- if year as u64 <= 138 {
- let mut leaps: i64 = (year - 68) >> 2;
- if (year - 68).trailing_zeros() >= 2 {
- leaps -= 1;
- is_leap = true;
- } else {
- is_leap = false;
- }
- return (
- i128::from(31_536_000 * (year - 70) + 86400 * leaps),
- is_leap,
- );
- }
-
- let centuries: i64;
- let mut leaps: i64;
-
- let mut cycles: i64 = (year - 100) / 400;
- let mut rem: i64 = (year - 100) % 400;
-
- if rem < 0 {
- cycles -= 1;
- rem += 400
- }
- if rem == 0 {
- is_leap = true;
- centuries = 0;
- leaps = 0;
- } else {
- if rem >= 200 {
- if rem >= 300 {
- centuries = 3;
- rem -= 300;
- } else {
- centuries = 2;
- rem -= 200;
- }
- } else if rem >= 100 {
- centuries = 1;
- rem -= 100;
- } else {
- centuries = 0;
- }
- if rem == 0 {
- is_leap = false;
- leaps = 0;
- } else {
- leaps = rem / 4;
- rem %= 4;
- is_leap = rem == 0;
- }
- }
- leaps += 97 * cycles + 24 * centuries - i64::from(is_leap);
-
- (
- i128::from((year - 100) * 31_536_000) + i128::from(leaps * 86400 + 946_684_800 + 86400),
- is_leap,
- )
-}
-
-/// Parses a timestamp in RFC 3339 format from `s`.
-pub(crate) fn parse_timestamp(s: &str) -> Option<Timestamp> {
- // Check that the string is ASCII, since subsequent parsing steps use byte-level indexing.
- ensure!(s.is_ascii());
-
- let (year, month, day, s) = parse_date(s)?;
-
- if s.is_empty() {
- // The string only contained a date.
- let date_time = DateTime {
- year,
- month,
- day,
- ..DateTime::default()
- };
-
- ensure!(date_time.is_valid());
-
- return Some(Timestamp::from(date_time));
- }
-
- // Accept either 'T' or ' ' as delimiter between date and time.
- let s = parse_char_ignore_case(s, b'T').or_else(|| parse_char(s, b' '))?;
- let (hour, minute, mut second, nanos, s) = parse_time(s)?;
- let (offset_hour, offset_minute, s) = parse_offset(s)?;
-
- ensure!(s.is_empty());
-
- // Detect whether the timestamp falls in a leap second. If this is the case, roll it back
- // to the previous second. To be maximally conservative, this should be checking that the
- // timestamp is the last second in the UTC day (23:59:60), and even potentially checking
- // that it's the final day of the UTC month, however these checks are non-trivial because
- // at this point we have, in effect, a local date time, since the offset has not been
- // applied.
- if second == 60 {
- second = 59;
- }
-
- let date_time = DateTime {
- year,
- month,
- day,
- hour,
- minute,
- second,
- nanos,
- };
-
- ensure!(date_time.is_valid());
-
- let Timestamp { seconds, nanos } = Timestamp::from(date_time);
-
- let seconds =
- seconds.checked_sub(i64::from(offset_hour) * 3600 + i64::from(offset_minute) * 60)?;
-
- Some(Timestamp { seconds, nanos })
-}
-
-/// Parse a duration in the [Protobuf JSON encoding spec format][1].
-///
-/// [1]: https://developers.google.com/protocol-buffers/docs/proto3#json
-pub(crate) fn parse_duration(s: &str) -> Option<Duration> {
- // Check that the string is ASCII, since subsequent parsing steps use byte-level indexing.
- ensure!(s.is_ascii());
-
- let (is_negative, s) = match parse_char(s, b'-') {
- Some(s) => (true, s),
- None => (false, s),
- };
-
- let (digits, s) = parse_digits(s);
- let seconds = digits.parse::<i64>().ok()?;
-
- let (nanos, s) = parse_nanos(s)?;
-
- let s = parse_char(s, b's')?;
- ensure!(s.is_empty());
- ensure!(nanos < crate::NANOS_PER_SECOND as u32);
-
- // If the duration is negative, also flip the nanos sign.
- let (seconds, nanos) = if is_negative {
- (-seconds, -(nanos as i32))
- } else {
- (seconds, nanos as i32)
- };
-
- Some(Duration { seconds, nanos })
-}
-
-impl From<DateTime> for Timestamp {
- fn from(date_time: DateTime) -> Timestamp {
- let seconds = date_time_to_seconds(&date_time);
- let nanos = date_time.nanos;
- Timestamp {
- seconds,
- nanos: nanos as i32,
- }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use proptest::prelude::*;
-
- #[test]
- fn test_min_max() {
- assert_eq!(
- DateTime::MIN,
- DateTime::from(Timestamp {
- seconds: i64::MIN,
- nanos: 0
- }),
- );
- assert_eq!(
- DateTime::MAX,
- DateTime::from(Timestamp {
- seconds: i64::MAX,
- nanos: 999_999_999
- }),
- );
- }
-
- #[cfg(feature = "std")]
- #[test]
- fn test_datetime_from_timestamp() {
- let case = |expected: &str, secs: i64, nanos: i32| {
- let timestamp = Timestamp {
- seconds: secs,
- nanos,
- };
- assert_eq!(
- expected,
- format!("{}", DateTime::from(timestamp.clone())),
- "timestamp: {:?}",
- timestamp
- );
- };
-
- // Mostly generated with:
- // - date -jur <secs> +"%Y-%m-%dT%H:%M:%S.000000000Z"
- // - http://unixtimestamp.50x.eu/
-
- case("1970-01-01T00:00:00Z", 0, 0);
-
- case("1970-01-01T00:00:00.000000001Z", 0, 1);
- case("1970-01-01T00:00:00.123450Z", 0, 123_450_000);
- case("1970-01-01T00:00:00.050Z", 0, 50_000_000);
- case("1970-01-01T00:00:01.000000001Z", 1, 1);
- case("1970-01-01T00:01:01.000000001Z", 60 + 1, 1);
- case("1970-01-01T01:01:01.000000001Z", 60 * 60 + 60 + 1, 1);
- case(
- "1970-01-02T01:01:01.000000001Z",
- 24 * 60 * 60 + 60 * 60 + 60 + 1,
- 1,
- );
-
- case("1969-12-31T23:59:59Z", -1, 0);
- case("1969-12-31T23:59:59.000001Z", -1, 1_000);
- case("1969-12-31T23:59:59.500Z", -1, 500_000_000);
- case("1969-12-31T23:58:59.000001Z", -60 - 1, 1_000);
- case("1969-12-31T22:58:59.000001Z", -60 * 60 - 60 - 1, 1_000);
- case(
- "1969-12-30T22:58:59.000000001Z",
- -24 * 60 * 60 - 60 * 60 - 60 - 1,
- 1,
- );
-
- case("2038-01-19T03:14:07Z", i32::MAX as i64, 0);
- case("2038-01-19T03:14:08Z", i32::MAX as i64 + 1, 0);
- case("1901-12-13T20:45:52Z", i32::MIN as i64, 0);
- case("1901-12-13T20:45:51Z", i32::MIN as i64 - 1, 0);
-
- // Skipping these tests on Windows, as the std::time::SystemTime range is
- // smaller on Windows than on Unix, which can cause the following
- // high date value tests to panic.
- #[cfg(not(target_os = "windows"))]
- {
- case("+292277026596-12-04T15:30:07Z", i64::MAX, 0);
- case("+292277026596-12-04T15:30:06Z", i64::MAX - 1, 0);
- case("-292277022657-01-27T08:29:53Z", i64::MIN + 1, 0);
- }
-
- case("1900-01-01T00:00:00Z", -2_208_988_800, 0);
- case("1899-12-31T23:59:59Z", -2_208_988_801, 0);
- case("0000-01-01T00:00:00Z", -62_167_219_200, 0);
- case("-0001-12-31T23:59:59Z", -62_167_219_201, 0);
-
- case("1234-05-06T07:08:09Z", -23_215_049_511, 0);
- case("-1234-05-06T07:08:09Z", -101_097_651_111, 0);
- case("2345-06-07T08:09:01Z", 11_847_456_541, 0);
- case("-2345-06-07T08:09:01Z", -136_154_620_259, 0);
- }
-
- #[test]
- fn test_parse_timestamp() {
- // RFC 3339 Section 5.8 Examples
- assert_eq!(
- "1985-04-12T23:20:50.52Z".parse::<Timestamp>(),
- Timestamp::date_time_nanos(1985, 4, 12, 23, 20, 50, 520_000_000),
- );
- assert_eq!(
- "1996-12-19T16:39:57-08:00".parse::<Timestamp>(),
- Timestamp::date_time(1996, 12, 20, 0, 39, 57),
- );
- assert_eq!(
- "1996-12-19T16:39:57-08:00".parse::<Timestamp>(),
- Timestamp::date_time(1996, 12, 20, 0, 39, 57),
- );
- assert_eq!(
- "1990-12-31T23:59:60Z".parse::<Timestamp>(),
- Timestamp::date_time(1990, 12, 31, 23, 59, 59),
- );
- assert_eq!(
- "1990-12-31T15:59:60-08:00".parse::<Timestamp>(),
- Timestamp::date_time(1990, 12, 31, 23, 59, 59),
- );
- assert_eq!(
- "1937-01-01T12:00:27.87+00:20".parse::<Timestamp>(),
- Timestamp::date_time_nanos(1937, 1, 1, 11, 40, 27, 870_000_000),
- );
-
- // Date
- assert_eq!(
- "1937-01-01".parse::<Timestamp>(),
- Timestamp::date(1937, 1, 1),
- );
-
- // Negative year
- assert_eq!(
- "-0008-01-01".parse::<Timestamp>(),
- Timestamp::date(-8, 1, 1),
- );
-
- // Plus year
- assert_eq!(
- "+19370-01-01".parse::<Timestamp>(),
- Timestamp::date(19370, 1, 1),
- );
-
- // Full nanos
- assert_eq!(
- "2020-02-03T01:02:03.123456789Z".parse::<Timestamp>(),
- Timestamp::date_time_nanos(2020, 2, 3, 1, 2, 3, 123_456_789),
- );
-
- // Leap day
- assert_eq!(
- "2020-02-29T01:02:03.00Z".parse::<Timestamp>().unwrap(),
- Timestamp::from(DateTime {
- year: 2020,
- month: 2,
- day: 29,
- hour: 1,
- minute: 2,
- second: 3,
- nanos: 0,
- }),
- );
-
- // Test extensions to RFC 3339.
- // ' ' instead of 'T' as date/time separator.
- assert_eq!(
- "1985-04-12 23:20:50.52Z".parse::<Timestamp>(),
- Timestamp::date_time_nanos(1985, 4, 12, 23, 20, 50, 520_000_000),
- );
-
- // No time zone specified.
- assert_eq!(
- "1985-04-12T23:20:50.52".parse::<Timestamp>(),
- Timestamp::date_time_nanos(1985, 4, 12, 23, 20, 50, 520_000_000),
- );
-
- // Offset without minutes specified.
- assert_eq!(
- "1996-12-19T16:39:57-08".parse::<Timestamp>(),
- Timestamp::date_time(1996, 12, 20, 0, 39, 57),
- );
-
- // Snowflake stage style.
- assert_eq!(
- "2015-09-12 00:47:19.591 Z".parse::<Timestamp>(),
- Timestamp::date_time_nanos(2015, 9, 12, 0, 47, 19, 591_000_000),
- );
- assert_eq!(
- "2020-06-15 00:01:02.123 +0800".parse::<Timestamp>(),
- Timestamp::date_time_nanos(2020, 6, 14, 16, 1, 2, 123_000_000),
- );
- }
-
- #[test]
- fn test_parse_duration() {
- let case = |s: &str, seconds: i64, nanos: i32| {
- assert_eq!(
- s.parse::<Duration>().unwrap(),
- Duration { seconds, nanos },
- "duration: {}",
- s
- );
- };
-
- case("0s", 0, 0);
- case("0.0s", 0, 0);
- case("0.000s", 0, 0);
-
- case("-0s", 0, 0);
- case("-0.0s", 0, 0);
- case("-0.000s", 0, 0);
-
- case("-0s", 0, 0);
- case("-0.0s", 0, 0);
- case("-0.000s", 0, 0);
-
- case("0.05s", 0, 50_000_000);
- case("0.050s", 0, 50_000_000);
-
- case("-0.05s", 0, -50_000_000);
- case("-0.050s", 0, -50_000_000);
-
- case("1s", 1, 0);
- case("1.0s", 1, 0);
- case("1.000s", 1, 0);
-
- case("-1s", -1, 0);
- case("-1.0s", -1, 0);
- case("-1.000s", -1, 0);
-
- case("15s", 15, 0);
- case("15.1s", 15, 100_000_000);
- case("15.100s", 15, 100_000_000);
-
- case("-15s", -15, 0);
- case("-15.1s", -15, -100_000_000);
- case("-15.100s", -15, -100_000_000);
-
- case("100.000000009s", 100, 9);
- case("-100.000000009s", -100, -9);
- }
-
- #[test]
- fn test_parse_non_ascii() {
- assert!("2021️⃣-06-15 00:01:02.123 +0800"
- .parse::<Timestamp>()
- .is_err());
-
- assert!("1️⃣s".parse::<Duration>().is_err());
- }
-
- proptest! {
- #[cfg(feature = "std")]
- #[test]
- fn check_timestamp_parse_to_string_roundtrip(
- system_time in std::time::SystemTime::arbitrary(),
- ) {
-
- let ts = Timestamp::from(system_time);
-
- assert_eq!(
- ts,
- ts.to_string().parse::<Timestamp>().unwrap(),
- )
- }
-
- #[cfg(feature = "std")]
- #[test]
- fn check_duration_parse_to_string_roundtrip(
- duration in core::time::Duration::arbitrary(),
- ) {
- let duration = match Duration::try_from(duration) {
- Ok(duration) => duration,
- Err(_) => return Err(TestCaseError::reject("duration out of range")),
- };
-
- prop_assert_eq!(
- &duration,
- &duration.to_string().parse::<Duration>().unwrap(),
- "{}", duration.to_string()
- );
- }
- }
-}
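The datetime.rs removed above converts a parsed timestamp to UTC by subtracting the signed RFC 3339 offset from the epoch seconds computed for the local date-time (see parse_timestamp). A minimal standalone sketch of just that step; the helper name apply_offset is ours and not part of the deleted crate:

/// Sketch only: mirrors the offset-handling step of the removed `parse_timestamp`.
fn apply_offset(local_seconds: i64, offset_hour: i8, offset_minute: i8) -> Option<i64> {
    local_seconds.checked_sub(i64::from(offset_hour) * 3600 + i64::from(offset_minute) * 60)
}

fn main() {
    // A +01:00 offset means the local clock reads one hour ahead of UTC,
    // so the UTC instant is one hour earlier in epoch seconds.
    assert_eq!(apply_offset(3_600, 1, 0), Some(0));
    // A -08:00 offset (as in "1996-12-19T16:39:57-08:00") pushes the
    // instant eight hours forward to reach UTC.
    assert_eq!(apply_offset(0, -8, 0), Some(8 * 3_600));
}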
diff --git a/vendor/prost-types-0.12.6/src/duration.rs b/vendor/prost-types-0.12.6/src/duration.rs
deleted file mode 100644
index 60071693..00000000
--- a/vendor/prost-types-0.12.6/src/duration.rs
+++ /dev/null
@@ -1,333 +0,0 @@
-use super::*;
-
-#[cfg(feature = "std")]
-impl std::hash::Hash for Duration {
- fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
- self.seconds.hash(state);
- self.nanos.hash(state);
- }
-}
-
-impl Duration {
- /// Normalizes the duration to a canonical format.
- ///
- /// Based on [`google::protobuf::util::CreateNormalized`][1].
- ///
- /// [1]: https://github.com/google/protobuf/blob/v3.3.2/src/google/protobuf/util/time_util.cc#L79-L100
- pub fn normalize(&mut self) {
- // Make sure nanos is in the range.
- if self.nanos <= -NANOS_PER_SECOND || self.nanos >= NANOS_PER_SECOND {
- if let Some(seconds) = self
- .seconds
- .checked_add((self.nanos / NANOS_PER_SECOND) as i64)
- {
- self.seconds = seconds;
- self.nanos %= NANOS_PER_SECOND;
- } else if self.nanos < 0 {
- // Negative overflow! Set to the least normal value.
- self.seconds = i64::MIN;
- self.nanos = -NANOS_MAX;
- } else {
- // Positive overflow! Set to the greatest normal value.
- self.seconds = i64::MAX;
- self.nanos = NANOS_MAX;
- }
- }
-
- // nanos should have the same sign as seconds.
- if self.seconds < 0 && self.nanos > 0 {
- if let Some(seconds) = self.seconds.checked_add(1) {
- self.seconds = seconds;
- self.nanos -= NANOS_PER_SECOND;
- } else {
- // Positive overflow! Set to the greatest normal value.
- debug_assert_eq!(self.seconds, i64::MAX);
- self.nanos = NANOS_MAX;
- }
- } else if self.seconds > 0 && self.nanos < 0 {
- if let Some(seconds) = self.seconds.checked_sub(1) {
- self.seconds = seconds;
- self.nanos += NANOS_PER_SECOND;
- } else {
- // Negative overflow! Set to the least normal value.
- debug_assert_eq!(self.seconds, i64::MIN);
- self.nanos = -NANOS_MAX;
- }
- }
- // TODO: should this be checked?
- // debug_assert!(self.seconds >= -315_576_000_000 && self.seconds <= 315_576_000_000,
- // "invalid duration: {:?}", self);
- }
-}
-
-impl Name for Duration {
- const PACKAGE: &'static str = PACKAGE;
- const NAME: &'static str = "Duration";
-
- fn type_url() -> String {
- type_url_for::<Self>()
- }
-}
-
-impl TryFrom<time::Duration> for Duration {
- type Error = DurationError;
-
- /// Converts a `std::time::Duration` to a `Duration`, failing if the duration is too large.
- fn try_from(duration: time::Duration) -> Result<Duration, DurationError> {
- let seconds = i64::try_from(duration.as_secs()).map_err(|_| DurationError::OutOfRange)?;
- let nanos = duration.subsec_nanos() as i32;
-
- let mut duration = Duration { seconds, nanos };
- duration.normalize();
- Ok(duration)
- }
-}
-
-impl TryFrom<Duration> for time::Duration {
- type Error = DurationError;
-
- /// Converts a `Duration` to a `std::time::Duration`, failing if the duration is negative.
- fn try_from(mut duration: Duration) -> Result<time::Duration, DurationError> {
- duration.normalize();
- if duration.seconds >= 0 && duration.nanos >= 0 {
- Ok(time::Duration::new(
- duration.seconds as u64,
- duration.nanos as u32,
- ))
- } else {
- Err(DurationError::NegativeDuration(time::Duration::new(
- (-duration.seconds) as u64,
- (-duration.nanos) as u32,
- )))
- }
- }
-}
-
-impl fmt::Display for Duration {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let mut d = self.clone();
- d.normalize();
- if self.seconds < 0 && self.nanos < 0 {
- write!(f, "-")?;
- }
- write!(f, "{}", d.seconds.abs())?;
-
- // Format subseconds to either nothing, millis, micros, or nanos.
- let nanos = d.nanos.abs();
- if nanos == 0 {
- write!(f, "s")
- } else if nanos % 1_000_000 == 0 {
- write!(f, ".{:03}s", nanos / 1_000_000)
- } else if nanos % 1_000 == 0 {
- write!(f, ".{:06}s", nanos / 1_000)
- } else {
- write!(f, ".{:09}s", nanos)
- }
- }
-}
-
-/// A duration handling error.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Debug, PartialEq)]
-#[non_exhaustive]
-pub enum DurationError {
- /// Indicates failure to parse a [`Duration`] from a string.
- ///
- /// The [`Duration`] string format is specified in the [Protobuf JSON mapping specification][1].
- ///
- /// [1]: https://developers.google.com/protocol-buffers/docs/proto3#json
- ParseFailure,
-
- /// Indicates failure to convert a `prost_types::Duration` to a `std::time::Duration` because
- /// the duration is negative. The included `std::time::Duration` matches the magnitude of the
- /// original negative `prost_types::Duration`.
- NegativeDuration(time::Duration),
-
- /// Indicates failure to convert a `std::time::Duration` to a `prost_types::Duration`.
- ///
- /// Converting a `std::time::Duration` to a `prost_types::Duration` fails if the magnitude
- /// exceeds that representable by `prost_types::Duration`.
- OutOfRange,
-}
-
-impl fmt::Display for DurationError {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self {
- DurationError::ParseFailure => write!(f, "failed to parse duration"),
- DurationError::NegativeDuration(duration) => {
- write!(f, "failed to convert negative duration: {:?}", duration)
- }
- DurationError::OutOfRange => {
- write!(f, "failed to convert duration out of range")
- }
- }
- }
-}
-
-#[cfg(feature = "std")]
-impl std::error::Error for DurationError {}
-
-impl FromStr for Duration {
- type Err = DurationError;
-
- fn from_str(s: &str) -> Result<Duration, DurationError> {
- datetime::parse_duration(s).ok_or(DurationError::ParseFailure)
- }
-}
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[cfg(feature = "std")]
- use proptest::prelude::*;
-
- #[cfg(feature = "std")]
- proptest! {
- #[test]
- fn check_duration_roundtrip(
- seconds in u64::arbitrary(),
- nanos in 0u32..1_000_000_000u32,
- ) {
- let std_duration = time::Duration::new(seconds, nanos);
- let prost_duration = match Duration::try_from(std_duration) {
- Ok(duration) => duration,
- Err(_) => return Err(TestCaseError::reject("duration out of range")),
- };
- prop_assert_eq!(time::Duration::try_from(prost_duration.clone()).unwrap(), std_duration);
-
- if std_duration != time::Duration::default() {
- let neg_prost_duration = Duration {
- seconds: -prost_duration.seconds,
- nanos: -prost_duration.nanos,
- };
-
- prop_assert!(
- matches!(
- time::Duration::try_from(neg_prost_duration),
- Err(DurationError::NegativeDuration(d)) if d == std_duration,
- )
- )
- }
- }
-
- #[test]
- fn check_duration_roundtrip_nanos(
- nanos in u32::arbitrary(),
- ) {
- let seconds = 0;
- let std_duration = std::time::Duration::new(seconds, nanos);
- let prost_duration = match Duration::try_from(std_duration) {
- Ok(duration) => duration,
- Err(_) => return Err(TestCaseError::reject("duration out of range")),
- };
- prop_assert_eq!(time::Duration::try_from(prost_duration.clone()).unwrap(), std_duration);
-
- if std_duration != time::Duration::default() {
- let neg_prost_duration = Duration {
- seconds: -prost_duration.seconds,
- nanos: -prost_duration.nanos,
- };
-
- prop_assert!(
- matches!(
- time::Duration::try_from(neg_prost_duration),
- Err(DurationError::NegativeDuration(d)) if d == std_duration,
- )
- )
- }
- }
- }
-
- #[cfg(feature = "std")]
- #[test]
- fn check_duration_try_from_negative_nanos() {
- let seconds: u64 = 0;
- let nanos: u32 = 1;
- let std_duration = std::time::Duration::new(seconds, nanos);
-
- let neg_prost_duration = Duration {
- seconds: 0,
- nanos: -1,
- };
-
- assert!(matches!(
- time::Duration::try_from(neg_prost_duration),
- Err(DurationError::NegativeDuration(d)) if d == std_duration,
- ))
- }
-
- #[test]
- fn check_duration_normalize() {
- #[rustfmt::skip] // Don't mangle the table formatting.
- let cases = [
- // --- Table of test cases ---
- // test seconds test nanos expected seconds expected nanos
- (line!(), 0, 0, 0, 0),
- (line!(), 1, 1, 1, 1),
- (line!(), -1, -1, -1, -1),
- (line!(), 0, 999_999_999, 0, 999_999_999),
- (line!(), 0, -999_999_999, 0, -999_999_999),
- (line!(), 0, 1_000_000_000, 1, 0),
- (line!(), 0, -1_000_000_000, -1, 0),
- (line!(), 0, 1_000_000_001, 1, 1),
- (line!(), 0, -1_000_000_001, -1, -1),
- (line!(), -1, 1, 0, -999_999_999),
- (line!(), 1, -1, 0, 999_999_999),
- (line!(), -1, 1_000_000_000, 0, 0),
- (line!(), 1, -1_000_000_000, 0, 0),
- (line!(), i64::MIN , 0, i64::MIN , 0),
- (line!(), i64::MIN + 1, 0, i64::MIN + 1, 0),
- (line!(), i64::MIN , 1, i64::MIN + 1, -999_999_999),
- (line!(), i64::MIN , 1_000_000_000, i64::MIN + 1, 0),
- (line!(), i64::MIN , -1_000_000_000, i64::MIN , -999_999_999),
- (line!(), i64::MIN + 1, -1_000_000_000, i64::MIN , 0),
- (line!(), i64::MIN + 2, -1_000_000_000, i64::MIN + 1, 0),
- (line!(), i64::MIN , -1_999_999_998, i64::MIN , -999_999_999),
- (line!(), i64::MIN + 1, -1_999_999_998, i64::MIN , -999_999_998),
- (line!(), i64::MIN + 2, -1_999_999_998, i64::MIN + 1, -999_999_998),
- (line!(), i64::MIN , -1_999_999_999, i64::MIN , -999_999_999),
- (line!(), i64::MIN + 1, -1_999_999_999, i64::MIN , -999_999_999),
- (line!(), i64::MIN + 2, -1_999_999_999, i64::MIN + 1, -999_999_999),
- (line!(), i64::MIN , -2_000_000_000, i64::MIN , -999_999_999),
- (line!(), i64::MIN + 1, -2_000_000_000, i64::MIN , -999_999_999),
- (line!(), i64::MIN + 2, -2_000_000_000, i64::MIN , 0),
- (line!(), i64::MIN , -999_999_998, i64::MIN , -999_999_998),
- (line!(), i64::MIN + 1, -999_999_998, i64::MIN + 1, -999_999_998),
- (line!(), i64::MAX , 0, i64::MAX , 0),
- (line!(), i64::MAX - 1, 0, i64::MAX - 1, 0),
- (line!(), i64::MAX , -1, i64::MAX - 1, 999_999_999),
- (line!(), i64::MAX , 1_000_000_000, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 1, 1_000_000_000, i64::MAX , 0),
- (line!(), i64::MAX - 2, 1_000_000_000, i64::MAX - 1, 0),
- (line!(), i64::MAX , 1_999_999_998, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 1, 1_999_999_998, i64::MAX , 999_999_998),
- (line!(), i64::MAX - 2, 1_999_999_998, i64::MAX - 1, 999_999_998),
- (line!(), i64::MAX , 1_999_999_999, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 1, 1_999_999_999, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 2, 1_999_999_999, i64::MAX - 1, 999_999_999),
- (line!(), i64::MAX , 2_000_000_000, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 1, 2_000_000_000, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 2, 2_000_000_000, i64::MAX , 0),
- (line!(), i64::MAX , 999_999_998, i64::MAX , 999_999_998),
- (line!(), i64::MAX - 1, 999_999_998, i64::MAX - 1, 999_999_998),
- ];
-
- for case in cases.iter() {
- let mut test_duration = Duration {
- seconds: case.1,
- nanos: case.2,
- };
- test_duration.normalize();
-
- assert_eq!(
- test_duration,
- Duration {
- seconds: case.3,
- nanos: case.4,
- },
- "test case on line {} doesn't match",
- case.0,
- );
- }
- }
-}
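The Duration::normalize removed above keeps nanos within ±999,999,999 and gives it the same sign as seconds. A short sketch of the non-overflowing carry logic, checked against rows of the deleted table test; the free function below is illustrative and not the crate's API:

/// Sketch only: the carry step of the removed `Duration::normalize`,
/// ignoring the i64::MIN / i64::MAX saturation branches.
fn normalize(mut seconds: i64, mut nanos: i32) -> (i64, i32) {
    const NANOS_PER_SECOND: i32 = 1_000_000_000;
    // Move whole seconds out of the nanos field.
    seconds += i64::from(nanos / NANOS_PER_SECOND);
    nanos %= NANOS_PER_SECOND;
    // Make nanos carry the same sign as seconds.
    if seconds < 0 && nanos > 0 {
        seconds += 1;
        nanos -= NANOS_PER_SECOND;
    } else if seconds > 0 && nanos < 0 {
        seconds -= 1;
        nanos += NANOS_PER_SECOND;
    }
    (seconds, nanos)
}

fn main() {
    // These match rows of the deleted check_duration_normalize table.
    assert_eq!(normalize(0, 1_000_000_001), (1, 1));
    assert_eq!(normalize(-1, 1), (0, -999_999_999));
    assert_eq!(normalize(1, -1), (0, 999_999_999));
}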
diff --git a/vendor/prost-types-0.12.6/src/lib.rs b/vendor/prost-types-0.12.6/src/lib.rs
deleted file mode 100644
index a2a94d43..00000000
--- a/vendor/prost-types-0.12.6/src/lib.rs
+++ /dev/null
@@ -1,55 +0,0 @@
-#![doc(html_root_url = "https://docs.rs/prost-types/0.12.6")]
-
-//! Protocol Buffers well-known types.
-//!
-//! Note that the documentation for the types defined in this crate are generated from the Protobuf
-//! definitions, so code examples are not in Rust.
-//!
-//! See the [Protobuf reference][1] for more information about well-known types.
-//!
-//! ## Feature Flags
-//! - `std`: Enable integration with standard library. Disable this feature for `no_std` support. This feature is enabled by default.
-//!
-//! [1]: https://developers.google.com/protocol-buffers/docs/reference/google.protobuf
-
-#![cfg_attr(not(feature = "std"), no_std)]
-
-#[rustfmt::skip]
-pub mod compiler;
-mod datetime;
-#[rustfmt::skip]
-mod protobuf;
-
-use core::convert::TryFrom;
-use core::fmt;
-use core::i32;
-use core::i64;
-use core::str::FromStr;
-use core::time;
-
-use prost::alloc::format;
-use prost::alloc::string::String;
-use prost::alloc::vec::Vec;
-use prost::{DecodeError, EncodeError, Message, Name};
-
-pub use protobuf::*;
-
-// The Protobuf `Duration` and `Timestamp` types can't delegate to the standard library equivalents
-// because the Protobuf versions are signed. To make them easier to work with, `From` conversions
-// are defined in both directions.
-
-const NANOS_PER_SECOND: i32 = 1_000_000_000;
-const NANOS_MAX: i32 = NANOS_PER_SECOND - 1;
-
-const PACKAGE: &str = "google.protobuf";
-
-mod any;
-
-mod duration;
-pub use duration::DurationError;
-
-mod timestamp;
-pub use timestamp::TimestampError;
-
-mod type_url;
-pub(crate) use type_url::{type_url_for, TypeUrl};
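As the removed lib.rs notes, the Protobuf Duration and Timestamp are signed, unlike std::time::Duration, which is why the deleted duration.rs defines DurationError::NegativeDuration. A hedged usage sketch, assuming prost-types stays available as a regular (non-vendored) dependency:

use prost_types::{Duration, DurationError};
use std::convert::TryFrom;

fn main() {
    // std::time::Duration is unsigned, so a negative Protobuf Duration cannot
    // be converted; the error reports the magnitude instead.
    let negative = Duration { seconds: -1, nanos: 0 };
    match std::time::Duration::try_from(negative) {
        Err(DurationError::NegativeDuration(magnitude)) => {
            assert_eq!(magnitude, std::time::Duration::from_secs(1));
        }
        other => panic!("unexpected conversion result: {:?}", other),
    }
}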
diff --git a/vendor/prost-types-0.12.6/src/protobuf.rs b/vendor/prost-types-0.12.6/src/protobuf.rs
deleted file mode 100644
index edc1361b..00000000
--- a/vendor/prost-types-0.12.6/src/protobuf.rs
+++ /dev/null
@@ -1,2309 +0,0 @@
-// This file is @generated by prost-build.
-/// The protocol compiler can output a FileDescriptorSet containing the .proto
-/// files it parses.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct FileDescriptorSet {
- #[prost(message, repeated, tag = "1")]
- pub file: ::prost::alloc::vec::Vec<FileDescriptorProto>,
-}
-/// Describes a complete .proto file.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct FileDescriptorProto {
- /// file name, relative to root of source tree
- #[prost(string, optional, tag = "1")]
- pub name: ::core::option::Option<::prost::alloc::string::String>,
- /// e.g. "foo", "foo.bar", etc.
- #[prost(string, optional, tag = "2")]
- pub package: ::core::option::Option<::prost::alloc::string::String>,
- /// Names of files imported by this file.
- #[prost(string, repeated, tag = "3")]
- pub dependency: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
- /// Indexes of the public imported files in the dependency list above.
- #[prost(int32, repeated, packed = "false", tag = "10")]
- pub public_dependency: ::prost::alloc::vec::Vec<i32>,
- /// Indexes of the weak imported files in the dependency list.
- /// For Google-internal migration only. Do not use.
- #[prost(int32, repeated, packed = "false", tag = "11")]
- pub weak_dependency: ::prost::alloc::vec::Vec<i32>,
- /// All top-level definitions in this file.
- #[prost(message, repeated, tag = "4")]
- pub message_type: ::prost::alloc::vec::Vec<DescriptorProto>,
- #[prost(message, repeated, tag = "5")]
- pub enum_type: ::prost::alloc::vec::Vec<EnumDescriptorProto>,
- #[prost(message, repeated, tag = "6")]
- pub service: ::prost::alloc::vec::Vec<ServiceDescriptorProto>,
- #[prost(message, repeated, tag = "7")]
- pub extension: ::prost::alloc::vec::Vec<FieldDescriptorProto>,
- #[prost(message, optional, tag = "8")]
- pub options: ::core::option::Option<FileOptions>,
- /// This field contains optional information about the original source code.
- /// You may safely remove this entire field without harming runtime
- /// functionality of the descriptors -- the information is needed only by
- /// development tools.
- #[prost(message, optional, tag = "9")]
- pub source_code_info: ::core::option::Option<SourceCodeInfo>,
- /// The syntax of the proto file.
- /// The supported values are "proto2" and "proto3".
- #[prost(string, optional, tag = "12")]
- pub syntax: ::core::option::Option<::prost::alloc::string::String>,
-}
-/// Describes a message type.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct DescriptorProto {
- #[prost(string, optional, tag = "1")]
- pub name: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(message, repeated, tag = "2")]
- pub field: ::prost::alloc::vec::Vec<FieldDescriptorProto>,
- #[prost(message, repeated, tag = "6")]
- pub extension: ::prost::alloc::vec::Vec<FieldDescriptorProto>,
- #[prost(message, repeated, tag = "3")]
- pub nested_type: ::prost::alloc::vec::Vec<DescriptorProto>,
- #[prost(message, repeated, tag = "4")]
- pub enum_type: ::prost::alloc::vec::Vec<EnumDescriptorProto>,
- #[prost(message, repeated, tag = "5")]
- pub extension_range: ::prost::alloc::vec::Vec<descriptor_proto::ExtensionRange>,
- #[prost(message, repeated, tag = "8")]
- pub oneof_decl: ::prost::alloc::vec::Vec<OneofDescriptorProto>,
- #[prost(message, optional, tag = "7")]
- pub options: ::core::option::Option<MessageOptions>,
- #[prost(message, repeated, tag = "9")]
- pub reserved_range: ::prost::alloc::vec::Vec<descriptor_proto::ReservedRange>,
- /// Reserved field names, which may not be used by fields in the same message.
- /// A given name may only be reserved once.
- #[prost(string, repeated, tag = "10")]
- pub reserved_name: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
-}
-/// Nested message and enum types in `DescriptorProto`.
-pub mod descriptor_proto {
- #[allow(clippy::derive_partial_eq_without_eq)]
- #[derive(Clone, PartialEq, ::prost::Message)]
- pub struct ExtensionRange {
- /// Inclusive.
- #[prost(int32, optional, tag = "1")]
- pub start: ::core::option::Option<i32>,
- /// Exclusive.
- #[prost(int32, optional, tag = "2")]
- pub end: ::core::option::Option<i32>,
- #[prost(message, optional, tag = "3")]
- pub options: ::core::option::Option<super::ExtensionRangeOptions>,
- }
- /// Range of reserved tag numbers. Reserved tag numbers may not be used by
- /// fields or extension ranges in the same message. Reserved ranges may
- /// not overlap.
- #[allow(clippy::derive_partial_eq_without_eq)]
- #[derive(Clone, PartialEq, ::prost::Message)]
- pub struct ReservedRange {
- /// Inclusive.
- #[prost(int32, optional, tag = "1")]
- pub start: ::core::option::Option<i32>,
- /// Exclusive.
- #[prost(int32, optional, tag = "2")]
- pub end: ::core::option::Option<i32>,
- }
-}
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ExtensionRangeOptions {
- /// The parser stores options it doesn't recognize here. See above.
- #[prost(message, repeated, tag = "999")]
- pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
-}
-/// Describes a field within a message.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct FieldDescriptorProto {
- #[prost(string, optional, tag = "1")]
- pub name: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(int32, optional, tag = "3")]
- pub number: ::core::option::Option<i32>,
- #[prost(enumeration = "field_descriptor_proto::Label", optional, tag = "4")]
- pub label: ::core::option::Option<i32>,
- /// If type_name is set, this need not be set. If both this and type_name
- /// are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP.
- #[prost(enumeration = "field_descriptor_proto::Type", optional, tag = "5")]
- pub r#type: ::core::option::Option<i32>,
- /// For message and enum types, this is the name of the type. If the name
- /// starts with a '.', it is fully-qualified. Otherwise, C++-like scoping
- /// rules are used to find the type (i.e. first the nested types within this
- /// message are searched, then within the parent, on up to the root
- /// namespace).
- #[prost(string, optional, tag = "6")]
- pub type_name: ::core::option::Option<::prost::alloc::string::String>,
- /// For extensions, this is the name of the type being extended. It is
- /// resolved in the same manner as type_name.
- #[prost(string, optional, tag = "2")]
- pub extendee: ::core::option::Option<::prost::alloc::string::String>,
- /// For numeric types, contains the original text representation of the value.
- /// For booleans, "true" or "false".
- /// For strings, contains the default text contents (not escaped in any way).
- /// For bytes, contains the C escaped value. All bytes >= 128 are escaped.
- /// TODO(kenton): Base-64 encode?
- #[prost(string, optional, tag = "7")]
- pub default_value: ::core::option::Option<::prost::alloc::string::String>,
- /// If set, gives the index of a oneof in the containing type's oneof_decl
- /// list. This field is a member of that oneof.
- #[prost(int32, optional, tag = "9")]
- pub oneof_index: ::core::option::Option<i32>,
- /// JSON name of this field. The value is set by protocol compiler. If the
- /// user has set a "json_name" option on this field, that option's value
- /// will be used. Otherwise, it's deduced from the field's name by converting
- /// it to camelCase.
- #[prost(string, optional, tag = "10")]
- pub json_name: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(message, optional, tag = "8")]
- pub options: ::core::option::Option<FieldOptions>,
- /// If true, this is a proto3 "optional". When a proto3 field is optional, it
- /// tracks presence regardless of field type.
- ///
- /// When proto3_optional is true, this field must belong to a oneof to
- /// signal to old proto3 clients that presence is tracked for this field. This
- /// oneof is known as a "synthetic" oneof, and this field must be its sole
- /// member (each proto3 optional field gets its own synthetic oneof). Synthetic
- /// oneofs exist in the descriptor only, and do not generate any API. Synthetic
- /// oneofs must be ordered after all "real" oneofs.
- ///
- /// For message fields, proto3_optional doesn't create any semantic change,
- /// since non-repeated message fields always track presence. However it still
- /// indicates the semantic detail of whether the user wrote "optional" or not.
- /// This can be useful for round-tripping the .proto file. For consistency we
- /// give message fields a synthetic oneof also, even though it is not required
- /// to track presence. This is especially important because the parser can't
- /// tell if a field is a message or an enum, so it must always create a
- /// synthetic oneof.
- ///
- /// Proto2 optional fields do not set this flag, because they already indicate
- /// optional with `LABEL_OPTIONAL`.
- #[prost(bool, optional, tag = "17")]
- pub proto3_optional: ::core::option::Option<bool>,
-}
-/// Nested message and enum types in `FieldDescriptorProto`.
-pub mod field_descriptor_proto {
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum Type {
- /// 0 is reserved for errors.
- /// Order is weird for historical reasons.
- Double = 1,
- Float = 2,
- /// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if
- /// negative values are likely.
- Int64 = 3,
- Uint64 = 4,
- /// Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if
- /// negative values are likely.
- Int32 = 5,
- Fixed64 = 6,
- Fixed32 = 7,
- Bool = 8,
- String = 9,
- /// Tag-delimited aggregate.
- /// Group type is deprecated and not supported in proto3. However, Proto3
- /// implementations should still be able to parse the group wire format and
- /// treat group fields as unknown fields.
- Group = 10,
- /// Length-delimited aggregate.
- Message = 11,
- /// New in version 2.
- Bytes = 12,
- Uint32 = 13,
- Enum = 14,
- Sfixed32 = 15,
- Sfixed64 = 16,
- /// Uses ZigZag encoding.
- Sint32 = 17,
- /// Uses ZigZag encoding.
- Sint64 = 18,
- }
- impl Type {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- Type::Double => "TYPE_DOUBLE",
- Type::Float => "TYPE_FLOAT",
- Type::Int64 => "TYPE_INT64",
- Type::Uint64 => "TYPE_UINT64",
- Type::Int32 => "TYPE_INT32",
- Type::Fixed64 => "TYPE_FIXED64",
- Type::Fixed32 => "TYPE_FIXED32",
- Type::Bool => "TYPE_BOOL",
- Type::String => "TYPE_STRING",
- Type::Group => "TYPE_GROUP",
- Type::Message => "TYPE_MESSAGE",
- Type::Bytes => "TYPE_BYTES",
- Type::Uint32 => "TYPE_UINT32",
- Type::Enum => "TYPE_ENUM",
- Type::Sfixed32 => "TYPE_SFIXED32",
- Type::Sfixed64 => "TYPE_SFIXED64",
- Type::Sint32 => "TYPE_SINT32",
- Type::Sint64 => "TYPE_SINT64",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "TYPE_DOUBLE" => Some(Self::Double),
- "TYPE_FLOAT" => Some(Self::Float),
- "TYPE_INT64" => Some(Self::Int64),
- "TYPE_UINT64" => Some(Self::Uint64),
- "TYPE_INT32" => Some(Self::Int32),
- "TYPE_FIXED64" => Some(Self::Fixed64),
- "TYPE_FIXED32" => Some(Self::Fixed32),
- "TYPE_BOOL" => Some(Self::Bool),
- "TYPE_STRING" => Some(Self::String),
- "TYPE_GROUP" => Some(Self::Group),
- "TYPE_MESSAGE" => Some(Self::Message),
- "TYPE_BYTES" => Some(Self::Bytes),
- "TYPE_UINT32" => Some(Self::Uint32),
- "TYPE_ENUM" => Some(Self::Enum),
- "TYPE_SFIXED32" => Some(Self::Sfixed32),
- "TYPE_SFIXED64" => Some(Self::Sfixed64),
- "TYPE_SINT32" => Some(Self::Sint32),
- "TYPE_SINT64" => Some(Self::Sint64),
- _ => None,
- }
- }
- }
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum Label {
- /// 0 is reserved for errors
- Optional = 1,
- Required = 2,
- Repeated = 3,
- }
- impl Label {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- Label::Optional => "LABEL_OPTIONAL",
- Label::Required => "LABEL_REQUIRED",
- Label::Repeated => "LABEL_REPEATED",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "LABEL_OPTIONAL" => Some(Self::Optional),
- "LABEL_REQUIRED" => Some(Self::Required),
- "LABEL_REPEATED" => Some(Self::Repeated),
- _ => None,
- }
- }
- }
-}
-/// Describes a oneof.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct OneofDescriptorProto {
- #[prost(string, optional, tag = "1")]
- pub name: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(message, optional, tag = "2")]
- pub options: ::core::option::Option<OneofOptions>,
-}
-/// Describes an enum type.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct EnumDescriptorProto {
- #[prost(string, optional, tag = "1")]
- pub name: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(message, repeated, tag = "2")]
- pub value: ::prost::alloc::vec::Vec<EnumValueDescriptorProto>,
- #[prost(message, optional, tag = "3")]
- pub options: ::core::option::Option<EnumOptions>,
- /// Range of reserved numeric values. Reserved numeric values may not be used
- /// by enum values in the same enum declaration. Reserved ranges may not
- /// overlap.
- #[prost(message, repeated, tag = "4")]
- pub reserved_range: ::prost::alloc::vec::Vec<
- enum_descriptor_proto::EnumReservedRange,
- >,
- /// Reserved enum value names, which may not be reused. A given name may only
- /// be reserved once.
- #[prost(string, repeated, tag = "5")]
- pub reserved_name: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
-}
-/// Nested message and enum types in `EnumDescriptorProto`.
-pub mod enum_descriptor_proto {
- /// Range of reserved numeric values. Reserved values may not be used by
- /// entries in the same enum. Reserved ranges may not overlap.
- ///
- /// Note that this is distinct from DescriptorProto.ReservedRange in that it
- /// is inclusive such that it can appropriately represent the entire int32
- /// domain.
- #[allow(clippy::derive_partial_eq_without_eq)]
- #[derive(Clone, PartialEq, ::prost::Message)]
- pub struct EnumReservedRange {
- /// Inclusive.
- #[prost(int32, optional, tag = "1")]
- pub start: ::core::option::Option<i32>,
- /// Inclusive.
- #[prost(int32, optional, tag = "2")]
- pub end: ::core::option::Option<i32>,
- }
-}
-/// Describes a value within an enum.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct EnumValueDescriptorProto {
- #[prost(string, optional, tag = "1")]
- pub name: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(int32, optional, tag = "2")]
- pub number: ::core::option::Option<i32>,
- #[prost(message, optional, tag = "3")]
- pub options: ::core::option::Option<EnumValueOptions>,
-}
-/// Describes a service.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ServiceDescriptorProto {
- #[prost(string, optional, tag = "1")]
- pub name: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(message, repeated, tag = "2")]
- pub method: ::prost::alloc::vec::Vec<MethodDescriptorProto>,
- #[prost(message, optional, tag = "3")]
- pub options: ::core::option::Option<ServiceOptions>,
-}
-/// Describes a method of a service.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct MethodDescriptorProto {
- #[prost(string, optional, tag = "1")]
- pub name: ::core::option::Option<::prost::alloc::string::String>,
- /// Input and output type names. These are resolved in the same way as
- /// FieldDescriptorProto.type_name, but must refer to a message type.
- #[prost(string, optional, tag = "2")]
- pub input_type: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(string, optional, tag = "3")]
- pub output_type: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(message, optional, tag = "4")]
- pub options: ::core::option::Option<MethodOptions>,
- /// Identifies if client streams multiple client messages
- #[prost(bool, optional, tag = "5", default = "false")]
- pub client_streaming: ::core::option::Option<bool>,
- /// Identifies if server streams multiple server messages
- #[prost(bool, optional, tag = "6", default = "false")]
- pub server_streaming: ::core::option::Option<bool>,
-}
-/// Each of the definitions above may have "options" attached. These are
-/// just annotations which may cause code to be generated slightly differently
-/// or may contain hints for code that manipulates protocol messages.
-///
-/// Clients may define custom options as extensions of the \*Options messages.
-/// These extensions may not yet be known at parsing time, so the parser cannot
-/// store the values in them. Instead it stores them in a field in the \*Options
-/// message called uninterpreted_option. This field must have the same name
-/// across all \*Options messages. We then use this field to populate the
-/// extensions when we build a descriptor, at which point all protos have been
-/// parsed and so all extensions are known.
-///
-/// Extension numbers for custom options may be chosen as follows:
-///
-/// * For options which will only be used within a single application or
-/// organization, or for experimental options, use field numbers 50000
-/// through 99999. It is up to you to ensure that you do not use the
-/// same number for multiple options.
-/// * For options which will be published and used publicly by multiple
-/// independent entities, e-mail protobuf-global-extension-registry@google.com
-/// to reserve extension numbers. Simply provide your project name (e.g.
-/// Objective-C plugin) and your project website (if available) -- there's no
-/// need to explain how you intend to use them. Usually you only need one
-/// extension number. You can declare multiple options with only one extension
-/// number by putting them in a sub-message. See the Custom Options section of
-/// the docs for examples:
-/// <https://developers.google.com/protocol-buffers/docs/proto#options>
-/// If this turns out to be popular, a web service will be set up
-/// to automatically assign option numbers.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct FileOptions {
- /// Sets the Java package where classes generated from this .proto will be
- /// placed. By default, the proto package is used, but this is often
- /// inappropriate because proto packages do not normally start with backwards
- /// domain names.
- #[prost(string, optional, tag = "1")]
- pub java_package: ::core::option::Option<::prost::alloc::string::String>,
- /// Controls the name of the wrapper Java class generated for the .proto file.
- /// That class will always contain the .proto file's getDescriptor() method as
- /// well as any top-level extensions defined in the .proto file.
- /// If java_multiple_files is disabled, then all the other classes from the
- /// .proto file will be nested inside the single wrapper outer class.
- #[prost(string, optional, tag = "8")]
- pub java_outer_classname: ::core::option::Option<::prost::alloc::string::String>,
- /// If enabled, then the Java code generator will generate a separate .java
- /// file for each top-level message, enum, and service defined in the .proto
- /// file. Thus, these types will *not* be nested inside the wrapper class
- /// named by java_outer_classname. However, the wrapper class will still be
- /// generated to contain the file's getDescriptor() method as well as any
- /// top-level extensions defined in the file.
- #[prost(bool, optional, tag = "10", default = "false")]
- pub java_multiple_files: ::core::option::Option<bool>,
- /// This option does nothing.
- #[deprecated]
- #[prost(bool, optional, tag = "20")]
- pub java_generate_equals_and_hash: ::core::option::Option<bool>,
- /// If set true, then the Java2 code generator will generate code that
- /// throws an exception whenever an attempt is made to assign a non-UTF-8
- /// byte sequence to a string field.
- /// Message reflection will do the same.
- /// However, an extension field still accepts non-UTF-8 byte sequences.
- /// This option has no effect when used with the lite runtime.
- #[prost(bool, optional, tag = "27", default = "false")]
- pub java_string_check_utf8: ::core::option::Option<bool>,
- #[prost(
- enumeration = "file_options::OptimizeMode",
- optional,
- tag = "9",
- default = "Speed"
- )]
- pub optimize_for: ::core::option::Option<i32>,
- /// Sets the Go package where structs generated from this .proto will be
- /// placed. If omitted, the Go package will be derived from the following:
- ///
- /// * The basename of the package import path, if provided.
- /// * Otherwise, the package statement in the .proto file, if present.
- /// * Otherwise, the basename of the .proto file, without extension.
- #[prost(string, optional, tag = "11")]
- pub go_package: ::core::option::Option<::prost::alloc::string::String>,
- /// Should generic services be generated in each language? "Generic" services
- /// are not specific to any particular RPC system. They are generated by the
- /// main code generators in each language (without additional plugins).
- /// Generic services were the only kind of service generation supported by
- /// early versions of google.protobuf.
- ///
- /// Generic services are now considered deprecated in favor of using plugins
- /// that generate code specific to your particular RPC system. Therefore,
- /// these default to false. Old code which depends on generic services should
- /// explicitly set them to true.
- #[prost(bool, optional, tag = "16", default = "false")]
- pub cc_generic_services: ::core::option::Option<bool>,
- #[prost(bool, optional, tag = "17", default = "false")]
- pub java_generic_services: ::core::option::Option<bool>,
- #[prost(bool, optional, tag = "18", default = "false")]
- pub py_generic_services: ::core::option::Option<bool>,
- #[prost(bool, optional, tag = "42", default = "false")]
- pub php_generic_services: ::core::option::Option<bool>,
- /// Is this file deprecated?
- /// Depending on the target platform, this can emit Deprecated annotations
- /// for everything in the file, or it will be completely ignored; in the very
- /// least, this is a formalization for deprecating files.
- #[prost(bool, optional, tag = "23", default = "false")]
- pub deprecated: ::core::option::Option<bool>,
- /// Enables the use of arenas for the proto messages in this file. This applies
- /// only to generated classes for C++.
- #[prost(bool, optional, tag = "31", default = "true")]
- pub cc_enable_arenas: ::core::option::Option<bool>,
- /// Sets the objective c class prefix which is prepended to all objective c
- /// generated classes from this .proto. There is no default.
- #[prost(string, optional, tag = "36")]
- pub objc_class_prefix: ::core::option::Option<::prost::alloc::string::String>,
- /// Namespace for generated classes; defaults to the package.
- #[prost(string, optional, tag = "37")]
- pub csharp_namespace: ::core::option::Option<::prost::alloc::string::String>,
- /// By default Swift generators will take the proto package and CamelCase it
- /// replacing '.' with underscore and use that to prefix the types/symbols
- /// defined. When this option is provided, they will use this value instead
- /// to prefix the types/symbols defined.
- #[prost(string, optional, tag = "39")]
- pub swift_prefix: ::core::option::Option<::prost::alloc::string::String>,
- /// Sets the php class prefix which is prepended to all php generated classes
- /// from this .proto. Default is empty.
- #[prost(string, optional, tag = "40")]
- pub php_class_prefix: ::core::option::Option<::prost::alloc::string::String>,
- /// Use this option to change the namespace of php generated classes. Default
- /// is empty. When this option is empty, the package name will be used for
- /// determining the namespace.
- #[prost(string, optional, tag = "41")]
- pub php_namespace: ::core::option::Option<::prost::alloc::string::String>,
- /// Use this option to change the namespace of php generated metadata classes.
- /// Default is empty. When this option is empty, the proto file name will be
- /// used for determining the namespace.
- #[prost(string, optional, tag = "44")]
- pub php_metadata_namespace: ::core::option::Option<::prost::alloc::string::String>,
- /// Use this option to change the package of ruby generated classes. Default
- /// is empty. When this option is not set, the package name will be used for
- /// determining the ruby package.
- #[prost(string, optional, tag = "45")]
- pub ruby_package: ::core::option::Option<::prost::alloc::string::String>,
- /// The parser stores options it doesn't recognize here.
- /// See the documentation for the "Options" section above.
- #[prost(message, repeated, tag = "999")]
- pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
-}
-/// Nested message and enum types in `FileOptions`.
-pub mod file_options {
- /// Generated classes can be optimized for speed or code size.
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum OptimizeMode {
- /// Generate complete code for parsing, serialization, etc.
- Speed = 1,
- /// Use ReflectionOps to implement these methods.
- CodeSize = 2,
- /// Generate code using MessageLite and the lite runtime.
- LiteRuntime = 3,
- }
- impl OptimizeMode {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- OptimizeMode::Speed => "SPEED",
- OptimizeMode::CodeSize => "CODE_SIZE",
- OptimizeMode::LiteRuntime => "LITE_RUNTIME",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "SPEED" => Some(Self::Speed),
- "CODE_SIZE" => Some(Self::CodeSize),
- "LITE_RUNTIME" => Some(Self::LiteRuntime),
- _ => None,
- }
- }
- }
-}
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct MessageOptions {
- /// Set true to use the old proto1 MessageSet wire format for extensions.
- /// This is provided for backwards-compatibility with the MessageSet wire
- /// format. You should not use this for any other reason: It's less
- /// efficient, has fewer features, and is more complicated.
- ///
- /// The message must be defined exactly as follows:
- /// message Foo {
- /// option message_set_wire_format = true;
- /// extensions 4 to max;
- /// }
- /// Note that the message cannot have any defined fields; MessageSets only
- /// have extensions.
- ///
- /// All extensions of your type must be singular messages; e.g. they cannot
- /// be int32s, enums, or repeated messages.
- ///
- /// Because this is an option, the above two restrictions are not enforced by
- /// the protocol compiler.
- #[prost(bool, optional, tag = "1", default = "false")]
- pub message_set_wire_format: ::core::option::Option<bool>,
- /// Disables the generation of the standard "descriptor()" accessor, which can
- /// conflict with a field of the same name. This is meant to make migration
- /// from proto1 easier; new code should avoid fields named "descriptor".
- #[prost(bool, optional, tag = "2", default = "false")]
- pub no_standard_descriptor_accessor: ::core::option::Option<bool>,
- /// Is this message deprecated?
- /// Depending on the target platform, this can emit Deprecated annotations
- /// for the message, or it will be completely ignored; in the very least,
- /// this is a formalization for deprecating messages.
- #[prost(bool, optional, tag = "3", default = "false")]
- pub deprecated: ::core::option::Option<bool>,
- /// Whether the message is an automatically generated map entry type for the
- /// maps field.
- ///
- /// For maps fields:
- /// map\<KeyType, ValueType> map_field = 1;
- /// The parsed descriptor looks like:
- /// message MapFieldEntry {
- /// option map_entry = true;
- /// optional KeyType key = 1;
- /// optional ValueType value = 2;
- /// }
- /// repeated MapFieldEntry map_field = 1;
- ///
- /// Implementations may choose not to generate the map_entry=true message, but
- /// use a native map in the target language to hold the keys and values.
- /// The reflection APIs in such implementations still need to work as
- /// if the field is a repeated message field.
- ///
- /// NOTE: Do not set the option in .proto files. Always use the maps syntax
- /// instead. The option should only be implicitly set by the proto compiler
- /// parser.
- #[prost(bool, optional, tag = "7")]
- pub map_entry: ::core::option::Option<bool>,
- /// The parser stores options it doesn't recognize here. See above.
- #[prost(message, repeated, tag = "999")]
- pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
-}
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct FieldOptions {
- /// The ctype option instructs the C++ code generator to use a different
- /// representation of the field than it normally would. See the specific
- /// options below. This option is not yet implemented in the open source
- /// release -- sorry, we'll try to include it in a future version!
- #[prost(
- enumeration = "field_options::CType",
- optional,
- tag = "1",
- default = "String"
- )]
- pub ctype: ::core::option::Option<i32>,
- /// The packed option can be enabled for repeated primitive fields to enable
- /// a more efficient representation on the wire. Rather than repeatedly
- /// writing the tag and type for each element, the entire array is encoded as
- /// a single length-delimited blob. In proto3, only explicitly setting it to
- /// false will avoid using packed encoding.
- #[prost(bool, optional, tag = "2")]
- pub packed: ::core::option::Option<bool>,
- /// The jstype option determines the JavaScript type used for values of the
- /// field. The option is permitted only for 64 bit integral and fixed types
- /// (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING
- /// is represented as a JavaScript string, which avoids loss of precision that
- /// can happen when a large value is converted to a floating point JavaScript number.
- /// Specifying JS_NUMBER for the jstype causes the generated JavaScript code to
- /// use the JavaScript "number" type. The behavior of the default option
- /// JS_NORMAL is implementation dependent.
- ///
- /// This option is an enum to permit additional types to be added, e.g.
- /// goog.math.Integer.
- #[prost(
- enumeration = "field_options::JsType",
- optional,
- tag = "6",
- default = "JsNormal"
- )]
- pub jstype: ::core::option::Option<i32>,
- /// Should this field be parsed lazily? Lazy applies only to message-type
- /// fields. It means that when the outer message is initially parsed, the
- /// inner message's contents will not be parsed but instead stored in encoded
- /// form. The inner message will actually be parsed when it is first accessed.
- ///
- /// This is only a hint. Implementations are free to choose whether to use
- /// eager or lazy parsing regardless of the value of this option. However,
- /// setting this option true suggests that the protocol author believes that
- /// using lazy parsing on this field is worth the additional bookkeeping
- /// overhead typically needed to implement it.
- ///
- /// This option does not affect the public interface of any generated code;
- /// all method signatures remain the same. Furthermore, thread-safety of the
- /// interface is not affected by this option; const methods remain safe to
- /// call from multiple threads concurrently, while non-const methods continue
- /// to require exclusive access.
- ///
- /// Note that implementations may choose not to check required fields within
- /// a lazy sub-message. That is, calling IsInitialized() on the outer message
- /// may return true even if the inner message has missing required fields.
- /// This is necessary because otherwise the inner message would have to be
- /// parsed in order to perform the check, defeating the purpose of lazy
- /// parsing. An implementation which chooses not to check required fields
- /// must be consistent about it. That is, for any particular sub-message, the
- /// implementation must either *always* check its required fields, or *never*
- /// check its required fields, regardless of whether or not the message has
- /// been parsed.
- #[prost(bool, optional, tag = "5", default = "false")]
- pub lazy: ::core::option::Option<bool>,
- /// Is this field deprecated?
- /// Depending on the target platform, this can emit Deprecated annotations
- /// for accessors, or it will be completely ignored; at the very least, this
- /// is a formalization for deprecating fields.
- #[prost(bool, optional, tag = "3", default = "false")]
- pub deprecated: ::core::option::Option<bool>,
- /// For Google-internal migration only. Do not use.
- #[prost(bool, optional, tag = "10", default = "false")]
- pub weak: ::core::option::Option<bool>,
- /// The parser stores options it doesn't recognize here. See above.
- #[prost(message, repeated, tag = "999")]
- pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
-}
-/// Nested message and enum types in `FieldOptions`.
-pub mod field_options {
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum CType {
- /// Default mode.
- String = 0,
- Cord = 1,
- StringPiece = 2,
- }
- impl CType {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- CType::String => "STRING",
- CType::Cord => "CORD",
- CType::StringPiece => "STRING_PIECE",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "STRING" => Some(Self::String),
- "CORD" => Some(Self::Cord),
- "STRING_PIECE" => Some(Self::StringPiece),
- _ => None,
- }
- }
- }
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum JsType {
- /// Use the default type.
- JsNormal = 0,
- /// Use JavaScript strings.
- JsString = 1,
- /// Use JavaScript numbers.
- JsNumber = 2,
- }
- impl JsType {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- JsType::JsNormal => "JS_NORMAL",
- JsType::JsString => "JS_STRING",
- JsType::JsNumber => "JS_NUMBER",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "JS_NORMAL" => Some(Self::JsNormal),
- "JS_STRING" => Some(Self::JsString),
- "JS_NUMBER" => Some(Self::JsNumber),
- _ => None,
- }
- }
- }
-}
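Because prost stores enum-typed option fields as raw `i32` values (note the `enumeration = "..."` attributes above), setting `jstype` or `ctype` means casting a variant. A hedged sketch of building a `FieldOptions`, again assuming the crate is consumed as `prost_types`:

```rust
use prost_types::field_options::JsType;
use prost_types::FieldOptions;

fn main() {
    let opts = FieldOptions {
        // Ask JavaScript code generators to use strings for a 64-bit field.
        jstype: Some(JsType::JsString as i32),
        // Explicitly opt out of packed encoding for a repeated primitive field.
        packed: Some(false),
        // Hint that a message-typed field may be parsed lazily.
        lazy: Some(true),
        ..Default::default()
    };
    assert_eq!(opts.jstype, Some(JsType::JsString as i32));
}
```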
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct OneofOptions {
- /// The parser stores options it doesn't recognize here. See above.
- #[prost(message, repeated, tag = "999")]
- pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
-}
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct EnumOptions {
- /// Set this option to true to allow mapping different tag names to the same
- /// value.
- #[prost(bool, optional, tag = "2")]
- pub allow_alias: ::core::option::Option<bool>,
- /// Is this enum deprecated?
- /// Depending on the target platform, this can emit Deprecated annotations
- /// for the enum, or it will be completely ignored; at the very least, this
- /// is a formalization for deprecating enums.
- #[prost(bool, optional, tag = "3", default = "false")]
- pub deprecated: ::core::option::Option<bool>,
- /// The parser stores options it doesn't recognize here. See above.
- #[prost(message, repeated, tag = "999")]
- pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
-}
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct EnumValueOptions {
- /// Is this enum value deprecated?
- /// Depending on the target platform, this can emit Deprecated annotations
- /// for the enum value, or it will be completely ignored; at the very least,
- /// this is a formalization for deprecating enum values.
- #[prost(bool, optional, tag = "1", default = "false")]
- pub deprecated: ::core::option::Option<bool>,
- /// The parser stores options it doesn't recognize here. See above.
- #[prost(message, repeated, tag = "999")]
- pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
-}
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ServiceOptions {
- /// Is this service deprecated?
- /// Depending on the target platform, this can emit Deprecated annotations
- /// for the service, or it will be completely ignored; at the very least,
- /// this is a formalization for deprecating services.
- #[prost(bool, optional, tag = "33", default = "false")]
- pub deprecated: ::core::option::Option<bool>,
- /// The parser stores options it doesn't recognize here. See above.
- #[prost(message, repeated, tag = "999")]
- pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
-}
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct MethodOptions {
- /// Is this method deprecated?
- /// Depending on the target platform, this can emit Deprecated annotations
- /// for the method, or it will be completely ignored; at the very least,
- /// this is a formalization for deprecating methods.
- #[prost(bool, optional, tag = "33", default = "false")]
- pub deprecated: ::core::option::Option<bool>,
- #[prost(
- enumeration = "method_options::IdempotencyLevel",
- optional,
- tag = "34",
- default = "IdempotencyUnknown"
- )]
- pub idempotency_level: ::core::option::Option<i32>,
- /// The parser stores options it doesn't recognize here. See above.
- #[prost(message, repeated, tag = "999")]
- pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
-}
-/// Nested message and enum types in `MethodOptions`.
-pub mod method_options {
- /// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- /// or neither? HTTP-based RPC implementations may choose the GET verb for safe
- /// methods, and the PUT verb for idempotent methods, instead of the default POST.
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum IdempotencyLevel {
- IdempotencyUnknown = 0,
- /// implies idempotent
- NoSideEffects = 1,
- /// idempotent, but may have side effects
- Idempotent = 2,
- }
- impl IdempotencyLevel {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- IdempotencyLevel::IdempotencyUnknown => "IDEMPOTENCY_UNKNOWN",
- IdempotencyLevel::NoSideEffects => "NO_SIDE_EFFECTS",
- IdempotencyLevel::Idempotent => "IDEMPOTENT",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "IDEMPOTENCY_UNKNOWN" => Some(Self::IdempotencyUnknown),
- "NO_SIDE_EFFECTS" => Some(Self::NoSideEffects),
- "IDEMPOTENT" => Some(Self::Idempotent),
- _ => None,
- }
- }
- }
-}
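The `IdempotencyLevel` comment above suggests how an HTTP-based RPC layer could pick a verb. A small illustrative mapping (the helper function name is hypothetical, not part of this file):

```rust
use prost_types::method_options::IdempotencyLevel;

/// GET for side-effect-free methods, PUT for idempotent ones, POST otherwise,
/// following the guidance in the doc comment above.
fn http_verb_for(level: IdempotencyLevel) -> &'static str {
    match level {
        IdempotencyLevel::NoSideEffects => "GET",
        IdempotencyLevel::Idempotent => "PUT",
        IdempotencyLevel::IdempotencyUnknown => "POST",
    }
}

fn main() {
    assert_eq!(http_verb_for(IdempotencyLevel::NoSideEffects), "GET");
}
```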
- /// A message representing an option the parser does not recognize. This only
-/// appears in options protos created by the compiler::Parser class.
-/// DescriptorPool resolves these when building Descriptor objects. Therefore,
-/// options protos in descriptor objects (e.g. returned by Descriptor::options(),
-/// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
-/// in them.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct UninterpretedOption {
- #[prost(message, repeated, tag = "2")]
- pub name: ::prost::alloc::vec::Vec<uninterpreted_option::NamePart>,
- /// The value of the uninterpreted option, in whatever type the tokenizer
- /// identified it as during parsing. Exactly one of these should be set.
- #[prost(string, optional, tag = "3")]
- pub identifier_value: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(uint64, optional, tag = "4")]
- pub positive_int_value: ::core::option::Option<u64>,
- #[prost(int64, optional, tag = "5")]
- pub negative_int_value: ::core::option::Option<i64>,
- #[prost(double, optional, tag = "6")]
- pub double_value: ::core::option::Option<f64>,
- #[prost(bytes = "vec", optional, tag = "7")]
- pub string_value: ::core::option::Option<::prost::alloc::vec::Vec<u8>>,
- #[prost(string, optional, tag = "8")]
- pub aggregate_value: ::core::option::Option<::prost::alloc::string::String>,
-}
-/// Nested message and enum types in `UninterpretedOption`.
-pub mod uninterpreted_option {
- /// The name of the uninterpreted option. Each string represents a segment in
- /// a dot-separated name. is_extension is true iff a segment represents an
- /// extension (denoted with parentheses in options specs in .proto files).
- /// E.g.,{ \["foo", false\], \["bar.baz", true\], \["qux", false\] } represents
- /// "foo.(bar.baz).qux".
- #[allow(clippy::derive_partial_eq_without_eq)]
- #[derive(Clone, PartialEq, ::prost::Message)]
- pub struct NamePart {
- #[prost(string, required, tag = "1")]
- pub name_part: ::prost::alloc::string::String,
- #[prost(bool, required, tag = "2")]
- pub is_extension: bool,
- }
-}
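Following the `NamePart` comment above, the option name `foo.(bar.baz).qux` breaks into three segments, with only the parenthesized one marked as an extension. A sketch, under the same `prost_types` assumption:

```rust
use prost_types::uninterpreted_option::NamePart;
use prost_types::UninterpretedOption;

fn main() {
    let opt = UninterpretedOption {
        // "foo.(bar.baz).qux"
        name: vec![
            NamePart { name_part: "foo".to_string(), is_extension: false },
            NamePart { name_part: "bar.baz".to_string(), is_extension: true },
            NamePart { name_part: "qux".to_string(), is_extension: false },
        ],
        // Exactly one of the value fields should be set; an identifier here.
        identifier_value: Some("ENABLED".to_string()),
        ..Default::default()
    };
    assert_eq!(opt.name.len(), 3);
}
```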
-/// Encapsulates information about the original source file from which a
-/// FileDescriptorProto was generated.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct SourceCodeInfo {
- /// A Location identifies a piece of source code in a .proto file which
- /// corresponds to a particular definition. This information is intended
- /// to be useful to IDEs, code indexers, documentation generators, and similar
- /// tools.
- ///
- /// For example, say we have a file like:
- /// message Foo {
- /// optional string foo = 1;
- /// }
- /// Let's look at just the field definition:
- /// optional string foo = 1;
- /// ^ ^^ ^^ ^ ^^^
- /// a bc de f ghi
- /// We have the following locations:
- /// span path represents
- /// \[a,i) \[ 4, 0, 2, 0 \] The whole field definition.
- /// \[a,b) \[ 4, 0, 2, 0, 4 \] The label (optional).
- /// \[c,d) \[ 4, 0, 2, 0, 5 \] The type (string).
- /// \[e,f) \[ 4, 0, 2, 0, 1 \] The name (foo).
- /// \[g,h) \[ 4, 0, 2, 0, 3 \] The number (1).
- ///
- /// Notes:
- ///
- /// * A location may refer to a repeated field itself (i.e. not to any
- /// particular index within it). This is used whenever a set of elements are
- /// logically enclosed in a single code segment. For example, an entire
- /// extend block (possibly containing multiple extension definitions) will
- /// have an outer location whose path refers to the "extensions" repeated
- /// field without an index.
- /// * Multiple locations may have the same path. This happens when a single
- /// logical declaration is spread out across multiple places. The most
- /// obvious example is the "extend" block again -- there may be multiple
- /// extend blocks in the same scope, each of which will have the same path.
- /// * A location's span is not always a subset of its parent's span. For
- /// example, the "extendee" of an extension declaration appears at the
- /// beginning of the "extend" block and is shared by all extensions within
- /// the block.
- /// * Just because a location's span is a subset of some other location's span
- /// does not mean that it is a descendant. For example, a "group" defines
- /// both a type and a field in a single declaration. Thus, the locations
- /// corresponding to the type and field and their components will overlap.
- /// * Code which tries to interpret locations should probably be designed to
- /// ignore those that it doesn't understand, as more types of locations could
- /// be recorded in the future.
- #[prost(message, repeated, tag = "1")]
- pub location: ::prost::alloc::vec::Vec<source_code_info::Location>,
-}
-/// Nested message and enum types in `SourceCodeInfo`.
-pub mod source_code_info {
- #[allow(clippy::derive_partial_eq_without_eq)]
- #[derive(Clone, PartialEq, ::prost::Message)]
- pub struct Location {
- /// Identifies which part of the FileDescriptorProto was defined at this
- /// location.
- ///
- /// Each element is a field number or an index. They form a path from
- /// the root FileDescriptorProto to the place where the definition occurs. For
- /// example, this path:
- /// \[ 4, 3, 2, 7, 1 \]
- /// refers to:
- /// file.message_type(3) // 4, 3
- /// .field(7) // 2, 7
- /// .name() // 1
- /// This is because FileDescriptorProto.message_type has field number 4:
- /// repeated DescriptorProto message_type = 4;
- /// and DescriptorProto.field has field number 2:
- /// repeated FieldDescriptorProto field = 2;
- /// and FieldDescriptorProto.name has field number 1:
- /// optional string name = 1;
- ///
- /// Thus, the above path gives the location of a field name. If we removed
- /// the last element:
- /// \[ 4, 3, 2, 7 \]
- /// this path refers to the whole field declaration (from the beginning
- /// of the label to the terminating semicolon).
- #[prost(int32, repeated, tag = "1")]
- pub path: ::prost::alloc::vec::Vec<i32>,
- /// Always has exactly three or four elements: start line, start column,
- /// end line (optional, otherwise assumed same as start line), end column.
- /// These are packed into a single field for efficiency. Note that line
- /// and column numbers are zero-based -- typically you will want to add
- /// 1 to each before displaying to a user.
- #[prost(int32, repeated, tag = "2")]
- pub span: ::prost::alloc::vec::Vec<i32>,
- /// If this SourceCodeInfo represents a complete declaration, these are any
- /// comments appearing before and after the declaration which appear to be
- /// attached to the declaration.
- ///
- /// A series of line comments appearing on consecutive lines, with no other
- /// tokens appearing on those lines, will be treated as a single comment.
- ///
- /// leading_detached_comments will keep paragraphs of comments that appear
- /// before (but not connected to) the current element. Each paragraph,
- /// separated by empty lines, will be one comment element in the repeated
- /// field.
- ///
- /// Only the comment content is provided; comment markers (e.g. //) are
- /// stripped out. For block comments, leading whitespace and an asterisk
- /// will be stripped from the beginning of each line other than the first.
- /// Newlines are included in the output.
- ///
- /// Examples:
- ///
- /// optional int32 foo = 1; // Comment attached to foo.
- /// // Comment attached to bar.
- /// optional int32 bar = 2;
- ///
- /// optional string baz = 3;
- /// // Comment attached to baz.
- /// // Another line attached to baz.
- ///
- /// // Comment attached to qux.
- /// //
- /// // Another line attached to qux.
- /// optional double qux = 4;
- ///
- /// // Detached comment for corge. This is not leading or trailing comments
- /// // to qux or corge because there are blank lines separating it from
- /// // both.
- ///
- /// // Detached comment for corge paragraph 2.
- ///
- /// optional string corge = 5;
- /// /\* Block comment attached
- /// \* to corge. Leading asterisks
- /// \* will be removed. */
- /// /* Block comment attached to
- /// \* grault. \*/
- /// optional int32 grault = 6;
- ///
- /// // ignored detached comments.
- #[prost(string, optional, tag = "3")]
- pub leading_comments: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(string, optional, tag = "4")]
- pub trailing_comments: ::core::option::Option<::prost::alloc::string::String>,
- #[prost(string, repeated, tag = "6")]
- pub leading_detached_comments: ::prost::alloc::vec::Vec<
- ::prost::alloc::string::String,
- >,
- }
-}
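Putting the path and span rules above together: the location of a field name reachable as `file.message_type(3).field(7).name()` could be recorded like this (all numbers illustrative):

```rust
use prost_types::source_code_info::Location;

fn main() {
    let loc = Location {
        // [4, 3, 2, 7, 1]: message_type(3) -> field(7) -> name, per the comment above.
        path: vec![4, 3, 2, 7, 1],
        // Three-element form: start line, start column, end column (same line).
        // Values are zero-based.
        span: vec![11, 20, 24],
        leading_comments: Some(" Comment attached to the field.".to_string()),
        ..Default::default()
    };
    assert_eq!(loc.path.len(), 5);
}
```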
-/// Describes the relationship between generated code and its original source
-/// file. A GeneratedCodeInfo message is associated with only one generated
-/// source file, but may contain references to different source .proto files.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct GeneratedCodeInfo {
- /// An Annotation connects some span of text in generated code to an element
- /// of its generating .proto file.
- #[prost(message, repeated, tag = "1")]
- pub annotation: ::prost::alloc::vec::Vec<generated_code_info::Annotation>,
-}
-/// Nested message and enum types in `GeneratedCodeInfo`.
-pub mod generated_code_info {
- #[allow(clippy::derive_partial_eq_without_eq)]
- #[derive(Clone, PartialEq, ::prost::Message)]
- pub struct Annotation {
- /// Identifies the element in the original source .proto file. This field
- /// is formatted the same as SourceCodeInfo.Location.path.
- #[prost(int32, repeated, tag = "1")]
- pub path: ::prost::alloc::vec::Vec<i32>,
- /// Identifies the filesystem path to the original source .proto.
- #[prost(string, optional, tag = "2")]
- pub source_file: ::core::option::Option<::prost::alloc::string::String>,
- /// Identifies the starting offset in bytes in the generated code
- /// that relates to the identified object.
- #[prost(int32, optional, tag = "3")]
- pub begin: ::core::option::Option<i32>,
- /// Identifies the ending offset in bytes in the generated code that
- /// relates to the identified offset. The end offset should be one past
- /// the last relevant byte (so the length of the text = end - begin).
- #[prost(int32, optional, tag = "4")]
- pub end: ::core::option::Option<i32>,
- }
-}
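Since `end` is one past the last relevant byte, the length of the annotated generated text is simply `end - begin`. A short sketch (path and file name are illustrative):

```rust
use prost_types::generated_code_info::Annotation;

fn main() {
    let ann = Annotation {
        path: vec![4, 0, 2, 1], // element in the source .proto
        source_file: Some("example/widget.proto".to_string()),
        begin: Some(120),
        end: Some(138),
    };
    // Half-open byte range in the generated code.
    assert_eq!(ann.end.unwrap() - ann.begin.unwrap(), 18);
}
```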
-/// `Any` contains an arbitrary serialized protocol buffer message along with a
-/// URL that describes the type of the serialized message.
-///
-/// Protobuf library provides support to pack/unpack Any values in the form
-/// of utility functions or additional generated methods of the Any type.
-///
-/// Example 1: Pack and unpack a message in C++.
-///
-/// ```text
-/// Foo foo = ...;
-/// Any any;
-/// any.PackFrom(foo);
-/// ...
-/// if (any.UnpackTo(&foo)) {
-/// ...
-/// }
-/// ```
-///
-/// Example 2: Pack and unpack a message in Java.
-///
-/// ```text
-/// Foo foo = ...;
-/// Any any = Any.pack(foo);
-/// ...
-/// if (any.is(Foo.class)) {
-/// foo = any.unpack(Foo.class);
-/// }
-/// ```
-///
-/// Example 3: Pack and unpack a message in Python.
-///
-/// ```text
-/// foo = Foo(...)
-/// any = Any()
-/// any.Pack(foo)
-/// ...
-/// if any.Is(Foo.DESCRIPTOR):
-/// any.Unpack(foo)
-/// ...
-/// ```
-///
-/// Example 4: Pack and unpack a message in Go
-///
-/// ```text
-/// foo := &pb.Foo{...}
-/// any, err := anypb.New(foo)
-/// if err != nil {
-/// ...
-/// }
-/// ...
-/// foo := &pb.Foo{}
-/// if err := any.UnmarshalTo(foo); err != nil {
-/// ...
-/// }
-/// ```
-///
-/// The pack methods provided by protobuf library will by default use
-/// 'type.googleapis.com/full.type.name' as the type URL and the unpack
-/// methods only use the fully qualified type name after the last '/'
-/// in the type URL, for example "foo.bar.com/x/y.z" will yield type
-/// name "y.z".
-///
-/// # JSON
-///
-/// The JSON representation of an `Any` value uses the regular
-/// representation of the deserialized, embedded message, with an
-/// additional field `@type` which contains the type URL. Example:
-///
-/// ```text
-/// package google.profile;
-/// message Person {
-/// string first_name = 1;
-/// string last_name = 2;
-/// }
-///
-/// {
-/// "@type": "type.googleapis.com/google.profile.Person",
-/// "firstName": <string>,
-/// "lastName": <string>
-/// }
-/// ```
-///
-/// If the embedded message type is well-known and has a custom JSON
-/// representation, that representation will be embedded adding a field
-/// `value` which holds the custom JSON in addition to the `@type`
-/// field. Example (for message \[google.protobuf.Duration\]\[\]):
-///
-/// ```text
-/// {
-/// "@type": "type.googleapis.com/google.protobuf.Duration",
-/// "value": "1.212s"
-/// }
-/// ```
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Any {
- /// A URL/resource name that uniquely identifies the type of the serialized
- /// protocol buffer message. This string must contain at least
- /// one "/" character. The last segment of the URL's path must represent
- /// the fully qualified name of the type (as in
- /// `path/google.protobuf.Duration`). The name should be in a canonical form
- /// (e.g., leading "." is not accepted).
- ///
- /// In practice, teams usually precompile into the binary all types that they
- /// expect it to use in the context of Any. However, for URLs which use the
- /// scheme `http`, `https`, or no scheme, one can optionally set up a type
- /// server that maps type URLs to message definitions as follows:
- ///
- /// * If no scheme is provided, `https` is assumed.
- /// * An HTTP GET on the URL must yield a \[google.protobuf.Type\]\[\]
- /// value in binary format, or produce an error.
- /// * Applications are allowed to cache lookup results based on the
- /// URL, or have them precompiled into a binary to avoid any
- /// lookup. Therefore, binary compatibility needs to be preserved
- /// on changes to types. (Use versioned type names to manage
- /// breaking changes.)
- ///
- /// Note: this functionality is not currently available in the official
- /// protobuf release, and it is not used for type URLs beginning with
- /// type.googleapis.com.
- ///
- /// Schemes other than `http`, `https` (or the empty scheme) might be
- /// used with implementation specific semantics.
- #[prost(string, tag = "1")]
- pub type_url: ::prost::alloc::string::String,
- /// Must be a valid serialized protocol buffer of the above specified type.
- #[prost(bytes = "vec", tag = "2")]
- pub value: ::prost::alloc::vec::Vec<u8>,
-}
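Following the type-URL convention described above (`type.googleapis.com/` plus the fully qualified type name), an `Any` can be assembled by hand from any prost message. A sketch, assuming both `prost` and this crate (as `prost_types`) are available:

```rust
use prost::Message;
use prost_types::{Any, Duration};

fn main() {
    let payload = Duration { seconds: 1, nanos: 212_000_000 };
    let any = Any {
        // The last path segment must be the fully qualified type name.
        type_url: "type.googleapis.com/google.protobuf.Duration".to_string(),
        // Must be the serialized bytes of the message named by type_url.
        value: payload.encode_to_vec(),
    };
    let decoded = Duration::decode(any.value.as_slice()).expect("valid payload");
    assert_eq!(decoded, payload);
}
```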
-/// `SourceContext` represents information about the source of a
-/// protobuf element, like the file in which it is defined.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct SourceContext {
- /// The path-qualified name of the .proto file that contained the associated
- /// protobuf element. For example: `"google/protobuf/source_context.proto"`.
- #[prost(string, tag = "1")]
- pub file_name: ::prost::alloc::string::String,
-}
-/// A protocol buffer message type.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Type {
- /// The fully qualified message name.
- #[prost(string, tag = "1")]
- pub name: ::prost::alloc::string::String,
- /// The list of fields.
- #[prost(message, repeated, tag = "2")]
- pub fields: ::prost::alloc::vec::Vec<Field>,
- /// The list of types appearing in `oneof` definitions in this type.
- #[prost(string, repeated, tag = "3")]
- pub oneofs: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
- /// The protocol buffer options.
- #[prost(message, repeated, tag = "4")]
- pub options: ::prost::alloc::vec::Vec<Option>,
- /// The source context.
- #[prost(message, optional, tag = "5")]
- pub source_context: ::core::option::Option<SourceContext>,
- /// The source syntax.
- #[prost(enumeration = "Syntax", tag = "6")]
- pub syntax: i32,
-}
-/// A single field of a message type.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Field {
- /// The field type.
- #[prost(enumeration = "field::Kind", tag = "1")]
- pub kind: i32,
- /// The field cardinality.
- #[prost(enumeration = "field::Cardinality", tag = "2")]
- pub cardinality: i32,
- /// The field number.
- #[prost(int32, tag = "3")]
- pub number: i32,
- /// The field name.
- #[prost(string, tag = "4")]
- pub name: ::prost::alloc::string::String,
- /// The field type URL, without the scheme, for message or enumeration
- /// types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`.
- #[prost(string, tag = "6")]
- pub type_url: ::prost::alloc::string::String,
- /// The index of the field type in `Type.oneofs`, for message or enumeration
- /// types. The first type has index 1; zero means the type is not in the list.
- #[prost(int32, tag = "7")]
- pub oneof_index: i32,
- /// Whether to use alternative packed wire representation.
- #[prost(bool, tag = "8")]
- pub packed: bool,
- /// The protocol buffer options.
- #[prost(message, repeated, tag = "9")]
- pub options: ::prost::alloc::vec::Vec<Option>,
- /// The field JSON name.
- #[prost(string, tag = "10")]
- pub json_name: ::prost::alloc::string::String,
- /// The string value of the default value of this field. Proto2 syntax only.
- #[prost(string, tag = "11")]
- pub default_value: ::prost::alloc::string::String,
-}
-/// Nested message and enum types in `Field`.
-pub mod field {
- /// Basic field types.
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum Kind {
- /// Field type unknown.
- TypeUnknown = 0,
- /// Field type double.
- TypeDouble = 1,
- /// Field type float.
- TypeFloat = 2,
- /// Field type int64.
- TypeInt64 = 3,
- /// Field type uint64.
- TypeUint64 = 4,
- /// Field type int32.
- TypeInt32 = 5,
- /// Field type fixed64.
- TypeFixed64 = 6,
- /// Field type fixed32.
- TypeFixed32 = 7,
- /// Field type bool.
- TypeBool = 8,
- /// Field type string.
- TypeString = 9,
- /// Field type group. Proto2 syntax only, and deprecated.
- TypeGroup = 10,
- /// Field type message.
- TypeMessage = 11,
- /// Field type bytes.
- TypeBytes = 12,
- /// Field type uint32.
- TypeUint32 = 13,
- /// Field type enum.
- TypeEnum = 14,
- /// Field type sfixed32.
- TypeSfixed32 = 15,
- /// Field type sfixed64.
- TypeSfixed64 = 16,
- /// Field type sint32.
- TypeSint32 = 17,
- /// Field type sint64.
- TypeSint64 = 18,
- }
- impl Kind {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- Kind::TypeUnknown => "TYPE_UNKNOWN",
- Kind::TypeDouble => "TYPE_DOUBLE",
- Kind::TypeFloat => "TYPE_FLOAT",
- Kind::TypeInt64 => "TYPE_INT64",
- Kind::TypeUint64 => "TYPE_UINT64",
- Kind::TypeInt32 => "TYPE_INT32",
- Kind::TypeFixed64 => "TYPE_FIXED64",
- Kind::TypeFixed32 => "TYPE_FIXED32",
- Kind::TypeBool => "TYPE_BOOL",
- Kind::TypeString => "TYPE_STRING",
- Kind::TypeGroup => "TYPE_GROUP",
- Kind::TypeMessage => "TYPE_MESSAGE",
- Kind::TypeBytes => "TYPE_BYTES",
- Kind::TypeUint32 => "TYPE_UINT32",
- Kind::TypeEnum => "TYPE_ENUM",
- Kind::TypeSfixed32 => "TYPE_SFIXED32",
- Kind::TypeSfixed64 => "TYPE_SFIXED64",
- Kind::TypeSint32 => "TYPE_SINT32",
- Kind::TypeSint64 => "TYPE_SINT64",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "TYPE_UNKNOWN" => Some(Self::TypeUnknown),
- "TYPE_DOUBLE" => Some(Self::TypeDouble),
- "TYPE_FLOAT" => Some(Self::TypeFloat),
- "TYPE_INT64" => Some(Self::TypeInt64),
- "TYPE_UINT64" => Some(Self::TypeUint64),
- "TYPE_INT32" => Some(Self::TypeInt32),
- "TYPE_FIXED64" => Some(Self::TypeFixed64),
- "TYPE_FIXED32" => Some(Self::TypeFixed32),
- "TYPE_BOOL" => Some(Self::TypeBool),
- "TYPE_STRING" => Some(Self::TypeString),
- "TYPE_GROUP" => Some(Self::TypeGroup),
- "TYPE_MESSAGE" => Some(Self::TypeMessage),
- "TYPE_BYTES" => Some(Self::TypeBytes),
- "TYPE_UINT32" => Some(Self::TypeUint32),
- "TYPE_ENUM" => Some(Self::TypeEnum),
- "TYPE_SFIXED32" => Some(Self::TypeSfixed32),
- "TYPE_SFIXED64" => Some(Self::TypeSfixed64),
- "TYPE_SINT32" => Some(Self::TypeSint32),
- "TYPE_SINT64" => Some(Self::TypeSint64),
- _ => None,
- }
- }
- }
- /// Whether a field is optional, required, or repeated.
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum Cardinality {
- /// For fields with unknown cardinality.
- Unknown = 0,
- /// For optional fields.
- Optional = 1,
- /// For required fields. Proto2 syntax only.
- Required = 2,
- /// For repeated fields.
- Repeated = 3,
- }
- impl Cardinality {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- Cardinality::Unknown => "CARDINALITY_UNKNOWN",
- Cardinality::Optional => "CARDINALITY_OPTIONAL",
- Cardinality::Required => "CARDINALITY_REQUIRED",
- Cardinality::Repeated => "CARDINALITY_REPEATED",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "CARDINALITY_UNKNOWN" => Some(Self::Unknown),
- "CARDINALITY_OPTIONAL" => Some(Self::Optional),
- "CARDINALITY_REQUIRED" => Some(Self::Required),
- "CARDINALITY_REPEATED" => Some(Self::Repeated),
- _ => None,
- }
- }
- }
-}
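As a concrete reading of the descriptor above, a proto3 field `string name = 1;` would surface roughly as follows (values illustrative):

```rust
use prost_types::field::{Cardinality, Kind};
use prost_types::Field;

fn main() {
    let field = Field {
        kind: Kind::TypeString as i32,
        cardinality: Cardinality::Optional as i32,
        number: 1,
        name: "name".to_string(),
        json_name: "name".to_string(),
        // oneof_index is 1-based; 0 means the field is not part of a oneof.
        oneof_index: 0,
        ..Default::default()
    };
    assert_eq!(field.number, 1);
    assert_eq!(Kind::from_str_name("TYPE_STRING"), Some(Kind::TypeString));
}
```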
-/// Enum type definition.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Enum {
- /// Enum type name.
- #[prost(string, tag = "1")]
- pub name: ::prost::alloc::string::String,
- /// Enum value definitions.
- #[prost(message, repeated, tag = "2")]
- pub enumvalue: ::prost::alloc::vec::Vec<EnumValue>,
- /// Protocol buffer options.
- #[prost(message, repeated, tag = "3")]
- pub options: ::prost::alloc::vec::Vec<Option>,
- /// The source context.
- #[prost(message, optional, tag = "4")]
- pub source_context: ::core::option::Option<SourceContext>,
- /// The source syntax.
- #[prost(enumeration = "Syntax", tag = "5")]
- pub syntax: i32,
-}
-/// Enum value definition.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct EnumValue {
- /// Enum value name.
- #[prost(string, tag = "1")]
- pub name: ::prost::alloc::string::String,
- /// Enum value number.
- #[prost(int32, tag = "2")]
- pub number: i32,
- /// Protocol buffer options.
- #[prost(message, repeated, tag = "3")]
- pub options: ::prost::alloc::vec::Vec<Option>,
-}
-/// A protocol buffer option, which can be attached to a message, field,
-/// enumeration, etc.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Option {
- /// The option's name. For protobuf built-in options (options defined in
- /// descriptor.proto), this is the short name. For example, `"map_entry"`.
- /// For custom options, it should be the fully-qualified name. For example,
- /// `"google.api.http"`.
- #[prost(string, tag = "1")]
- pub name: ::prost::alloc::string::String,
- /// The option's value packed in an Any message. If the value is a primitive,
- /// the corresponding wrapper type defined in google/protobuf/wrappers.proto
- /// should be used. If the value is an enum, it should be stored as an int32
- /// value using the google.protobuf.Int32Value type.
- #[prost(message, optional, tag = "2")]
- pub value: ::core::option::Option<Any>,
-}
-/// The syntax in which a protocol buffer element is defined.
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
-#[repr(i32)]
-pub enum Syntax {
- /// Syntax `proto2`.
- Proto2 = 0,
- /// Syntax `proto3`.
- Proto3 = 1,
-}
-impl Syntax {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- Syntax::Proto2 => "SYNTAX_PROTO2",
- Syntax::Proto3 => "SYNTAX_PROTO3",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "SYNTAX_PROTO2" => Some(Self::Proto2),
- "SYNTAX_PROTO3" => Some(Self::Proto3),
- _ => None,
- }
- }
-}
-/// Api is a light-weight descriptor for an API Interface.
-///
-/// Interfaces are also described as "protocol buffer services" in some contexts,
-/// such as by the "service" keyword in a .proto file, but they are different
-/// from API Services, which represent a concrete implementation of an interface
-/// as opposed to simply a description of methods and bindings. They are also
-/// sometimes simply referred to as "APIs" in other contexts, such as the name of
-/// this message itself. See <https://cloud.google.com/apis/design/glossary> for
-/// detailed terminology.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Api {
- /// The fully qualified name of this interface, including package name
- /// followed by the interface's simple name.
- #[prost(string, tag = "1")]
- pub name: ::prost::alloc::string::String,
- /// The methods of this interface, in unspecified order.
- #[prost(message, repeated, tag = "2")]
- pub methods: ::prost::alloc::vec::Vec<Method>,
- /// Any metadata attached to the interface.
- #[prost(message, repeated, tag = "3")]
- pub options: ::prost::alloc::vec::Vec<Option>,
- /// A version string for this interface. If specified, must have the form
- /// `major-version.minor-version`, as in `1.10`. If the minor version is
- /// omitted, it defaults to zero. If the entire version field is empty, the
- /// major version is derived from the package name, as outlined below. If the
- /// field is not empty, the version in the package name will be verified to be
- /// consistent with what is provided here.
- ///
- /// The versioning schema uses [semantic
- /// versioning](<http://semver.org>) where the major version number
- /// indicates a breaking change and the minor version an additive,
- /// non-breaking change. Both version numbers are signals to users
- /// what to expect from different versions, and should be carefully
- /// chosen based on the product plan.
- ///
- /// The major version is also reflected in the package name of the
- /// interface, which must end in `v<major-version>`, as in
- /// `google.feature.v1`. For major versions 0 and 1, the suffix can
- /// be omitted. Zero major versions must only be used for
- /// experimental, non-GA interfaces.
- #[prost(string, tag = "4")]
- pub version: ::prost::alloc::string::String,
- /// Source context for the protocol buffer service represented by this
- /// message.
- #[prost(message, optional, tag = "5")]
- pub source_context: ::core::option::Option<SourceContext>,
- /// Included interfaces. See \[Mixin\]\[\].
- #[prost(message, repeated, tag = "6")]
- pub mixins: ::prost::alloc::vec::Vec<Mixin>,
- /// The source syntax of the service.
- #[prost(enumeration = "Syntax", tag = "7")]
- pub syntax: i32,
-}
-/// Method represents a method of an API interface.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Method {
- /// The simple name of this method.
- #[prost(string, tag = "1")]
- pub name: ::prost::alloc::string::String,
- /// A URL of the input message type.
- #[prost(string, tag = "2")]
- pub request_type_url: ::prost::alloc::string::String,
- /// If true, the request is streamed.
- #[prost(bool, tag = "3")]
- pub request_streaming: bool,
- /// The URL of the output message type.
- #[prost(string, tag = "4")]
- pub response_type_url: ::prost::alloc::string::String,
- /// If true, the response is streamed.
- #[prost(bool, tag = "5")]
- pub response_streaming: bool,
- /// Any metadata attached to the method.
- #[prost(message, repeated, tag = "6")]
- pub options: ::prost::alloc::vec::Vec<Option>,
- /// The source syntax of this method.
- #[prost(enumeration = "Syntax", tag = "7")]
- pub syntax: i32,
-}
-/// Declares an API Interface to be included in this interface. The including
-/// interface must redeclare all the methods from the included interface, but
-/// documentation and options are inherited as follows:
-///
-/// * If after comment and whitespace stripping, the documentation
-/// string of the redeclared method is empty, it will be inherited
-/// from the original method.
-///
-/// * Each annotation belonging to the service config (http,
-/// visibility) which is not set in the redeclared method will be
-/// inherited.
-///
-/// * If an http annotation is inherited, the path pattern will be
-/// modified as follows. Any version prefix will be replaced by the
-/// version of the including interface plus the \[root\]\[\] path if
-/// specified.
-///
-/// Example of a simple mixin:
-///
-/// ```text
-/// package google.acl.v1;
-/// service AccessControl {
-/// // Get the underlying ACL object.
-/// rpc GetAcl(GetAclRequest) returns (Acl) {
-/// option (google.api.http).get = "/v1/{resource=**}:getAcl";
-/// }
-/// }
-///
-/// package google.storage.v2;
-/// service Storage {
-/// rpc GetAcl(GetAclRequest) returns (Acl);
-///
-/// // Get a data record.
-/// rpc GetData(GetDataRequest) returns (Data) {
-/// option (google.api.http).get = "/v2/{resource=**}";
-/// }
-/// }
-/// ```
-///
-/// Example of a mixin configuration:
-///
-/// ```text
-/// apis:
-/// - name: google.storage.v2.Storage
-/// mixins:
-/// - name: google.acl.v1.AccessControl
-/// ```
-///
-/// The mixin construct implies that all methods in `AccessControl` are
-/// also declared with same name and request/response types in
-/// `Storage`. A documentation generator or annotation processor will
-/// see the effective `Storage.GetAcl` method after inheriting
-/// documentation and annotations as follows:
-///
-/// ```text
-/// service Storage {
-/// // Get the underlying ACL object.
-/// rpc GetAcl(GetAclRequest) returns (Acl) {
-/// option (google.api.http).get = "/v2/{resource=**}:getAcl";
-/// }
-/// ...
-/// }
-/// ```
-///
-/// Note how the version in the path pattern changed from `v1` to `v2`.
-///
-/// If the `root` field in the mixin is specified, it should be a
-/// relative path under which inherited HTTP paths are placed. Example:
-///
-/// ```text
-/// apis:
-/// - name: google.storage.v2.Storage
-/// mixins:
-/// - name: google.acl.v1.AccessControl
-/// root: acls
-/// ```
-///
-/// This implies the following inherited HTTP annotation:
-///
-/// ```text
-/// service Storage {
-/// // Get the underlying ACL object.
-/// rpc GetAcl(GetAclRequest) returns (Acl) {
-/// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl";
-/// }
-/// ...
-/// }
-/// ```
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Mixin {
- /// The fully qualified name of the interface which is included.
- #[prost(string, tag = "1")]
- pub name: ::prost::alloc::string::String,
- /// If non-empty specifies a path under which inherited HTTP paths
- /// are rooted.
- #[prost(string, tag = "2")]
- pub root: ::prost::alloc::string::String,
-}
-/// A Duration represents a signed, fixed-length span of time represented
-/// as a count of seconds and fractions of seconds at nanosecond
-/// resolution. It is independent of any calendar and concepts like "day"
-/// or "month". It is related to Timestamp in that the difference between
-/// two Timestamp values is a Duration and it can be added or subtracted
-/// from a Timestamp. Range is approximately +-10,000 years.
-///
-/// # Examples
-///
-/// Example 1: Compute Duration from two Timestamps in pseudo code.
-///
-/// ```text
-/// Timestamp start = ...;
-/// Timestamp end = ...;
-/// Duration duration = ...;
-///
-/// duration.seconds = end.seconds - start.seconds;
-/// duration.nanos = end.nanos - start.nanos;
-///
-/// if (duration.seconds < 0 && duration.nanos > 0) {
-/// duration.seconds += 1;
-/// duration.nanos -= 1000000000;
-/// } else if (duration.seconds > 0 && duration.nanos < 0) {
-/// duration.seconds -= 1;
-/// duration.nanos += 1000000000;
-/// }
-/// ```
-///
-/// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
-///
-/// ```text
-/// Timestamp start = ...;
-/// Duration duration = ...;
-/// Timestamp end = ...;
-///
-/// end.seconds = start.seconds + duration.seconds;
-/// end.nanos = start.nanos + duration.nanos;
-///
-/// if (end.nanos < 0) {
-/// end.seconds -= 1;
-/// end.nanos += 1000000000;
-/// } else if (end.nanos >= 1000000000) {
-/// end.seconds += 1;
-/// end.nanos -= 1000000000;
-/// }
-/// ```
-///
-/// Example 3: Compute Duration from datetime.timedelta in Python.
-///
-/// ```text
-/// td = datetime.timedelta(days=3, minutes=10)
-/// duration = Duration()
-/// duration.FromTimedelta(td)
-/// ```
-///
-/// # JSON Mapping
-///
-/// In JSON format, the Duration type is encoded as a string rather than an
-/// object, where the string ends in the suffix "s" (indicating seconds) and
-/// is preceded by the number of seconds, with nanoseconds expressed as
-/// fractional seconds. For example, 3 seconds with 0 nanoseconds should be
-/// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
-/// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
-/// microsecond should be expressed in JSON format as "3.000001s".
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Duration {
- /// Signed seconds of the span of time. Must be from -315,576,000,000
- /// to +315,576,000,000 inclusive. Note: these bounds are computed from:
- /// 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
- #[prost(int64, tag = "1")]
- pub seconds: i64,
- /// Signed fractions of a second at nanosecond resolution of the span
- /// of time. Durations less than one second are represented with a 0
- /// `seconds` field and a positive or negative `nanos` field. For durations
- /// of one second or more, a non-zero value for the `nanos` field must be
- /// of the same sign as the `seconds` field. Must be from -999,999,999
- /// to +999,999,999 inclusive.
- #[prost(int32, tag = "2")]
- pub nanos: i32,
-}
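Example 1 above computes a Duration as the difference of two Timestamps and then repairs the sign of `nanos`. A direct Rust transcription of that pseudo-code (the helper is illustrative, not part of this crate):

```rust
use prost_types::Duration;

/// Difference between two (seconds, nanos) instants, normalized so that
/// `seconds` and `nanos` never carry opposite signs, per Example 1 above.
fn duration_between(start: (i64, i32), end: (i64, i32)) -> Duration {
    let mut seconds = end.0 - start.0;
    let mut nanos = end.1 - start.1;
    if seconds < 0 && nanos > 0 {
        seconds += 1;
        nanos -= 1_000_000_000;
    } else if seconds > 0 && nanos < 0 {
        seconds -= 1;
        nanos += 1_000_000_000;
    }
    Duration { seconds, nanos }
}

fn main() {
    // From 1.25s to 3.75s is 2.5s.
    let d = duration_between((1, 250_000_000), (3, 750_000_000));
    assert_eq!((d.seconds, d.nanos), (2, 500_000_000));
}
```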
-/// `FieldMask` represents a set of symbolic field paths, for example:
-///
-/// ```text
-/// paths: "f.a"
-/// paths: "f.b.d"
-/// ```
-///
-/// Here `f` represents a field in some root message, `a` and `b`
-/// fields in the message found in `f`, and `d` a field found in the
-/// message in `f.b`.
-///
-/// Field masks are used to specify a subset of fields that should be
-/// returned by a get operation or modified by an update operation.
-/// Field masks also have a custom JSON encoding (see below).
-///
-/// # Field Masks in Projections
-///
-/// When used in the context of a projection, a response message or
-/// sub-message is filtered by the API to only contain those fields as
-/// specified in the mask. For example, if the mask in the previous
-/// example is applied to a response message as follows:
-///
-/// ```text
-/// f {
-/// a : 22
-/// b {
-/// d : 1
-/// x : 2
-/// }
-/// y : 13
-/// }
-/// z: 8
-/// ```
-///
-/// The result will not contain specific values for fields x,y and z
-/// (their value will be set to the default, and omitted in proto text
-/// output):
-///
-/// ```text
-/// f {
-/// a : 22
-/// b {
-/// d : 1
-/// }
-/// }
-/// ```
-///
-/// A repeated field is not allowed except at the last position of a
-/// paths string.
-///
-/// If a FieldMask object is not present in a get operation, the
-/// operation applies to all fields (as if a FieldMask of all fields
-/// had been specified).
-///
-/// Note that a field mask does not necessarily apply to the
-/// top-level response message. In case of a REST get operation, the
-/// field mask applies directly to the response, but in case of a REST
-/// list operation, the mask instead applies to each individual message
-/// in the returned resource list. In case of a REST custom method,
-/// other definitions may be used. Where the mask applies will be
-/// clearly documented together with its declaration in the API. In
-/// any case, the effect on the returned resource/resources is required
-/// behavior for APIs.
-///
-/// # Field Masks in Update Operations
-///
-/// A field mask in update operations specifies which fields of the
-/// targeted resource are going to be updated. The API is required
-/// to only change the values of the fields as specified in the mask
-/// and leave the others untouched. If a resource is passed in to
-/// describe the updated values, the API ignores the values of all
-/// fields not covered by the mask.
-///
-/// If a repeated field is specified for an update operation, new values will
-/// be appended to the existing repeated field in the target resource. Note that
-/// a repeated field is only allowed in the last position of a `paths` string.
-///
-/// If a sub-message is specified in the last position of the field mask for an
- /// update operation, then the new value will be merged into the existing sub-message
-/// in the target resource.
-///
-/// For example, given the target message:
-///
-/// ```text
-/// f {
-/// b {
-/// d: 1
-/// x: 2
-/// }
-/// c: \[1\]
-/// }
-/// ```
-///
-/// And an update message:
-///
-/// ```text
-/// f {
-/// b {
-/// d: 10
-/// }
-/// c: \[2\]
-/// }
-/// ```
-///
-/// then if the field mask is:
-///
-/// paths: \["f.b", "f.c"\]
-///
-/// then the result will be:
-///
-/// ```text
-/// f {
-/// b {
-/// d: 10
-/// x: 2
-/// }
-/// c: \[1, 2\]
-/// }
-/// ```
-///
-/// An implementation may provide options to override this default behavior for
-/// repeated and message fields.
-///
-/// In order to reset a field's value to the default, the field must
-/// be in the mask and set to the default value in the provided resource.
-/// Hence, in order to reset all fields of a resource, provide a default
-/// instance of the resource and set all fields in the mask, or do
-/// not provide a mask as described below.
-///
-/// If a field mask is not present on update, the operation applies to
-/// all fields (as if a field mask of all fields has been specified).
-/// Note that in the presence of schema evolution, this may mean that
-/// fields the client does not know and has therefore not filled into
-/// the request will be reset to their default. If this is unwanted
-/// behavior, a specific service may require a client to always specify
-/// a field mask, producing an error if not.
-///
-/// As with get operations, the location of the resource which
-/// describes the updated values in the request message depends on the
-/// operation kind. In any case, the effect of the field mask is
-/// required to be honored by the API.
-///
-/// ## Considerations for HTTP REST
-///
-/// The HTTP kind of an update operation which uses a field mask must
-/// be set to PATCH instead of PUT in order to satisfy HTTP semantics
-/// (PUT must only be used for full updates).
-///
-/// # JSON Encoding of Field Masks
-///
-/// In JSON, a field mask is encoded as a single string where paths are
- /// separated by a comma. Field names in each path are converted
-/// to/from lower-camel naming conventions.
-///
-/// As an example, consider the following message declarations:
-///
-/// ```text
-/// message Profile {
-/// User user = 1;
-/// Photo photo = 2;
-/// }
-/// message User {
-/// string display_name = 1;
-/// string address = 2;
-/// }
-/// ```
-///
-/// In proto a field mask for `Profile` may look as such:
-///
-/// ```text
-/// mask {
-/// paths: "user.display_name"
-/// paths: "photo"
-/// }
-/// ```
-///
-/// In JSON, the same mask is represented as below:
-///
-/// ```text
-/// {
-/// mask: "user.displayName,photo"
-/// }
-/// ```
-///
-/// # Field Masks and Oneof Fields
-///
-/// Field masks treat fields in oneofs just as regular fields. Consider the
-/// following message:
-///
-/// ```text
-/// message SampleMessage {
-/// oneof test_oneof {
-/// string name = 4;
-/// SubMessage sub_message = 9;
-/// }
-/// }
-/// ```
-///
-/// The field mask can be:
-///
-/// ```text
-/// mask {
-/// paths: "name"
-/// }
-/// ```
-///
-/// Or:
-///
-/// ```text
-/// mask {
-/// paths: "sub_message"
-/// }
-/// ```
-///
-/// Note that oneof type names ("test_oneof" in this case) cannot be used in
-/// paths.
-///
-/// ## Field Mask Verification
-///
-/// The implementation of any API method which has a FieldMask type field in the
-/// request should verify the included field paths, and return an
-/// `INVALID_ARGUMENT` error if any path is unmappable.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct FieldMask {
- /// The set of field mask paths.
- #[prost(string, repeated, tag = "1")]
- pub paths: ::prost::alloc::vec::Vec<::prost::alloc::string::String>,
-}
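Tying the projection and JSON-mapping rules together, the `Profile` mask from the example above is just two paths. A sketch, under the same `prost_types` assumption:

```rust
use prost_types::FieldMask;

fn main() {
    let mask = FieldMask {
        paths: vec!["user.display_name".to_string(), "photo".to_string()],
    };
    // The JSON form of this mask would be "user.displayName,photo":
    // paths joined by commas, each converted to lower-camel case.
    assert_eq!(mask.paths.len(), 2);
}
```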
-/// `Struct` represents a structured data value, consisting of fields
-/// which map to dynamically typed values. In some languages, `Struct`
-/// might be supported by a native representation. For example, in
- /// scripting languages like JS, a struct is represented as an
-/// object. The details of that representation are described together
-/// with the proto support for the language.
-///
-/// The JSON representation for `Struct` is JSON object.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Struct {
- /// Unordered map of dynamically typed values.
- #[prost(btree_map = "string, message", tag = "1")]
- pub fields: ::prost::alloc::collections::BTreeMap<
- ::prost::alloc::string::String,
- Value,
- >,
-}
-/// `Value` represents a dynamically typed value which can be either
-/// null, a number, a string, a boolean, a recursive struct value, or a
-/// list of values. A producer of value is expected to set one of these
-/// variants. Absence of any variant indicates an error.
-///
-/// The JSON representation for `Value` is JSON value.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Value {
- /// The kind of value.
- #[prost(oneof = "value::Kind", tags = "1, 2, 3, 4, 5, 6")]
- pub kind: ::core::option::Option<value::Kind>,
-}
-/// Nested message and enum types in `Value`.
-pub mod value {
- /// The kind of value.
- #[allow(clippy::derive_partial_eq_without_eq)]
- #[derive(Clone, PartialEq, ::prost::Oneof)]
- pub enum Kind {
- /// Represents a null value.
- #[prost(enumeration = "super::NullValue", tag = "1")]
- NullValue(i32),
- /// Represents a double value.
- #[prost(double, tag = "2")]
- NumberValue(f64),
- /// Represents a string value.
- #[prost(string, tag = "3")]
- StringValue(::prost::alloc::string::String),
- /// Represents a boolean value.
- #[prost(bool, tag = "4")]
- BoolValue(bool),
- /// Represents a structured value.
- #[prost(message, tag = "5")]
- StructValue(super::Struct),
- /// Represents a repeated `Value`.
- #[prost(message, tag = "6")]
- ListValue(super::ListValue),
- }
-}
-/// `ListValue` is a wrapper around a repeated field of values.
-///
-/// The JSON representation for `ListValue` is JSON array.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ListValue {
- /// Repeated field of dynamically typed values.
- #[prost(message, repeated, tag = "1")]
- pub values: ::prost::alloc::vec::Vec<Value>,
-}
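To see how `Struct`, `Value`, and `ListValue` compose, here is the JSON object `{"name": "corge", "sizes": [1, 2]}` built by hand (same crate-name assumption as above):

```rust
use std::collections::BTreeMap;

use prost_types::value::Kind;
use prost_types::{ListValue, Struct, Value};

fn main() {
    let mut fields = BTreeMap::new();
    fields.insert(
        "name".to_string(),
        Value { kind: Some(Kind::StringValue("corge".to_string())) },
    );
    fields.insert(
        "sizes".to_string(),
        Value {
            kind: Some(Kind::ListValue(ListValue {
                values: vec![
                    Value { kind: Some(Kind::NumberValue(1.0)) },
                    Value { kind: Some(Kind::NumberValue(2.0)) },
                ],
            })),
        },
    );
    let object = Struct { fields };
    assert_eq!(object.fields.len(), 2);
}
```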
-/// `NullValue` is a singleton enumeration to represent the null value for the
-/// `Value` type union.
-///
-/// The JSON representation for `NullValue` is JSON `null`.
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
-#[repr(i32)]
-pub enum NullValue {
- /// Null value.
- NullValue = 0,
-}
-impl NullValue {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- NullValue::NullValue => "NULL_VALUE",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "NULL_VALUE" => Some(Self::NullValue),
- _ => None,
- }
- }
-}
-/// A Timestamp represents a point in time independent of any time zone or local
-/// calendar, encoded as a count of seconds and fractions of seconds at
-/// nanosecond resolution. The count is relative to an epoch at UTC midnight on
-/// January 1, 1970, in the proleptic Gregorian calendar which extends the
-/// Gregorian calendar backwards to year one.
-///
-/// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
-/// second table is needed for interpretation, using a [24-hour linear
-/// smear](<https://developers.google.com/time/smear>).
-///
-/// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
-/// restricting to that range, we ensure that we can convert to and from [RFC
-/// 3339](<https://www.ietf.org/rfc/rfc3339.txt>) date strings.
-///
-/// # Examples
-///
-/// Example 1: Compute Timestamp from POSIX `time()`.
-///
-/// ```text
-/// Timestamp timestamp;
-/// timestamp.set_seconds(time(NULL));
-/// timestamp.set_nanos(0);
-/// ```
-///
-/// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
-///
-/// ```text
-/// struct timeval tv;
-/// gettimeofday(&tv, NULL);
-///
-/// Timestamp timestamp;
-/// timestamp.set_seconds(tv.tv_sec);
-/// timestamp.set_nanos(tv.tv_usec * 1000);
-/// ```
-///
-/// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
-///
-/// ```text
-/// FILETIME ft;
-/// GetSystemTimeAsFileTime(&ft);
-/// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
-///
-/// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
-/// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
-/// Timestamp timestamp;
-/// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
-/// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
-/// ```
-///
-/// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
-///
-/// ```text
-/// long millis = System.currentTimeMillis();
-///
-/// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
-/// .setNanos((int) ((millis % 1000) * 1000000)).build();
-/// ```
-///
-/// Example 5: Compute Timestamp from Java `Instant.now()`.
-///
-/// ```text
-/// Instant now = Instant.now();
-///
-/// Timestamp timestamp =
-/// Timestamp.newBuilder().setSeconds(now.getEpochSecond())
-/// .setNanos(now.getNano()).build();
-/// ```
-///
-/// Example 6: Compute Timestamp from current time in Python.
-///
-/// ```text
-/// timestamp = Timestamp()
-/// timestamp.GetCurrentTime()
-/// ```
-///
-/// # JSON Mapping
-///
-/// In JSON format, the Timestamp type is encoded as a string in the
-/// [RFC 3339](<https://www.ietf.org/rfc/rfc3339.txt>) format. That is, the
-/// format is "{year}-{month}-{day}T{hour}:{min}:{sec}\[.{frac_sec}\]Z"
-/// where {year} is always expressed using four digits while {month}, {day},
-/// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
-/// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
-/// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
-/// is required. A proto3 JSON serializer should always use UTC (as indicated by
-/// "Z") when printing the Timestamp type and a proto3 JSON parser should be
-/// able to accept both UTC and other timezones (as indicated by an offset).
-///
-/// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
-/// 01:30 UTC on January 15, 2017.
-///
-/// In JavaScript, one can convert a Date object to this format using the
-/// standard
-/// [toISOString()](<https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString>)
-/// method. In Python, a standard `datetime.datetime` object can be converted
-/// to this format using
-/// [`strftime`](<https://docs.python.org/2/library/time.html#time.strftime>) with
-/// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
-/// the Joda Time's [`ISODateTimeFormat.dateTime()`](<http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D>) to obtain a formatter capable of generating timestamps in this format.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct Timestamp {
- /// Represents seconds of UTC time since Unix epoch
- /// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
- /// 9999-12-31T23:59:59Z inclusive.
- #[prost(int64, tag = "1")]
- pub seconds: i64,
- /// Non-negative fractions of a second at nanosecond resolution. Negative
- /// second values with fractions must still have non-negative nanos values
- /// that count forward in time. Must be from 0 to 999,999,999
- /// inclusive.
- #[prost(int32, tag = "2")]
- pub nanos: i32,
-}
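A minimal sketch tying the seconds/nanos layout above to the RFC 3339 JSON mapping (the epoch arithmetic below is my own, not taken from the vendored code):

use prost_types::Timestamp;

fn main() {
    // "2017-01-15T01:30:15.01Z" from the JSON-mapping example above:
    // 1_484_443_815 s after the Unix epoch, plus 0.01 s expressed in nanos.
    let ts: Timestamp = "2017-01-15T01:30:15.01Z".parse().unwrap();
    assert_eq!(ts.seconds, 1_484_443_815);
    assert_eq!(ts.nanos, 10_000_000);
}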
diff --git a/vendor/prost-types-0.12.6/src/timestamp.rs b/vendor/prost-types-0.12.6/src/timestamp.rs
deleted file mode 100644
index 216712a4..00000000
--- a/vendor/prost-types-0.12.6/src/timestamp.rs
+++ /dev/null
@@ -1,416 +0,0 @@
-use super::*;
-
-impl Timestamp {
- /// Normalizes the timestamp to a canonical format.
- ///
- /// Based on [`google::protobuf::util::CreateNormalized`][1].
- ///
- /// [1]: https://github.com/google/protobuf/blob/v3.3.2/src/google/protobuf/util/time_util.cc#L59-L77
- pub fn normalize(&mut self) {
- // Make sure nanos is in the range.
- if self.nanos <= -NANOS_PER_SECOND || self.nanos >= NANOS_PER_SECOND {
- if let Some(seconds) = self
- .seconds
- .checked_add((self.nanos / NANOS_PER_SECOND) as i64)
- {
- self.seconds = seconds;
- self.nanos %= NANOS_PER_SECOND;
- } else if self.nanos < 0 {
- // Negative overflow! Set to the earliest normal value.
- self.seconds = i64::MIN;
- self.nanos = 0;
- } else {
- // Positive overflow! Set to the latest normal value.
- self.seconds = i64::MAX;
- self.nanos = 999_999_999;
- }
- }
-
- // For Timestamp nanos should be in the range [0, 999999999].
- if self.nanos < 0 {
- if let Some(seconds) = self.seconds.checked_sub(1) {
- self.seconds = seconds;
- self.nanos += NANOS_PER_SECOND;
- } else {
- // Negative overflow! Set to the earliest normal value.
- debug_assert_eq!(self.seconds, i64::MIN);
- self.nanos = 0;
- }
- }
-
- // TODO: should this be checked?
- // debug_assert!(self.seconds >= -62_135_596_800 && self.seconds <= 253_402_300_799,
- // "invalid timestamp: {:?}", self);
- }
-
- /// Normalizes the timestamp to a canonical format, returning the original value if it cannot be
- /// normalized.
- ///
- /// Normalization is based on [`google::protobuf::util::CreateNormalized`][1].
- ///
- /// [1]: https://github.com/google/protobuf/blob/v3.3.2/src/google/protobuf/util/time_util.cc#L59-L77
- pub fn try_normalize(mut self) -> Result<Timestamp, Timestamp> {
- let before = self.clone();
- self.normalize();
- // If the seconds value has changed, and is either i64::MIN or i64::MAX, then the timestamp
- // normalization overflowed.
- if (self.seconds == i64::MAX || self.seconds == i64::MIN) && self.seconds != before.seconds
- {
- Err(before)
- } else {
- Ok(self)
- }
- }
-
- /// Creates a new `Timestamp` at the start of the provided UTC date.
- pub fn date(year: i64, month: u8, day: u8) -> Result<Timestamp, TimestampError> {
- Timestamp::date_time_nanos(year, month, day, 0, 0, 0, 0)
- }
-
- /// Creates a new `Timestamp` instance with the provided UTC date and time.
- pub fn date_time(
- year: i64,
- month: u8,
- day: u8,
- hour: u8,
- minute: u8,
- second: u8,
- ) -> Result<Timestamp, TimestampError> {
- Timestamp::date_time_nanos(year, month, day, hour, minute, second, 0)
- }
-
- /// Creates a new `Timestamp` instance with the provided UTC date and time.
- pub fn date_time_nanos(
- year: i64,
- month: u8,
- day: u8,
- hour: u8,
- minute: u8,
- second: u8,
- nanos: u32,
- ) -> Result<Timestamp, TimestampError> {
- let date_time = datetime::DateTime {
- year,
- month,
- day,
- hour,
- minute,
- second,
- nanos,
- };
-
- if date_time.is_valid() {
- Ok(Timestamp::from(date_time))
- } else {
- Err(TimestampError::InvalidDateTime)
- }
- }
-}
-
-impl Name for Timestamp {
- const PACKAGE: &'static str = PACKAGE;
- const NAME: &'static str = "Timestamp";
-
- fn type_url() -> String {
- type_url_for::<Self>()
- }
-}
-
-/// Implements the unstable/naive version of `Eq`: a basic equality check on the internal fields of the `Timestamp`.
-/// This implies that `normalized_ts != non_normalized_ts` even if `normalized_ts == non_normalized_ts.normalized()`.
-#[cfg(feature = "std")]
-impl Eq for Timestamp {}
-
-#[cfg(feature = "std")]
-impl std::hash::Hash for Timestamp {
- fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
- self.seconds.hash(state);
- self.nanos.hash(state);
- }
-}
-
-#[cfg(feature = "std")]
-impl From<std::time::SystemTime> for Timestamp {
- fn from(system_time: std::time::SystemTime) -> Timestamp {
- let (seconds, nanos) = match system_time.duration_since(std::time::UNIX_EPOCH) {
- Ok(duration) => {
- let seconds = i64::try_from(duration.as_secs()).unwrap();
- (seconds, duration.subsec_nanos() as i32)
- }
- Err(error) => {
- let duration = error.duration();
- let seconds = i64::try_from(duration.as_secs()).unwrap();
- let nanos = duration.subsec_nanos() as i32;
- if nanos == 0 {
- (-seconds, 0)
- } else {
- (-seconds - 1, 1_000_000_000 - nanos)
- }
- }
- };
- Timestamp { seconds, nanos }
- }
-}
-
-/// A timestamp handling error.
-#[allow(clippy::derive_partial_eq_without_eq)]
-#[derive(Debug, PartialEq)]
-#[non_exhaustive]
-pub enum TimestampError {
- /// Indicates that a [`Timestamp`] could not be converted to
- /// [`SystemTime`][std::time::SystemTime] because it is out of range.
- ///
- /// The range of times that can be represented by `SystemTime` depends on the platform. All
- /// `Timestamp`s are likely representable on 64-bit Unix-like platforms, but other platforms,
- /// such as Windows and 32-bit Linux, may not be able to represent the full range of
- /// `Timestamp`s.
- OutOfSystemRange(Timestamp),
-
- /// An error indicating failure to parse a timestamp in RFC-3339 format.
- ParseFailure,
-
- /// Indicates an error when constructing a timestamp due to invalid date or time data.
- InvalidDateTime,
-}
-
-impl fmt::Display for TimestampError {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self {
- TimestampError::OutOfSystemRange(timestamp) => {
- write!(
- f,
- "{} is not representable as a `SystemTime` because it is out of range",
- timestamp
- )
- }
- TimestampError::ParseFailure => {
- write!(f, "failed to parse RFC-3339 formatted timestamp")
- }
- TimestampError::InvalidDateTime => {
- write!(f, "invalid date or time")
- }
- }
- }
-}
-
-#[cfg(feature = "std")]
-impl std::error::Error for TimestampError {}
-
-#[cfg(feature = "std")]
-impl TryFrom<Timestamp> for std::time::SystemTime {
- type Error = TimestampError;
-
- fn try_from(mut timestamp: Timestamp) -> Result<std::time::SystemTime, Self::Error> {
- let orig_timestamp = timestamp.clone();
- timestamp.normalize();
-
- let system_time = if timestamp.seconds >= 0 {
- std::time::UNIX_EPOCH.checked_add(time::Duration::from_secs(timestamp.seconds as u64))
- } else {
- std::time::UNIX_EPOCH.checked_sub(time::Duration::from_secs(
- timestamp
- .seconds
- .checked_neg()
- .ok_or_else(|| TimestampError::OutOfSystemRange(timestamp.clone()))?
- as u64,
- ))
- };
-
- let system_time = system_time.and_then(|system_time| {
- system_time.checked_add(time::Duration::from_nanos(timestamp.nanos as u64))
- });
-
- system_time.ok_or(TimestampError::OutOfSystemRange(orig_timestamp))
- }
-}
-
-impl FromStr for Timestamp {
- type Err = TimestampError;
-
- fn from_str(s: &str) -> Result<Timestamp, TimestampError> {
- datetime::parse_timestamp(s).ok_or(TimestampError::ParseFailure)
- }
-}
-
-impl fmt::Display for Timestamp {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- datetime::DateTime::from(self.clone()).fmt(f)
- }
-}
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[cfg(feature = "std")]
- use proptest::prelude::*;
- #[cfg(feature = "std")]
- use std::time::{self, SystemTime, UNIX_EPOCH};
-
- #[cfg(feature = "std")]
- proptest! {
- #[test]
- fn check_system_time_roundtrip(
- system_time in SystemTime::arbitrary(),
- ) {
- prop_assert_eq!(SystemTime::try_from(Timestamp::from(system_time)).unwrap(), system_time);
- }
-
- #[test]
- fn check_timestamp_roundtrip_via_system_time(
- seconds in i64::arbitrary(),
- nanos in i32::arbitrary(),
- ) {
- let mut timestamp = Timestamp { seconds, nanos };
- timestamp.normalize();
- if let Ok(system_time) = SystemTime::try_from(timestamp.clone()) {
- prop_assert_eq!(Timestamp::from(system_time), timestamp);
- }
- }
- }
-
- #[cfg(feature = "std")]
- #[test]
- fn check_timestamp_negative_seconds() {
- // Representative tests for the case of timestamps before the UTC Epoch time:
- // validate the expected behaviour that "negative second values with fractions
- // must still have non-negative nanos values that count forward in time"
- // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Timestamp
- //
- // To ensure cross-platform compatibility, all nanosecond values in these
- // tests are in minimum 100 ns increments. This does not affect the general
- // character of the behaviour being tested, but ensures that the tests are
- // valid for both POSIX (1 ns precision) and Windows (100 ns precision).
- assert_eq!(
- Timestamp::from(UNIX_EPOCH - time::Duration::new(1_001, 0)),
- Timestamp {
- seconds: -1_001,
- nanos: 0
- }
- );
- assert_eq!(
- Timestamp::from(UNIX_EPOCH - time::Duration::new(0, 999_999_900)),
- Timestamp {
- seconds: -1,
- nanos: 100
- }
- );
- assert_eq!(
- Timestamp::from(UNIX_EPOCH - time::Duration::new(2_001_234, 12_300)),
- Timestamp {
- seconds: -2_001_235,
- nanos: 999_987_700
- }
- );
- assert_eq!(
- Timestamp::from(UNIX_EPOCH - time::Duration::new(768, 65_432_100)),
- Timestamp {
- seconds: -769,
- nanos: 934_567_900
- }
- );
- }
-
- #[cfg(all(unix, feature = "std"))]
- #[test]
- fn check_timestamp_negative_seconds_1ns() {
- // UNIX-only test cases with 1 ns precision
- assert_eq!(
- Timestamp::from(UNIX_EPOCH - time::Duration::new(0, 999_999_999)),
- Timestamp {
- seconds: -1,
- nanos: 1
- }
- );
- assert_eq!(
- Timestamp::from(UNIX_EPOCH - time::Duration::new(1_234_567, 123)),
- Timestamp {
- seconds: -1_234_568,
- nanos: 999_999_877
- }
- );
- assert_eq!(
- Timestamp::from(UNIX_EPOCH - time::Duration::new(890, 987_654_321)),
- Timestamp {
- seconds: -891,
- nanos: 12_345_679
- }
- );
- }
-
- #[cfg(feature = "std")]
- #[test]
- fn check_timestamp_normalize() {
- // Make sure that `Timestamp::normalize` behaves correctly on and near overflow.
- #[rustfmt::skip] // Don't mangle the table formatting.
- let cases = [
- // --- Table of test cases ---
- // test seconds test nanos expected seconds expected nanos
- (line!(), 0, 0, 0, 0),
- (line!(), 1, 1, 1, 1),
- (line!(), -1, -1, -2, 999_999_999),
- (line!(), 0, 999_999_999, 0, 999_999_999),
- (line!(), 0, -999_999_999, -1, 1),
- (line!(), 0, 1_000_000_000, 1, 0),
- (line!(), 0, -1_000_000_000, -1, 0),
- (line!(), 0, 1_000_000_001, 1, 1),
- (line!(), 0, -1_000_000_001, -2, 999_999_999),
- (line!(), -1, 1, -1, 1),
- (line!(), 1, -1, 0, 999_999_999),
- (line!(), -1, 1_000_000_000, 0, 0),
- (line!(), 1, -1_000_000_000, 0, 0),
- (line!(), i64::MIN , 0, i64::MIN , 0),
- (line!(), i64::MIN + 1, 0, i64::MIN + 1, 0),
- (line!(), i64::MIN , 1, i64::MIN , 1),
- (line!(), i64::MIN , 1_000_000_000, i64::MIN + 1, 0),
- (line!(), i64::MIN , -1_000_000_000, i64::MIN , 0),
- (line!(), i64::MIN + 1, -1_000_000_000, i64::MIN , 0),
- (line!(), i64::MIN + 2, -1_000_000_000, i64::MIN + 1, 0),
- (line!(), i64::MIN , -1_999_999_998, i64::MIN , 0),
- (line!(), i64::MIN + 1, -1_999_999_998, i64::MIN , 0),
- (line!(), i64::MIN + 2, -1_999_999_998, i64::MIN , 2),
- (line!(), i64::MIN , -1_999_999_999, i64::MIN , 0),
- (line!(), i64::MIN + 1, -1_999_999_999, i64::MIN , 0),
- (line!(), i64::MIN + 2, -1_999_999_999, i64::MIN , 1),
- (line!(), i64::MIN , -2_000_000_000, i64::MIN , 0),
- (line!(), i64::MIN + 1, -2_000_000_000, i64::MIN , 0),
- (line!(), i64::MIN + 2, -2_000_000_000, i64::MIN , 0),
- (line!(), i64::MIN , -999_999_998, i64::MIN , 0),
- (line!(), i64::MIN + 1, -999_999_998, i64::MIN , 2),
- (line!(), i64::MAX , 0, i64::MAX , 0),
- (line!(), i64::MAX - 1, 0, i64::MAX - 1, 0),
- (line!(), i64::MAX , -1, i64::MAX - 1, 999_999_999),
- (line!(), i64::MAX , 1_000_000_000, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 1, 1_000_000_000, i64::MAX , 0),
- (line!(), i64::MAX - 2, 1_000_000_000, i64::MAX - 1, 0),
- (line!(), i64::MAX , 1_999_999_998, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 1, 1_999_999_998, i64::MAX , 999_999_998),
- (line!(), i64::MAX - 2, 1_999_999_998, i64::MAX - 1, 999_999_998),
- (line!(), i64::MAX , 1_999_999_999, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 1, 1_999_999_999, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 2, 1_999_999_999, i64::MAX - 1, 999_999_999),
- (line!(), i64::MAX , 2_000_000_000, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 1, 2_000_000_000, i64::MAX , 999_999_999),
- (line!(), i64::MAX - 2, 2_000_000_000, i64::MAX , 0),
- (line!(), i64::MAX , 999_999_998, i64::MAX , 999_999_998),
- (line!(), i64::MAX - 1, 999_999_998, i64::MAX - 1, 999_999_998),
- ];
-
- for case in cases.iter() {
- let mut test_timestamp = crate::Timestamp {
- seconds: case.1,
- nanos: case.2,
- };
- test_timestamp.normalize();
-
- assert_eq!(
- test_timestamp,
- crate::Timestamp {
- seconds: case.3,
- nanos: case.4,
- },
- "test case on line {} doesn't match",
- case.0,
- );
- }
- }
-}
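For reference, the removed module's public surface boils down to calls like the following (a sketch assuming the std feature, mirroring the normalize table and the SystemTime round-trip proptest above):

use prost_types::Timestamp;
use std::time::{SystemTime, UNIX_EPOCH};

fn main() {
    // normalize() carries overflowing nanos into seconds, per the table above.
    let mut ts = Timestamp { seconds: 0, nanos: 1_000_000_001 };
    ts.normalize();
    assert_eq!((ts.seconds, ts.nanos), (1, 1));

    // Round-trip through SystemTime, as the proptests exercise.
    let now = Timestamp::from(SystemTime::now());
    let back = SystemTime::try_from(now).unwrap();
    assert!(back.duration_since(UNIX_EPOCH).is_ok());
}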
diff --git a/vendor/prost-types-0.12.6/src/type_url.rs b/vendor/prost-types-0.12.6/src/type_url.rs
deleted file mode 100644
index 01597554..00000000
--- a/vendor/prost-types-0.12.6/src/type_url.rs
+++ /dev/null
@@ -1,70 +0,0 @@
-use super::*;
-
-/// URL/resource name that uniquely identifies the type of the serialized protocol buffer message,
-/// e.g. `type.googleapis.com/google.protobuf.Duration`.
-///
-/// This string must contain at least one "/" character.
-///
-/// The last segment of the URL's path must represent the fully qualified name of the type (as in
-/// `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading "." is
-/// not accepted).
-///
-/// If no scheme is provided, `https` is assumed.
-///
-/// Schemes other than `http`, `https` (or the empty scheme) might be used with implementation
-/// specific semantics.
-#[derive(Debug, Eq, PartialEq)]
-pub(crate) struct TypeUrl<'a> {
- /// Fully qualified name of the type, e.g. `google.protobuf.Duration`
- pub(crate) full_name: &'a str,
-}
-
-impl<'a> TypeUrl<'a> {
- pub(crate) fn new(s: &'a str) -> core::option::Option<Self> {
- // Must contain at least one "/" character.
- let slash_pos = s.rfind('/')?;
-
- // The last segment of the URL's path must represent the fully qualified name
- // of the type (as in `path/google.protobuf.Duration`)
- let full_name = s.get((slash_pos + 1)..)?;
-
- // The name should be in a canonical form (e.g., leading "." is not accepted).
- if full_name.starts_with('.') {
- return None;
- }
-
- Some(Self { full_name })
- }
-}
-
-/// Compute the type URL for the given `google.protobuf` type, using `type.googleapis.com` as the
-/// authority for the URL.
-pub(crate) fn type_url_for<T: Name>() -> String {
- format!("type.googleapis.com/{}.{}", T::PACKAGE, T::NAME)
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- #[test]
- fn check_type_url_parsing() {
- let example_type_name = "google.protobuf.Duration";
-
- let url = TypeUrl::new("type.googleapis.com/google.protobuf.Duration").unwrap();
- assert_eq!(url.full_name, example_type_name);
-
- let full_url =
- TypeUrl::new("https://type.googleapis.com/google.protobuf.Duration").unwrap();
- assert_eq!(full_url.full_name, example_type_name);
-
- let relative_url = TypeUrl::new("/google.protobuf.Duration").unwrap();
- assert_eq!(relative_url.full_name, example_type_name);
-
- // The name should be in a canonical form (e.g., leading "." is not accepted).
- assert_eq!(TypeUrl::new("/.google.protobuf.Duration"), None);
-
- // Must contain at least one "/" character.
- assert_eq!(TypeUrl::new("google.protobuf.Duration"), None);
- }
-}
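TypeUrl itself is crate-private, but the Name impls shown earlier route through type_url_for, so the observable behaviour is roughly this (a sketch assuming prost 0.12's public Name trait):

use prost::Name;
use prost_types::Timestamp;

fn main() {
    // Matches the parsing rules tested above: authority + '/' + full name.
    assert_eq!(
        Timestamp::type_url(),
        "type.googleapis.com/google.protobuf.Timestamp"
    );
}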
diff --git a/vendor/prost-types/.cargo-checksum.json b/vendor/prost-types/.cargo-checksum.json
index bf8a3f61..2cb8563d 100644
--- a/vendor/prost-types/.cargo-checksum.json
+++ b/vendor/prost-types/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"e1cfd758cb12f3c56c2949576807e9292be2a7c86c4069a2e3e1b5baba9c49e6","Cargo.toml":"eb1123ed889e562db77e8202f0453f25b8bdc0e6f83126b842cf243dd548634f","LICENSE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","README.md":"92c48b3c8b86a8593acb2b314d312f30f76e2e039ca8b70076d6d8771cb54c0f","src/any.rs":"663ad6e55a0e15ace05ab66df21555e5fa81258ca5e9624e3cacb1ec56277b72","src/compiler.rs":"dabd4665e2044f4002fab985371eb684ba41563c4c36775c1d03b3fc5cc6c924","src/conversions.rs":"b0cf9abd916d13a9df253264f3749c5329ba867d2ed21d7fe874c3e2ceefee07","src/datetime.rs":"2041a1c269b04db2d2eea1d9a7136611b657d843d231e836e9876055e52d3abe","src/duration.rs":"2e269f881aa13a492609f6dc6d32b60eeaf309d67da60537922d12d5a233237d","src/lib.rs":"0300ee6d7ea12fd301273d2ceb9d8336385a26df6ac6a23c8c18bf468d5ada6c","src/protobuf.rs":"1f8eac088779317d0a6982ddf95d09f6b2be024d30835d2f7df00a79fc36df72","src/timestamp.rs":"d0ffb032e8cbd07f2ddef099def27c252a3bcc7c4f44f6b9c310230ac48c9f5c","src/type_url.rs":"dc69abaa0ebaaaa58ea81dfba6712bc5be00c35bfff5a3da80b5df0c49c7725f"},"package":"52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16"} \ No newline at end of file
+{"files":{"Cargo.toml":"cadf2579e0a1e10bf59134e5341555e9c8557ccccf2f390e4ef2320bb76de718","LICENSE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","README.md":"05bf3eb034345e386d76f957e8ccdb26960cf5f78c050804b074ef3f01f92477","src/any.rs":"663ad6e55a0e15ace05ab66df21555e5fa81258ca5e9624e3cacb1ec56277b72","src/compiler.rs":"cdeb17a1df6f555c358dbfb0270f2a151ad759cae42be4a66af05b686f517d0f","src/datetime.rs":"df4fd7aee4d6fb5e28850d797cbd490ba9446a2e3fd6bbec015baf8a7ccfe4e4","src/duration.rs":"7378442f6ae52b9799fd114b4c6be6edc1bc41834b1f5b56f98e3c0b7037a6f2","src/lib.rs":"e3c05512b314b7a9b64d302f1a240830553cd1f28629b9ad439591f49935af41","src/protobuf.rs":"5d92f618bb6ad3ac3939a182a4ff8c106c90ec6588054738b0e65caaf1e90e76","src/timestamp.rs":"8eaa6dd53633f2a05839e5e5790da7adcb50ed67fb2ceb5358e2440080492be8","src/type_url.rs":"dc69abaa0ebaaaa58ea81dfba6712bc5be00c35bfff5a3da80b5df0c49c7725f"},"package":"9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0"} \ No newline at end of file
diff --git a/vendor/prost-types/Cargo.lock b/vendor/prost-types/Cargo.lock
deleted file mode 100644
index a154a15e..00000000
--- a/vendor/prost-types/Cargo.lock
+++ /dev/null
@@ -1,471 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-version = 3
-
-[[package]]
-name = "anyhow"
-version = "1.0.93"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
-
-[[package]]
-name = "arbitrary"
-version = "1.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223"
-dependencies = [
- "derive_arbitrary",
-]
-
-[[package]]
-name = "autocfg"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
-
-[[package]]
-name = "bit-set"
-version = "0.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
-dependencies = [
- "bit-vec",
-]
-
-[[package]]
-name = "bit-vec"
-version = "0.6.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
-
-[[package]]
-name = "bitflags"
-version = "2.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
-
-[[package]]
-name = "byteorder"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
-
-[[package]]
-name = "bytes"
-version = "1.8.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da"
-
-[[package]]
-name = "cfg-if"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
-
-[[package]]
-name = "derive_arbitrary"
-version = "1.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "either"
-version = "1.13.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
-
-[[package]]
-name = "errno"
-version = "0.3.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba"
-dependencies = [
- "libc",
- "windows-sys 0.52.0",
-]
-
-[[package]]
-name = "fastrand"
-version = "2.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4"
-
-[[package]]
-name = "fnv"
-version = "1.0.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
-
-[[package]]
-name = "getrandom"
-version = "0.2.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
-dependencies = [
- "cfg-if",
- "libc",
- "wasi",
-]
-
-[[package]]
-name = "itertools"
-version = "0.10.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
-dependencies = [
- "either",
-]
-
-[[package]]
-name = "lazy_static"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
-
-[[package]]
-name = "libc"
-version = "0.2.164"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "433bfe06b8c75da9b2e3fbea6e5329ff87748f0b144ef75306e674c3f6f7c13f"
-
-[[package]]
-name = "libm"
-version = "0.2.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa"
-
-[[package]]
-name = "linux-raw-sys"
-version = "0.4.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
-
-[[package]]
-name = "num-traits"
-version = "0.2.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
-dependencies = [
- "autocfg",
- "libm",
-]
-
-[[package]]
-name = "once_cell"
-version = "1.20.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
-
-[[package]]
-name = "ppv-lite86"
-version = "0.2.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
-dependencies = [
- "zerocopy",
-]
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.92"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
-dependencies = [
- "unicode-ident",
-]
-
-[[package]]
-name = "proptest"
-version = "1.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d"
-dependencies = [
- "bit-set",
- "bit-vec",
- "bitflags",
- "lazy_static",
- "num-traits",
- "rand",
- "rand_chacha",
- "rand_xorshift",
- "regex-syntax",
- "rusty-fork",
- "tempfile",
- "unarray",
-]
-
-[[package]]
-name = "prost"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
-dependencies = [
- "bytes",
- "prost-derive",
-]
-
-[[package]]
-name = "prost-derive"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
-dependencies = [
- "anyhow",
- "itertools",
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "prost-types"
-version = "0.13.5"
-dependencies = [
- "arbitrary",
- "proptest",
- "prost",
-]
-
-[[package]]
-name = "quick-error"
-version = "1.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
-
-[[package]]
-name = "quote"
-version = "1.0.37"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
-dependencies = [
- "proc-macro2",
-]
-
-[[package]]
-name = "rand"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
-dependencies = [
- "libc",
- "rand_chacha",
- "rand_core",
-]
-
-[[package]]
-name = "rand_chacha"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
-dependencies = [
- "ppv-lite86",
- "rand_core",
-]
-
-[[package]]
-name = "rand_core"
-version = "0.6.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
-dependencies = [
- "getrandom",
-]
-
-[[package]]
-name = "rand_xorshift"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
-dependencies = [
- "rand_core",
-]
-
-[[package]]
-name = "regex-syntax"
-version = "0.8.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
-
-[[package]]
-name = "rustix"
-version = "0.38.41"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6"
-dependencies = [
- "bitflags",
- "errno",
- "libc",
- "linux-raw-sys",
- "windows-sys 0.52.0",
-]
-
-[[package]]
-name = "rusty-fork"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
-dependencies = [
- "fnv",
- "quick-error",
- "tempfile",
- "wait-timeout",
-]
-
-[[package]]
-name = "syn"
-version = "2.0.89"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
-
-[[package]]
-name = "tempfile"
-version = "3.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c"
-dependencies = [
- "cfg-if",
- "fastrand",
- "once_cell",
- "rustix",
- "windows-sys 0.59.0",
-]
-
-[[package]]
-name = "unarray"
-version = "0.1.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
-
-[[package]]
-name = "unicode-ident"
-version = "1.0.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
-
-[[package]]
-name = "wait-timeout"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
-dependencies = [
- "libc",
-]
-
-[[package]]
-name = "wasi"
-version = "0.11.0+wasi-snapshot-preview1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
-
-[[package]]
-name = "windows-sys"
-version = "0.52.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
-dependencies = [
- "windows-targets",
-]
-
-[[package]]
-name = "windows-sys"
-version = "0.59.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
-dependencies = [
- "windows-targets",
-]
-
-[[package]]
-name = "windows-targets"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
-dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_gnullvm",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
-]
-
-[[package]]
-name = "windows_aarch64_gnullvm"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
-
-[[package]]
-name = "windows_aarch64_msvc"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
-
-[[package]]
-name = "windows_i686_gnu"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
-
-[[package]]
-name = "windows_i686_gnullvm"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
-
-[[package]]
-name = "windows_i686_msvc"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
-
-[[package]]
-name = "windows_x86_64_gnu"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
-
-[[package]]
-name = "windows_x86_64_gnullvm"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
-
-[[package]]
-name = "windows_x86_64_msvc"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
-
-[[package]]
-name = "zerocopy"
-version = "0.7.35"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
-dependencies = [
- "byteorder",
- "zerocopy-derive",
-]
-
-[[package]]
-name = "zerocopy-derive"
-version = "0.7.35"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
diff --git a/vendor/prost-types/Cargo.toml b/vendor/prost-types/Cargo.toml
index d5a51994..0c71fd73 100644
--- a/vendor/prost-types/Cargo.toml
+++ b/vendor/prost-types/Cargo.toml
@@ -11,50 +11,32 @@
[package]
edition = "2021"
-rust-version = "1.71.1"
+rust-version = "1.70"
name = "prost-types"
-version = "0.13.5"
+version = "0.12.6"
authors = [
"Dan Burkert <dan@danburkert.com>",
"Lucio Franco <luciofranco14@gmail.com>",
"Casper Meijn <casper@meijn.net>",
"Tokio Contributors <team@tokio.rs>",
]
-build = false
-autolib = false
-autobins = false
-autoexamples = false
-autotests = false
-autobenches = false
description = "Prost definitions of Protocol Buffers well known types."
+documentation = "https://docs.rs/prost-types"
readme = "README.md"
license = "Apache-2.0"
repository = "https://github.com/tokio-rs/prost"
-[features]
-arbitrary = ["dep:arbitrary"]
-default = ["std"]
-std = ["prost/std"]
-
[lib]
-name = "prost_types"
-path = "src/lib.rs"
doctest = false
-[dependencies.arbitrary]
-version = "1.4"
-features = ["derive"]
-optional = true
-
[dependencies.prost]
-version = "0.13.5"
+version = "0.12.6"
features = ["prost-derive"]
default-features = false
[dev-dependencies.proptest]
version = "1"
-[lints.rust.unexpected_cfgs]
-level = "warn"
-priority = 0
-check-cfg = ["cfg(kani)"]
+[features]
+default = ["std"]
+std = ["prost/std"]
diff --git a/vendor/prost-types/README.md b/vendor/prost-types/README.md
index 5e37ad61..8724577b 100644
--- a/vendor/prost-types/README.md
+++ b/vendor/prost-types/README.md
@@ -11,7 +11,7 @@ information about well known types.
## License
`prost-types` is distributed under the terms of the Apache License (Version 2.0).
-`prost-types` includes code imported from the Protocol Buffers project, which is
+`prost-types` includes code imported from the Protocol Buffers projet, which is
included under its original ([BSD][2]) license.
[2]: https://github.com/google/protobuf/blob/master/LICENSE
diff --git a/vendor/prost-types/src/compiler.rs b/vendor/prost-types/src/compiler.rs
index d274aeba..0a3b4680 100644
--- a/vendor/prost-types/src/compiler.rs
+++ b/vendor/prost-types/src/compiler.rs
@@ -1,6 +1,6 @@
// This file is @generated by prost-build.
/// The version number of protocol compiler.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Version {
#[prost(int32, optional, tag = "1")]
@@ -15,7 +15,7 @@ pub struct Version {
pub suffix: ::core::option::Option<::prost::alloc::string::String>,
}
/// An encoded CodeGeneratorRequest is written to the plugin's stdin.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CodeGeneratorRequest {
/// The .proto files that were explicitly listed on the command-line. The
@@ -47,7 +47,7 @@ pub struct CodeGeneratorRequest {
pub compiler_version: ::core::option::Option<Version>,
}
/// The plugin writes an encoded CodeGeneratorResponse to stdout.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct CodeGeneratorResponse {
/// Error message. If non-empty, code generation failed. The plugin process
@@ -70,7 +70,7 @@ pub struct CodeGeneratorResponse {
/// Nested message and enum types in `CodeGeneratorResponse`.
pub mod code_generator_response {
/// Represents a single generated file.
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+ #[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct File {
/// The file name, relative to the output directory. The name must not
@@ -135,7 +135,6 @@ pub mod code_generator_response {
pub generated_code_info: ::core::option::Option<super::super::GeneratedCodeInfo>,
}
/// Sync with code_generator.h.
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(
Clone,
Copy,
@@ -159,8 +158,8 @@ pub mod code_generator_response {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::None => "FEATURE_NONE",
- Self::Proto3Optional => "FEATURE_PROTO3_OPTIONAL",
+ Feature::None => "FEATURE_NONE",
+ Feature::Proto3Optional => "FEATURE_PROTO3_OPTIONAL",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
diff --git a/vendor/prost-types/src/conversions.rs b/vendor/prost-types/src/conversions.rs
deleted file mode 100644
index cbd5c1fc..00000000
--- a/vendor/prost-types/src/conversions.rs
+++ /dev/null
@@ -1,62 +0,0 @@
-use crate::protobuf::Value;
-use crate::value;
-use crate::String;
-use crate::Vec;
-use ::prost::alloc::collections::BTreeMap;
-
-impl From<value::Kind> for Value {
- fn from(value: value::Kind) -> Self {
- Value { kind: Some(value) }
- }
-}
-
-macro_rules! impl_number_value {
- ($t: ty) => {
- impl From<$t> for Value {
- fn from(value: $t) -> Self {
- value::Kind::NumberValue(value.into()).into()
- }
- }
- };
-}
-
-impl_number_value!(u8);
-impl_number_value!(u16);
-impl_number_value!(u32);
-
-impl_number_value!(i8);
-impl_number_value!(i16);
-impl_number_value!(i32);
-
-impl_number_value!(f32);
-impl_number_value!(f64);
-
-impl From<bool> for Value {
- fn from(value: bool) -> Self {
- value::Kind::BoolValue(value).into()
- }
-}
-
-impl From<String> for Value {
- fn from(value: String) -> Self {
- value::Kind::StringValue(value).into()
- }
-}
-
-impl From<&str> for Value {
- fn from(value: &str) -> Self {
- value::Kind::StringValue(value.into()).into()
- }
-}
-
-impl From<Vec<Value>> for Value {
- fn from(value: Vec<Value>) -> Self {
- value::Kind::ListValue(crate::protobuf::ListValue { values: value }).into()
- }
-}
-
-impl From<BTreeMap<String, Value>> for Value {
- fn from(value: BTreeMap<String, Value>) -> Self {
- value::Kind::StructValue(crate::protobuf::Struct { fields: value }).into()
- }
-}
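The deleted conversions.rs belongs to the newer 0.13 API; a sketch of the ergonomics it provided (this would not compile against the 0.12.6 crate restored by this change):

use prost_types::value::Kind;
use prost_types::Value;

fn main() {
    let text: Value = "hello".into();             // From<&str>       -> Kind::StringValue
    let number: Value = 3.5f64.into();            // From<f64>        -> Kind::NumberValue
    let list: Value = vec![text, number].into();  // From<Vec<Value>> -> Kind::ListValue
    assert!(matches!(list.kind, Some(Kind::ListValue(_))));
}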
diff --git a/vendor/prost-types/src/datetime.rs b/vendor/prost-types/src/datetime.rs
index 4c946775..2435ffe7 100644
--- a/vendor/prost-types/src/datetime.rs
+++ b/vendor/prost-types/src/datetime.rs
@@ -5,7 +5,6 @@ use core::fmt;
use crate::Duration;
use crate::Timestamp;
-use crate::TimestampError;
/// A point in time, represented as a date and time in the UTC timezone.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
@@ -261,10 +260,8 @@ fn parse_nanos(s: &str) -> Option<(u32, &str)> {
// Parse the nanoseconds, if present.
let (nanos, s) = if let Some(s) = parse_char(s, b'.') {
- let (mut digits, s) = parse_digits(s);
- if digits.len() > 9 {
- digits = digits.split_at(9).0;
- }
+ let (digits, s) = parse_digits(s);
+ ensure!(digits.len() <= 9);
let nanos = 10u32.pow(9 - digits.len() as u32) * digits.parse::<u32>().ok()?;
(nanos, s)
} else {
@@ -310,6 +307,9 @@ fn parse_offset(s: &str) -> Option<(i8, i8, &str)> {
(minute, s)
};
+ // '-00:00' indicates an unknown local offset.
+ ensure!(is_positive || hour > 0 || minute > 0);
+
ensure!(hour < 24 && minute < 60);
let hour = hour as i8;
@@ -327,12 +327,7 @@ fn parse_offset(s: &str) -> Option<(i8, i8, &str)> {
/// string.
fn parse_two_digit_numeric(s: &str) -> Option<(u8, &str)> {
debug_assert!(s.is_ascii());
- if s.len() < 2 {
- return None;
- }
- if s.starts_with('+') {
- return None;
- }
+
let (digits, s) = s.split_at(2);
Some((digits.parse().ok()?, s))
}
@@ -425,8 +420,8 @@ pub(crate) fn year_to_seconds(year: i64) -> (i128, bool) {
let is_leap;
let year = year - 1900;
- // Fast path for years 1901 - 2038.
- if (1..=138).contains(&year) {
+ // Fast path for years 1900 - 2038.
+ if year as u64 <= 138 {
let mut leaps: i64 = (year - 68) >> 2;
if (year - 68).trailing_zeros() >= 2 {
leaps -= 1;
@@ -502,7 +497,9 @@ pub(crate) fn parse_timestamp(s: &str) -> Option<Timestamp> {
..DateTime::default()
};
- return Timestamp::try_from(date_time).ok();
+ ensure!(date_time.is_valid());
+
+ return Some(Timestamp::from(date_time));
}
// Accept either 'T' or ' ' as delimiter between date and time.
@@ -532,7 +529,9 @@ pub(crate) fn parse_timestamp(s: &str) -> Option<Timestamp> {
nanos,
};
- let Timestamp { seconds, nanos } = Timestamp::try_from(date_time).ok()?;
+ ensure!(date_time.is_valid());
+
+ let Timestamp { seconds, nanos } = Timestamp::from(date_time);
let seconds =
seconds.checked_sub(i64::from(offset_hour) * 3600 + i64::from(offset_minute) * 60)?;
@@ -571,19 +570,14 @@ pub(crate) fn parse_duration(s: &str) -> Option<Duration> {
Some(Duration { seconds, nanos })
}
-impl TryFrom<DateTime> for Timestamp {
- type Error = TimestampError;
-
- fn try_from(date_time: DateTime) -> Result<Timestamp, TimestampError> {
- if !date_time.is_valid() {
- return Err(TimestampError::InvalidDateTime);
- }
+impl From<DateTime> for Timestamp {
+ fn from(date_time: DateTime) -> Timestamp {
let seconds = date_time_to_seconds(&date_time);
let nanos = date_time.nanos;
- Ok(Timestamp {
+ Timestamp {
seconds,
nanos: nanos as i32,
- })
+ }
}
}
@@ -591,7 +585,6 @@ impl TryFrom<DateTime> for Timestamp {
mod tests {
use super::*;
use proptest::prelude::*;
- use prost::alloc::format;
#[test]
fn test_min_max() {
@@ -621,7 +614,7 @@ mod tests {
};
assert_eq!(
expected,
- format!("{}", DateTime::from(timestamp)),
+ format!("{}", DateTime::from(timestamp.clone())),
"timestamp: {:?}",
timestamp
);
@@ -736,8 +729,8 @@ mod tests {
// Leap day
assert_eq!(
- "2020-02-29T01:02:03.00Z".parse::<Timestamp>(),
- Timestamp::try_from(DateTime {
+ "2020-02-29T01:02:03.00Z".parse::<Timestamp>().unwrap(),
+ Timestamp::from(DateTime {
year: 2020,
month: 2,
day: 29,
@@ -745,7 +738,7 @@ mod tests {
minute: 2,
second: 3,
nanos: 0,
- })
+ }),
);
// Test extensions to RFC 3339.
@@ -776,30 +769,6 @@ mod tests {
"2020-06-15 00:01:02.123 +0800".parse::<Timestamp>(),
Timestamp::date_time_nanos(2020, 6, 14, 16, 1, 2, 123_000_000),
);
-
- // Regression tests
- assert_eq!(
- "-11111111-z".parse::<Timestamp>(),
- Err(crate::TimestampError::ParseFailure),
- );
- assert_eq!(
- "1900-01-10".parse::<Timestamp>(),
- Ok(Timestamp {
- seconds: -2208211200,
- nanos: 0
- }),
- );
- // Leading '+' in two-digit numbers
- assert_eq!(
- "19+1-+2-+3T+4:+5:+6Z".parse::<Timestamp>(),
- Err(crate::TimestampError::ParseFailure),
- );
-
- // Very long seconds fraction
- assert_eq!(
- "1343-08-16 18:33:44.1666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666666660404z".parse::<Timestamp>(),
- Timestamp::date_time_nanos(1343, 8, 16, 18, 33, 44, 166_666_666),
- );
}
#[test]
@@ -860,94 +829,6 @@ mod tests {
assert!("1️⃣s".parse::<Duration>().is_err());
}
- #[test]
- fn check_invalid_datetimes() {
- assert_eq!(
- Timestamp::try_from(DateTime {
- year: i64::from_le_bytes([178, 2, 0, 0, 0, 0, 0, 128]),
- month: 2,
- day: 2,
- hour: 8,
- minute: 58,
- second: 8,
- nanos: u32::from_le_bytes([0, 0, 0, 50]),
- }),
- Err(TimestampError::InvalidDateTime)
- );
- assert_eq!(
- Timestamp::try_from(DateTime {
- year: i64::from_le_bytes([132, 7, 0, 0, 0, 0, 0, 128]),
- month: 2,
- day: 2,
- hour: 8,
- minute: 58,
- second: 8,
- nanos: u32::from_le_bytes([0, 0, 0, 50]),
- }),
- Err(TimestampError::InvalidDateTime)
- );
- assert_eq!(
- Timestamp::try_from(DateTime {
- year: i64::from_le_bytes([80, 96, 32, 240, 99, 0, 32, 180]),
- month: 1,
- day: 18,
- hour: 19,
- minute: 26,
- second: 8,
- nanos: u32::from_le_bytes([0, 0, 0, 50]),
- }),
- Err(TimestampError::InvalidDateTime)
- );
- assert_eq!(
- Timestamp::try_from(DateTime {
- year: DateTime::MIN.year - 1,
- month: 0,
- day: 0,
- hour: 0,
- minute: 0,
- second: 0,
- nanos: 0,
- }),
- Err(TimestampError::InvalidDateTime)
- );
- assert_eq!(
- Timestamp::try_from(DateTime {
- year: i64::MIN,
- month: 0,
- day: 0,
- hour: 0,
- minute: 0,
- second: 0,
- nanos: 0,
- }),
- Err(TimestampError::InvalidDateTime)
- );
- assert_eq!(
- Timestamp::try_from(DateTime {
- year: DateTime::MAX.year + 1,
- month: u8::MAX,
- day: u8::MAX,
- hour: u8::MAX,
- minute: u8::MAX,
- second: u8::MAX,
- nanos: u32::MAX,
- }),
- Err(TimestampError::InvalidDateTime)
- );
- assert_eq!(
- Timestamp::try_from(DateTime {
- year: i64::MAX,
- month: u8::MAX,
- day: u8::MAX,
- hour: u8::MAX,
- minute: u8::MAX,
- second: u8::MAX,
- nanos: u32::MAX,
- }),
- Err(TimestampError::InvalidDateTime)
- );
- }
-
proptest! {
#[cfg(feature = "std")]
#[test]
@@ -979,47 +860,5 @@ mod tests {
"{}", duration.to_string()
);
}
-
- #[test]
- fn check_timestamp_roundtrip_with_date_time(
- seconds in i64::arbitrary(),
- nanos in i32::arbitrary(),
- ) {
- let timestamp = Timestamp { seconds, nanos };
- let date_time = DateTime::from(timestamp);
- let roundtrip = Timestamp::try_from(date_time).unwrap();
-
- prop_assert_eq!(timestamp.normalized(), roundtrip);
- }
-
- #[test]
- fn check_date_time_roundtrip_with_timestamp(
- year in i64::arbitrary(),
- month in u8::arbitrary(),
- day in u8::arbitrary(),
- hour in u8::arbitrary(),
- minute in u8::arbitrary(),
- second in u8::arbitrary(),
- nanos in u32::arbitrary(),
- ) {
- let date_time = DateTime {
- year,
- month,
- day,
- hour,
- minute,
- second,
- nanos
- };
-
- if date_time.is_valid() {
- let timestamp = Timestamp::try_from(date_time).unwrap();
- let roundtrip = DateTime::from(timestamp);
-
- prop_assert_eq!(date_time, roundtrip);
- } else {
- prop_assert_eq!(Timestamp::try_from(date_time), Err(TimestampError::InvalidDateTime));
- }
- }
}
}
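The behavioural difference implied by the hunks above can be summarised in two assertions (a sketch against the 0.12.6 parser; the second input string is my own):

use prost_types::Timestamp;

fn main() {
    // The space-delimiter / numeric-offset extension tested above still parses.
    assert!("2020-06-15 00:01:02.123 +0800".parse::<Timestamp>().is_ok());
    // 0.12.6 rejects more than nine fractional digits instead of truncating them.
    assert!("2020-06-15T00:01:02.1234567891Z".parse::<Timestamp>().is_err());
}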
diff --git a/vendor/prost-types/src/duration.rs b/vendor/prost-types/src/duration.rs
index 3ce993ee..60071693 100644
--- a/vendor/prost-types/src/duration.rs
+++ b/vendor/prost-types/src/duration.rs
@@ -58,17 +58,6 @@ impl Duration {
// debug_assert!(self.seconds >= -315_576_000_000 && self.seconds <= 315_576_000_000,
// "invalid duration: {:?}", self);
}
-
- /// Returns a normalized copy of the duration to a canonical format.
- ///
- /// Based on [`google::protobuf::util::CreateNormalized`][1].
- ///
- /// [1]: https://github.com/google/protobuf/blob/v3.3.2/src/google/protobuf/util/time_util.cc#L79-L100
- pub fn normalized(&self) -> Self {
- let mut result = *self;
- result.normalize();
- result
- }
}
impl Name for Duration {
@@ -88,8 +77,9 @@ impl TryFrom<time::Duration> for Duration {
let seconds = i64::try_from(duration.as_secs()).map_err(|_| DurationError::OutOfRange)?;
let nanos = duration.subsec_nanos() as i32;
- let duration = Duration { seconds, nanos };
- Ok(duration.normalized())
+ let mut duration = Duration { seconds, nanos };
+ duration.normalize();
+ Ok(duration)
}
}
@@ -115,8 +105,9 @@ impl TryFrom<Duration> for time::Duration {
impl fmt::Display for Duration {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let d = self.normalized();
- if self.seconds < 0 || self.nanos < 0 {
+ let mut d = self.clone();
+ d.normalize();
+ if self.seconds < 0 && self.nanos < 0 {
write!(f, "-")?;
}
write!(f, "{}", d.seconds.abs())?;
@@ -136,6 +127,7 @@ impl fmt::Display for Duration {
}
/// A duration handling error.
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Debug, PartialEq)]
#[non_exhaustive]
pub enum DurationError {
@@ -182,163 +174,68 @@ impl FromStr for Duration {
datetime::parse_duration(s).ok_or(DurationError::ParseFailure)
}
}
-
-#[cfg(kani)]
-mod proofs {
- use super::*;
-
- #[cfg(feature = "std")]
- #[kani::proof]
- fn check_duration_roundtrip() {
- let seconds = kani::any();
- let nanos = kani::any();
- kani::assume(nanos < 1_000_000_000);
- let std_duration = std::time::Duration::new(seconds, nanos);
- let Ok(prost_duration) = Duration::try_from(std_duration) else {
- // Test case not valid: duration out of range
- return;
- };
- assert_eq!(
- time::Duration::try_from(prost_duration).unwrap(),
- std_duration
- );
-
- if std_duration != time::Duration::default() {
- let neg_prost_duration = Duration {
- seconds: -prost_duration.seconds,
- nanos: -prost_duration.nanos,
- };
-
- assert!(matches!(
- time::Duration::try_from(neg_prost_duration),
- Err(DurationError::NegativeDuration(d)) if d == std_duration,
- ))
- }
- }
-
- #[cfg(feature = "std")]
- #[kani::proof]
- fn check_duration_roundtrip_nanos() {
- let seconds = 0;
- let nanos = kani::any();
- let std_duration = std::time::Duration::new(seconds, nanos);
- let Ok(prost_duration) = Duration::try_from(std_duration) else {
- // Test case not valid: duration out of range
- return;
- };
- assert_eq!(
- time::Duration::try_from(prost_duration).unwrap(),
- std_duration
- );
-
- if std_duration != time::Duration::default() {
- let neg_prost_duration = Duration {
- seconds: -prost_duration.seconds,
- nanos: -prost_duration.nanos,
- };
-
- assert!(matches!(
- time::Duration::try_from(neg_prost_duration),
- Err(DurationError::NegativeDuration(d)) if d == std_duration,
- ))
- }
- }
-}
-
#[cfg(test)]
mod tests {
use super::*;
#[cfg(feature = "std")]
- #[test]
- fn test_duration_from_str() {
- assert_eq!(
- Duration::from_str("0s"),
- Ok(Duration {
- seconds: 0,
- nanos: 0
- })
- );
- assert_eq!(
- Duration::from_str("123s"),
- Ok(Duration {
- seconds: 123,
- nanos: 0
- })
- );
- assert_eq!(
- Duration::from_str("0.123s"),
- Ok(Duration {
- seconds: 0,
- nanos: 123_000_000
- })
- );
- assert_eq!(
- Duration::from_str("-123s"),
- Ok(Duration {
- seconds: -123,
- nanos: 0
- })
- );
- assert_eq!(
- Duration::from_str("-0.123s"),
- Ok(Duration {
- seconds: 0,
- nanos: -123_000_000
- })
- );
- assert_eq!(
- Duration::from_str("22041211.6666666666666s"),
- Ok(Duration {
- seconds: 22041211,
- nanos: 666_666_666
- })
- );
- }
+ use proptest::prelude::*;
#[cfg(feature = "std")]
- #[test]
- fn test_format_duration() {
- assert_eq!(
- "0s",
- Duration {
- seconds: 0,
- nanos: 0
- }
- .to_string()
- );
- assert_eq!(
- "123s",
- Duration {
- seconds: 123,
- nanos: 0
- }
- .to_string()
- );
- assert_eq!(
- "0.123s",
- Duration {
- seconds: 0,
- nanos: 123_000_000
- }
- .to_string()
- );
- assert_eq!(
- "-123s",
- Duration {
- seconds: -123,
- nanos: 0
+ proptest! {
+ #[test]
+ fn check_duration_roundtrip(
+ seconds in u64::arbitrary(),
+ nanos in 0u32..1_000_000_000u32,
+ ) {
+ let std_duration = time::Duration::new(seconds, nanos);
+ let prost_duration = match Duration::try_from(std_duration) {
+ Ok(duration) => duration,
+ Err(_) => return Err(TestCaseError::reject("duration out of range")),
+ };
+ prop_assert_eq!(time::Duration::try_from(prost_duration.clone()).unwrap(), std_duration);
+
+ if std_duration != time::Duration::default() {
+ let neg_prost_duration = Duration {
+ seconds: -prost_duration.seconds,
+ nanos: -prost_duration.nanos,
+ };
+
+ prop_assert!(
+ matches!(
+ time::Duration::try_from(neg_prost_duration),
+ Err(DurationError::NegativeDuration(d)) if d == std_duration,
+ )
+ )
}
- .to_string()
- );
- assert_eq!(
- "-0.123s",
- Duration {
- seconds: 0,
- nanos: -123_000_000
+ }
+
+ #[test]
+ fn check_duration_roundtrip_nanos(
+ nanos in u32::arbitrary(),
+ ) {
+ let seconds = 0;
+ let std_duration = std::time::Duration::new(seconds, nanos);
+ let prost_duration = match Duration::try_from(std_duration) {
+ Ok(duration) => duration,
+ Err(_) => return Err(TestCaseError::reject("duration out of range")),
+ };
+ prop_assert_eq!(time::Duration::try_from(prost_duration.clone()).unwrap(), std_duration);
+
+ if std_duration != time::Duration::default() {
+ let neg_prost_duration = Duration {
+ seconds: -prost_duration.seconds,
+ nanos: -prost_duration.nanos,
+ };
+
+ prop_assert!(
+ matches!(
+ time::Duration::try_from(neg_prost_duration),
+ Err(DurationError::NegativeDuration(d)) if d == std_duration,
+ )
+ )
}
- .to_string()
- );
+ }
}
#[cfg(feature = "std")]
@@ -416,13 +313,14 @@ mod tests {
];
for case in cases.iter() {
- let test_duration = Duration {
+ let mut test_duration = Duration {
seconds: case.1,
nanos: case.2,
};
+ test_duration.normalize();
assert_eq!(
- test_duration.normalized(),
+ test_duration,
Duration {
seconds: case.3,
nanos: case.4,
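The example-based Duration tests removed above reduce to round-trips like this sketch (parse values taken from the deleted assertions; the normalization result follows the same carry rule as the Timestamp table):

use prost_types::Duration;

fn main() {
    // FromStr, per the deleted test_duration_from_str cases.
    let d: Duration = "0.123s".parse().unwrap();
    assert_eq!((d.seconds, d.nanos), (0, 123_000_000));

    // In 0.12.6, normalization mutates in place, as the final hunk switches to.
    let mut d = Duration { seconds: 1, nanos: -1 };
    d.normalize();
    assert_eq!((d.seconds, d.nanos), (0, 999_999_999));
}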
diff --git a/vendor/prost-types/src/lib.rs b/vendor/prost-types/src/lib.rs
index b531c1b0..a2a94d43 100644
--- a/vendor/prost-types/src/lib.rs
+++ b/vendor/prost-types/src/lib.rs
@@ -1,4 +1,4 @@
-#![doc(html_root_url = "https://docs.rs/prost-types/0.13.5")]
+#![doc(html_root_url = "https://docs.rs/prost-types/0.12.6")]
//! Protocol Buffers well-known types.
//!
@@ -7,35 +7,8 @@
//!
//! See the [Protobuf reference][1] for more information about well-known types.
//!
-//! ## Any
-//!
-//! The well-known [`Any`] type contains an arbitrary serialized message along with a URL that
-//! describes the type of the serialized message. Every message that also implements [`Name`]
-//! can be serialized to and deserialized from [`Any`].
-//!
-//! ### Serialization
-//!
-//! A message can be serialized using [`Any::from_msg`].
-//!
-//! ```rust
-//! let message = Timestamp::date(2000, 1, 1).unwrap();
-//! let any = Any::from_msg(&message).unwrap();
-//! ```
-//!
-//! ### Deserialization
-//!
-//! A message can be deserialized using [`Any::to_msg`].
-//!
-//! ```rust
-//! # let message = Timestamp::date(2000, 1, 1).unwrap();
-//! # let any = Any::from_msg(&message).unwrap();
-//! #
-//! let message = any.to_msg::<Timestamp>().unwrap();
-//! ```
-//!
//! ## Feature Flags
//! - `std`: Enable integration with standard library. Disable this feature for `no_std` support. This feature is enabled by default.
-//! - `arbitrary`: Enable integration with crate `arbitrary`. All types on this crate will implement `trait Arbitrary`.
//!
//! [1]: https://developers.google.com/protocol-buffers/docs/reference/google.protobuf
@@ -49,6 +22,8 @@ mod protobuf;
use core::convert::TryFrom;
use core::fmt;
+use core::i32;
+use core::i64;
use core::str::FromStr;
use core::time;
@@ -78,5 +53,3 @@ pub use timestamp::TimestampError;
mod type_url;
pub(crate) use type_url::{type_url_for, TypeUrl};
-
-mod conversions;
diff --git a/vendor/prost-types/src/protobuf.rs b/vendor/prost-types/src/protobuf.rs
index b2426490..edc1361b 100644
--- a/vendor/prost-types/src/protobuf.rs
+++ b/vendor/prost-types/src/protobuf.rs
@@ -1,14 +1,14 @@
// This file is @generated by prost-build.
/// The protocol compiler can output a FileDescriptorSet containing the .proto
/// files it parses.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FileDescriptorSet {
#[prost(message, repeated, tag = "1")]
pub file: ::prost::alloc::vec::Vec<FileDescriptorProto>,
}
/// Describes a complete .proto file.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FileDescriptorProto {
/// file name, relative to root of source tree
@@ -50,7 +50,7 @@ pub struct FileDescriptorProto {
pub syntax: ::core::option::Option<::prost::alloc::string::String>,
}
/// Describes a message type.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct DescriptorProto {
#[prost(string, optional, tag = "1")]
@@ -78,7 +78,7 @@ pub struct DescriptorProto {
}
/// Nested message and enum types in `DescriptorProto`.
pub mod descriptor_proto {
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+ #[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExtensionRange {
/// Inclusive.
@@ -93,8 +93,8 @@ pub mod descriptor_proto {
/// Range of reserved tag numbers. Reserved tag numbers may not be used by
/// fields or extension ranges in the same message. Reserved ranges may
/// not overlap.
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
- #[derive(Clone, Copy, PartialEq, ::prost::Message)]
+ #[allow(clippy::derive_partial_eq_without_eq)]
+ #[derive(Clone, PartialEq, ::prost::Message)]
pub struct ReservedRange {
/// Inclusive.
#[prost(int32, optional, tag = "1")]
@@ -104,7 +104,7 @@ pub mod descriptor_proto {
pub end: ::core::option::Option<i32>,
}
}
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ExtensionRangeOptions {
/// The parser stores options it doesn't recognize here. See above.
@@ -112,7 +112,7 @@ pub struct ExtensionRangeOptions {
pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
}
/// Describes a field within a message.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FieldDescriptorProto {
#[prost(string, optional, tag = "1")]
@@ -181,7 +181,6 @@ pub struct FieldDescriptorProto {
}
/// Nested message and enum types in `FieldDescriptorProto`.
pub mod field_descriptor_proto {
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(
Clone,
Copy,
@@ -235,24 +234,24 @@ pub mod field_descriptor_proto {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::Double => "TYPE_DOUBLE",
- Self::Float => "TYPE_FLOAT",
- Self::Int64 => "TYPE_INT64",
- Self::Uint64 => "TYPE_UINT64",
- Self::Int32 => "TYPE_INT32",
- Self::Fixed64 => "TYPE_FIXED64",
- Self::Fixed32 => "TYPE_FIXED32",
- Self::Bool => "TYPE_BOOL",
- Self::String => "TYPE_STRING",
- Self::Group => "TYPE_GROUP",
- Self::Message => "TYPE_MESSAGE",
- Self::Bytes => "TYPE_BYTES",
- Self::Uint32 => "TYPE_UINT32",
- Self::Enum => "TYPE_ENUM",
- Self::Sfixed32 => "TYPE_SFIXED32",
- Self::Sfixed64 => "TYPE_SFIXED64",
- Self::Sint32 => "TYPE_SINT32",
- Self::Sint64 => "TYPE_SINT64",
+ Type::Double => "TYPE_DOUBLE",
+ Type::Float => "TYPE_FLOAT",
+ Type::Int64 => "TYPE_INT64",
+ Type::Uint64 => "TYPE_UINT64",
+ Type::Int32 => "TYPE_INT32",
+ Type::Fixed64 => "TYPE_FIXED64",
+ Type::Fixed32 => "TYPE_FIXED32",
+ Type::Bool => "TYPE_BOOL",
+ Type::String => "TYPE_STRING",
+ Type::Group => "TYPE_GROUP",
+ Type::Message => "TYPE_MESSAGE",
+ Type::Bytes => "TYPE_BYTES",
+ Type::Uint32 => "TYPE_UINT32",
+ Type::Enum => "TYPE_ENUM",
+ Type::Sfixed32 => "TYPE_SFIXED32",
+ Type::Sfixed64 => "TYPE_SFIXED64",
+ Type::Sint32 => "TYPE_SINT32",
+ Type::Sint64 => "TYPE_SINT64",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -280,7 +279,6 @@ pub mod field_descriptor_proto {
}
}
}
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(
Clone,
Copy,
@@ -306,9 +304,9 @@ pub mod field_descriptor_proto {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::Optional => "LABEL_OPTIONAL",
- Self::Required => "LABEL_REQUIRED",
- Self::Repeated => "LABEL_REPEATED",
+ Label::Optional => "LABEL_OPTIONAL",
+ Label::Required => "LABEL_REQUIRED",
+ Label::Repeated => "LABEL_REPEATED",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -323,7 +321,7 @@ pub mod field_descriptor_proto {
}
}
/// Describes a oneof.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct OneofDescriptorProto {
#[prost(string, optional, tag = "1")]
@@ -332,7 +330,7 @@ pub struct OneofDescriptorProto {
pub options: ::core::option::Option<OneofOptions>,
}
/// Describes an enum type.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EnumDescriptorProto {
#[prost(string, optional, tag = "1")]
@@ -361,8 +359,8 @@ pub mod enum_descriptor_proto {
/// Note that this is distinct from DescriptorProto.ReservedRange in that it
/// is inclusive such that it can appropriately represent the entire int32
/// domain.
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
- #[derive(Clone, Copy, PartialEq, ::prost::Message)]
+ #[allow(clippy::derive_partial_eq_without_eq)]
+ #[derive(Clone, PartialEq, ::prost::Message)]
pub struct EnumReservedRange {
/// Inclusive.
#[prost(int32, optional, tag = "1")]
@@ -373,7 +371,7 @@ pub mod enum_descriptor_proto {
}
}
/// Describes a value within an enum.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EnumValueDescriptorProto {
#[prost(string, optional, tag = "1")]
@@ -384,7 +382,7 @@ pub struct EnumValueDescriptorProto {
pub options: ::core::option::Option<EnumValueOptions>,
}
/// Describes a service.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ServiceDescriptorProto {
#[prost(string, optional, tag = "1")]
@@ -395,7 +393,7 @@ pub struct ServiceDescriptorProto {
pub options: ::core::option::Option<ServiceOptions>,
}
/// Describes a method of a service.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MethodDescriptorProto {
#[prost(string, optional, tag = "1")]
@@ -444,7 +442,7 @@ pub struct MethodDescriptorProto {
/// <https://developers.google.com/protocol-buffers/docs/proto#options>
/// If this turns out to be popular, a web service will be set up
/// to automatically assign option numbers.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FileOptions {
/// Sets the Java package where classes generated from this .proto will be
@@ -563,7 +561,6 @@ pub struct FileOptions {
/// Nested message and enum types in `FileOptions`.
pub mod file_options {
/// Generated classes can be optimized for speed or code size.
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(
Clone,
Copy,
@@ -593,9 +590,9 @@ pub mod file_options {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::Speed => "SPEED",
- Self::CodeSize => "CODE_SIZE",
- Self::LiteRuntime => "LITE_RUNTIME",
+ OptimizeMode::Speed => "SPEED",
+ OptimizeMode::CodeSize => "CODE_SIZE",
+ OptimizeMode::LiteRuntime => "LITE_RUNTIME",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -609,7 +606,7 @@ pub mod file_options {
}
}
}
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MessageOptions {
/// Set true to use the old proto1 MessageSet wire format for extensions.
@@ -670,7 +667,7 @@ pub struct MessageOptions {
#[prost(message, repeated, tag = "999")]
pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
}
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FieldOptions {
/// The ctype option instructs the C++ code generator to use a different
@@ -753,7 +750,6 @@ pub struct FieldOptions {
}
/// Nested message and enum types in `FieldOptions`.
pub mod field_options {
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(
Clone,
Copy,
@@ -779,9 +775,9 @@ pub mod field_options {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::String => "STRING",
- Self::Cord => "CORD",
- Self::StringPiece => "STRING_PIECE",
+ CType::String => "STRING",
+ CType::Cord => "CORD",
+ CType::StringPiece => "STRING_PIECE",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -794,7 +790,6 @@ pub mod field_options {
}
}
}
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(
Clone,
Copy,
@@ -822,9 +817,9 @@ pub mod field_options {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::JsNormal => "JS_NORMAL",
- Self::JsString => "JS_STRING",
- Self::JsNumber => "JS_NUMBER",
+ JsType::JsNormal => "JS_NORMAL",
+ JsType::JsString => "JS_STRING",
+ JsType::JsNumber => "JS_NUMBER",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -838,14 +833,14 @@ pub mod field_options {
}
}
}
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct OneofOptions {
/// The parser stores options it doesn't recognize here. See above.
#[prost(message, repeated, tag = "999")]
pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
}
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EnumOptions {
/// Set this option to true to allow mapping different tag names to the same
@@ -862,7 +857,7 @@ pub struct EnumOptions {
#[prost(message, repeated, tag = "999")]
pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
}
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EnumValueOptions {
/// Is this enum value deprecated?
@@ -875,7 +870,7 @@ pub struct EnumValueOptions {
#[prost(message, repeated, tag = "999")]
pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
}
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ServiceOptions {
/// Is this service deprecated?
@@ -888,7 +883,7 @@ pub struct ServiceOptions {
#[prost(message, repeated, tag = "999")]
pub uninterpreted_option: ::prost::alloc::vec::Vec<UninterpretedOption>,
}
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct MethodOptions {
/// Is this method deprecated?
@@ -913,7 +908,6 @@ pub mod method_options {
/// Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
/// or neither? HTTP based RPC implementation may choose GET verb for safe
/// methods, and PUT verb for idempotent methods instead of the default POST.
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(
Clone,
Copy,
@@ -940,9 +934,9 @@ pub mod method_options {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::IdempotencyUnknown => "IDEMPOTENCY_UNKNOWN",
- Self::NoSideEffects => "NO_SIDE_EFFECTS",
- Self::Idempotent => "IDEMPOTENT",
+ IdempotencyLevel::IdempotencyUnknown => "IDEMPOTENCY_UNKNOWN",
+ IdempotencyLevel::NoSideEffects => "NO_SIDE_EFFECTS",
+ IdempotencyLevel::Idempotent => "IDEMPOTENT",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -962,7 +956,7 @@ pub mod method_options {
/// options protos in descriptor objects (e.g. returned by Descriptor::options(),
/// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions
/// in them.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct UninterpretedOption {
#[prost(message, repeated, tag = "2")]
@@ -989,7 +983,7 @@ pub mod uninterpreted_option {
/// extension (denoted with parentheses in options specs in .proto files).
/// E.g.,{ \["foo", false\], \["bar.baz", true\], \["qux", false\] } represents
/// "foo.(bar.baz).qux".
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+ #[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct NamePart {
#[prost(string, required, tag = "1")]
@@ -1000,7 +994,7 @@ pub mod uninterpreted_option {
}
/// Encapsulates information about the original source file from which a
/// FileDescriptorProto was generated.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SourceCodeInfo {
/// A Location identifies a piece of source code in a .proto file which
@@ -1052,7 +1046,7 @@ pub struct SourceCodeInfo {
}
/// Nested message and enum types in `SourceCodeInfo`.
pub mod source_code_info {
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+ #[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Location {
/// Identifies which part of the FileDescriptorProto was defined at this
@@ -1147,7 +1141,7 @@ pub mod source_code_info {
/// Describes the relationship between generated code and its original source
/// file. A GeneratedCodeInfo message is associated with only one generated
/// source file, but may contain references to different source .proto files.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GeneratedCodeInfo {
/// An Annotation connects some span of text in generated code to an element
@@ -1157,7 +1151,7 @@ pub struct GeneratedCodeInfo {
}
/// Nested message and enum types in `GeneratedCodeInfo`.
pub mod generated_code_info {
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+ #[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Annotation {
/// Identifies the element in the original source .proto file. This field
@@ -1271,7 +1265,7 @@ pub mod generated_code_info {
/// "value": "1.212s"
/// }
/// ```
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Any {
/// A URL/resource name that uniquely identifies the type of the serialized
@@ -1309,7 +1303,7 @@ pub struct Any {
}
/// `SourceContext` represents information about the source of a
/// protobuf element, like the file in which it is defined.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct SourceContext {
/// The path-qualified name of the .proto file that contained the associated
@@ -1318,7 +1312,7 @@ pub struct SourceContext {
pub file_name: ::prost::alloc::string::String,
}
/// A protocol buffer message type.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Type {
/// The fully qualified message name.
@@ -1341,7 +1335,7 @@ pub struct Type {
pub syntax: i32,
}
/// A single field of a message type.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Field {
/// The field type.
@@ -1380,7 +1374,6 @@ pub struct Field {
/// Nested message and enum types in `Field`.
pub mod field {
/// Basic field types.
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(
Clone,
Copy,
@@ -1440,25 +1433,25 @@ pub mod field {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::TypeUnknown => "TYPE_UNKNOWN",
- Self::TypeDouble => "TYPE_DOUBLE",
- Self::TypeFloat => "TYPE_FLOAT",
- Self::TypeInt64 => "TYPE_INT64",
- Self::TypeUint64 => "TYPE_UINT64",
- Self::TypeInt32 => "TYPE_INT32",
- Self::TypeFixed64 => "TYPE_FIXED64",
- Self::TypeFixed32 => "TYPE_FIXED32",
- Self::TypeBool => "TYPE_BOOL",
- Self::TypeString => "TYPE_STRING",
- Self::TypeGroup => "TYPE_GROUP",
- Self::TypeMessage => "TYPE_MESSAGE",
- Self::TypeBytes => "TYPE_BYTES",
- Self::TypeUint32 => "TYPE_UINT32",
- Self::TypeEnum => "TYPE_ENUM",
- Self::TypeSfixed32 => "TYPE_SFIXED32",
- Self::TypeSfixed64 => "TYPE_SFIXED64",
- Self::TypeSint32 => "TYPE_SINT32",
- Self::TypeSint64 => "TYPE_SINT64",
+ Kind::TypeUnknown => "TYPE_UNKNOWN",
+ Kind::TypeDouble => "TYPE_DOUBLE",
+ Kind::TypeFloat => "TYPE_FLOAT",
+ Kind::TypeInt64 => "TYPE_INT64",
+ Kind::TypeUint64 => "TYPE_UINT64",
+ Kind::TypeInt32 => "TYPE_INT32",
+ Kind::TypeFixed64 => "TYPE_FIXED64",
+ Kind::TypeFixed32 => "TYPE_FIXED32",
+ Kind::TypeBool => "TYPE_BOOL",
+ Kind::TypeString => "TYPE_STRING",
+ Kind::TypeGroup => "TYPE_GROUP",
+ Kind::TypeMessage => "TYPE_MESSAGE",
+ Kind::TypeBytes => "TYPE_BYTES",
+ Kind::TypeUint32 => "TYPE_UINT32",
+ Kind::TypeEnum => "TYPE_ENUM",
+ Kind::TypeSfixed32 => "TYPE_SFIXED32",
+ Kind::TypeSfixed64 => "TYPE_SFIXED64",
+ Kind::TypeSint32 => "TYPE_SINT32",
+ Kind::TypeSint64 => "TYPE_SINT64",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -1488,7 +1481,6 @@ pub mod field {
}
}
/// Whether a field is optional, required, or repeated.
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(
Clone,
Copy,
@@ -1518,10 +1510,10 @@ pub mod field {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::Unknown => "CARDINALITY_UNKNOWN",
- Self::Optional => "CARDINALITY_OPTIONAL",
- Self::Required => "CARDINALITY_REQUIRED",
- Self::Repeated => "CARDINALITY_REPEATED",
+ Cardinality::Unknown => "CARDINALITY_UNKNOWN",
+ Cardinality::Optional => "CARDINALITY_OPTIONAL",
+ Cardinality::Required => "CARDINALITY_REQUIRED",
+ Cardinality::Repeated => "CARDINALITY_REPEATED",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -1537,7 +1529,7 @@ pub mod field {
}
}
/// Enum type definition.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Enum {
/// Enum type name.
@@ -1557,7 +1549,7 @@ pub struct Enum {
pub syntax: i32,
}
/// Enum value definition.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct EnumValue {
/// Enum value name.
@@ -1572,7 +1564,7 @@ pub struct EnumValue {
}
/// A protocol buffer option, which can be attached to a message, field,
/// enumeration, etc.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Option {
/// The option's name. For protobuf built-in options (options defined in
@@ -1589,7 +1581,6 @@ pub struct Option {
pub value: ::core::option::Option<Any>,
}
/// The syntax in which a protocol buffer element is defined.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum Syntax {
@@ -1605,8 +1596,8 @@ impl Syntax {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::Proto2 => "SYNTAX_PROTO2",
- Self::Proto3 => "SYNTAX_PROTO3",
+ Syntax::Proto2 => "SYNTAX_PROTO2",
+ Syntax::Proto3 => "SYNTAX_PROTO3",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -1627,7 +1618,7 @@ impl Syntax {
/// sometimes simply referred to as "APIs" in other contexts, such as the name of
/// this message itself. See <https://cloud.google.com/apis/design/glossary> for
/// detailed terminology.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Api {
/// The fully qualified name of this interface, including package name
@@ -1673,7 +1664,7 @@ pub struct Api {
pub syntax: i32,
}
/// Method represents a method of an API interface.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Method {
/// The simple name of this method.
@@ -1786,7 +1777,7 @@ pub struct Method {
/// ...
/// }
/// ```
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Mixin {
/// The fully qualified name of the interface which is included.
@@ -1861,8 +1852,8 @@ pub struct Mixin {
/// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
/// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
/// microsecond should be expressed in JSON format as "3.000001s".
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
-#[derive(Clone, Copy, PartialEq, ::prost::Message)]
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Duration {
/// Signed seconds of the span of time. Must be from -315,576,000,000
/// to +315,576,000,000 inclusive. Note: these bounds are computed from:
@@ -2100,7 +2091,7 @@ pub struct Duration {
/// The implementation of any API method which has a FieldMask type field in the
/// request should verify the included field paths, and return an
/// `INVALID_ARGUMENT` error if any path is unmappable.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct FieldMask {
/// The set of field mask paths.
@@ -2115,7 +2106,7 @@ pub struct FieldMask {
/// with the proto support for the language.
///
/// The JSON representation for `Struct` is JSON object.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Struct {
/// Unordered map of dynamically typed values.
@@ -2131,7 +2122,7 @@ pub struct Struct {
/// variants. Absence of any variant indicates an error.
///
/// The JSON representation for `Value` is JSON value.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Value {
/// The kind of value.
@@ -2141,7 +2132,7 @@ pub struct Value {
/// Nested message and enum types in `Value`.
pub mod value {
/// The kind of value.
- #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+ #[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Oneof)]
pub enum Kind {
/// Represents a null value.
@@ -2167,7 +2158,7 @@ pub mod value {
/// `ListValue` is a wrapper around a repeated field of values.
///
/// The JSON representation for `ListValue` is JSON array.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ListValue {
/// Repeated field of dynamically typed values.
@@ -2178,7 +2169,6 @@ pub struct ListValue {
/// `Value` type union.
///
/// The JSON representation for `NullValue` is JSON `null`.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)]
#[repr(i32)]
pub enum NullValue {
@@ -2192,7 +2182,7 @@ impl NullValue {
/// (if the ProtoBuf definition does not change) and safe for programmatic use.
pub fn as_str_name(&self) -> &'static str {
match self {
- Self::NullValue => "NULL_VALUE",
+ NullValue::NullValue => "NULL_VALUE",
}
}
/// Creates an enum from field names used in the ProtoBuf definition.
@@ -2302,8 +2292,8 @@ impl NullValue {
/// [`strftime`](<https://docs.python.org/2/library/time.html#time.strftime>) with
/// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
/// the Joda Time's [`ISODateTimeFormat.dateTime()`](<http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D>) to obtain a formatter capable of generating timestamps in this format.
-#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
-#[derive(Clone, Copy, PartialEq, ::prost::Message)]
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Timestamp {
/// Represents seconds of UTC time since Unix epoch
/// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
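Most of the churn in protobuf.rs is mechanical: the `arbitrary` derives disappear and the generated `as_str_name` match arms spell out the enum name instead of `Self`, with no behavioural change. A small hedged sketch of those helpers, using the `Syntax` enum shown above and assuming the vendored crate is in scope as `prost_types`:

```rust
fn main() {
    use prost_types::Syntax;

    // Same string mapping whether the arms are written `Self::...` or `Syntax::...`.
    assert_eq!(Syntax::Proto2.as_str_name(), "SYNTAX_PROTO2");
    assert_eq!(Syntax::from_str_name("SYNTAX_PROTO3"), Some(Syntax::Proto3));
    assert_eq!(Syntax::from_str_name("not-a-syntax"), None);
}
```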
diff --git a/vendor/prost-types/src/timestamp.rs b/vendor/prost-types/src/timestamp.rs
index 9ed0db6e..216712a4 100644
--- a/vendor/prost-types/src/timestamp.rs
+++ b/vendor/prost-types/src/timestamp.rs
@@ -50,7 +50,7 @@ impl Timestamp {
///
/// [1]: https://github.com/google/protobuf/blob/v3.3.2/src/google/protobuf/util/time_util.cc#L59-L77
pub fn try_normalize(mut self) -> Result<Timestamp, Timestamp> {
- let before = self;
+ let before = self.clone();
self.normalize();
// If the seconds value has changed, and is either i64::MIN or i64::MAX, then the timestamp
// normalization overflowed.
@@ -62,17 +62,6 @@ impl Timestamp {
}
}
- /// Return a normalized copy of the timestamp to a canonical format.
- ///
- /// Based on [`google::protobuf::util::CreateNormalized`][1].
- ///
- /// [1]: https://github.com/google/protobuf/blob/v3.3.2/src/google/protobuf/util/time_util.cc#L59-L77
- pub fn normalized(&self) -> Self {
- let mut result = *self;
- result.normalize();
- result
- }
-
/// Creates a new `Timestamp` at the start of the provided UTC date.
pub fn date(year: i64, month: u8, day: u8) -> Result<Timestamp, TimestampError> {
Timestamp::date_time_nanos(year, month, day, 0, 0, 0, 0)
@@ -110,7 +99,11 @@ impl Timestamp {
nanos,
};
- Timestamp::try_from(date_time)
+ if date_time.is_valid() {
+ Ok(Timestamp::from(date_time))
+ } else {
+ Err(TimestampError::InvalidDateTime)
+ }
}
}
@@ -160,6 +153,7 @@ impl From<std::time::SystemTime> for Timestamp {
}
/// A timestamp handling error.
+#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Debug, PartialEq)]
#[non_exhaustive]
pub enum TimestampError {
@@ -207,7 +201,7 @@ impl TryFrom<Timestamp> for std::time::SystemTime {
type Error = TimestampError;
fn try_from(mut timestamp: Timestamp) -> Result<std::time::SystemTime, Self::Error> {
- let orig_timestamp = timestamp;
+ let orig_timestamp = timestamp.clone();
timestamp.normalize();
let system_time = if timestamp.seconds >= 0 {
@@ -217,7 +211,8 @@ impl TryFrom<Timestamp> for std::time::SystemTime {
timestamp
.seconds
.checked_neg()
- .ok_or(TimestampError::OutOfSystemRange(timestamp))? as u64,
+ .ok_or_else(|| TimestampError::OutOfSystemRange(timestamp.clone()))?
+ as u64,
))
};
@@ -239,30 +234,9 @@ impl FromStr for Timestamp {
impl fmt::Display for Timestamp {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- datetime::DateTime::from(*self).fmt(f)
- }
-}
-
-#[cfg(kani)]
-mod proofs {
- use super::*;
-
- #[cfg(feature = "std")]
- #[kani::proof]
- #[kani::unwind(3)]
- fn check_timestamp_roundtrip_via_system_time() {
- let seconds = kani::any();
- let nanos = kani::any();
-
- let mut timestamp = Timestamp { seconds, nanos };
- timestamp.normalize();
-
- if let Ok(system_time) = std::time::SystemTime::try_from(timestamp) {
- assert_eq!(Timestamp::from(system_time), timestamp);
- }
+ datetime::DateTime::from(self.clone()).fmt(f)
}
}
-
#[cfg(test)]
mod tests {
use super::*;
@@ -280,6 +254,18 @@ mod tests {
) {
prop_assert_eq!(SystemTime::try_from(Timestamp::from(system_time)).unwrap(), system_time);
}
+
+ #[test]
+ fn check_timestamp_roundtrip_via_system_time(
+ seconds in i64::arbitrary(),
+ nanos in i32::arbitrary(),
+ ) {
+ let mut timestamp = Timestamp { seconds, nanos };
+ timestamp.normalize();
+ if let Ok(system_time) = SystemTime::try_from(timestamp.clone()) {
+ prop_assert_eq!(Timestamp::from(system_time), timestamp);
+ }
+ }
}
#[cfg(feature = "std")]
@@ -410,13 +396,14 @@ mod tests {
];
for case in cases.iter() {
- let test_timestamp = crate::Timestamp {
+ let mut test_timestamp = crate::Timestamp {
seconds: case.1,
nanos: case.2,
};
+ test_timestamp.normalize();
assert_eq!(
- test_timestamp.normalized(),
+ test_timestamp,
crate::Timestamp {
seconds: case.3,
nanos: case.4,
@@ -426,20 +413,4 @@ mod tests {
);
}
}
-
- #[cfg(feature = "arbitrary")]
- #[test]
- fn check_timestamp_implements_arbitrary() {
- use arbitrary::{Arbitrary, Unstructured};
-
- let mut unstructured = Unstructured::new(&[]);
-
- assert_eq!(
- Timestamp::arbitrary(&mut unstructured),
- Ok(Timestamp {
- seconds: 0,
- nanos: 0
- })
- );
- }
}
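The timestamp changes mirror the duration ones: `Timestamp` loses `Copy` and `normalized()`, gains explicit `clone()` calls, and the kani proof becomes a proptest case. A hedged sketch of the `Timestamp` / `SystemTime` round trip that the new test exercises, assuming the vendored `prost_types` crate with the `std` feature enabled:

```rust
use std::time::{Duration, SystemTime, UNIX_EPOCH};

fn main() {
    // Normalization is in place here as well.
    let mut ts = prost_types::Timestamp { seconds: 10, nanos: 1_500_000_000 };
    ts.normalize();
    assert_eq!((ts.seconds, ts.nanos), (11, 500_000_000));

    // Timestamp is not Copy in this version, so the conversion takes a clone.
    let st = SystemTime::try_from(ts.clone()).unwrap();
    assert_eq!(st, UNIX_EPOCH + Duration::new(11, 500_000_000));
    assert_eq!(prost_types::Timestamp::from(st), ts);
}
```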
diff --git a/vendor/tonic-health/.cargo-checksum.json b/vendor/tonic-health/.cargo-checksum.json
deleted file mode 100644
index d73ae1db..00000000
--- a/vendor/tonic-health/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.lock":"3118b74abb4f1d337c713a5e3f206f6f4d73ecd0d7ec83ab3d85164c5b340c15","Cargo.toml":"8e3987cb5cd417c0f3615e4e4275e0ecd7ebfa635e7ef7c0b740c791e9a52c77","LICENSE":"e24a56698aa6feaf3a02272b3624f9dc255d982970c5ed97ac4525a95056a5b3","README.md":"11fa9153709f659b751dcab4321c98d0af12ac11efe8aa859cf7925a5af0b3df","proto/health.proto":"8d44f54645557c1e10ba0da377883fd4d24ad994aff4f2139d61b7e9f0ece511","src/generated/grpc_health_v1.rs":"c393d5f9184837f9031baed068b44aafd6e91ce788bab3a3b38f2c3df493f2ef","src/generated/grpc_health_v1_fds.rs":"86f795eebc97f8264b0925dacf6b2880d0ca988b4201bec8ad4f85493bf29736","src/lib.rs":"74143df6bda4f5da7e61117c403ec26b53c9b30a674d0b3c13468ac7de028502","src/server.rs":"6135cf00623bde309bca0bb01cf9fe8244ef0925c7ee8ce9ffbb70e2b5b5f4e1"},"package":"cb87334d340313fefa513b6e60794d44a86d5f039b523229c99c323e4e19ca4b"} \ No newline at end of file
diff --git a/vendor/tonic-health/Cargo.lock b/vendor/tonic-health/Cargo.lock
deleted file mode 100644
index 469eb358..00000000
--- a/vendor/tonic-health/Cargo.lock
+++ /dev/null
@@ -1,474 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-version = 3
-
-[[package]]
-name = "addr2line"
-version = "0.24.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
-dependencies = [
- "gimli",
-]
-
-[[package]]
-name = "adler2"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
-
-[[package]]
-name = "anyhow"
-version = "1.0.98"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
-
-[[package]]
-name = "async-trait"
-version = "0.1.88"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "backtrace"
-version = "0.3.74"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
-dependencies = [
- "addr2line",
- "cfg-if",
- "libc",
- "miniz_oxide",
- "object",
- "rustc-demangle",
- "windows-targets",
-]
-
-[[package]]
-name = "base64"
-version = "0.22.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
-
-[[package]]
-name = "bytes"
-version = "1.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
-
-[[package]]
-name = "cfg-if"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
-
-[[package]]
-name = "either"
-version = "1.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
-
-[[package]]
-name = "fnv"
-version = "1.0.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
-
-[[package]]
-name = "futures-core"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
-
-[[package]]
-name = "futures-sink"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
-
-[[package]]
-name = "gimli"
-version = "0.31.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
-
-[[package]]
-name = "http"
-version = "1.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
-dependencies = [
- "bytes",
- "fnv",
- "itoa",
-]
-
-[[package]]
-name = "http-body"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
-dependencies = [
- "bytes",
- "http",
-]
-
-[[package]]
-name = "http-body-util"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
-dependencies = [
- "bytes",
- "futures-core",
- "http",
- "http-body",
- "pin-project-lite",
-]
-
-[[package]]
-name = "itertools"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
-dependencies = [
- "either",
-]
-
-[[package]]
-name = "itoa"
-version = "1.0.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
-
-[[package]]
-name = "libc"
-version = "0.2.172"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
-
-[[package]]
-name = "memchr"
-version = "2.7.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
-
-[[package]]
-name = "miniz_oxide"
-version = "0.8.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a"
-dependencies = [
- "adler2",
-]
-
-[[package]]
-name = "object"
-version = "0.36.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
-dependencies = [
- "memchr",
-]
-
-[[package]]
-name = "once_cell"
-version = "1.21.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
-
-[[package]]
-name = "percent-encoding"
-version = "2.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
-
-[[package]]
-name = "pin-project"
-version = "1.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a"
-dependencies = [
- "pin-project-internal",
-]
-
-[[package]]
-name = "pin-project-internal"
-version = "1.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "pin-project-lite"
-version = "0.2.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.95"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
-dependencies = [
- "unicode-ident",
-]
-
-[[package]]
-name = "prost"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
-dependencies = [
- "bytes",
- "prost-derive",
-]
-
-[[package]]
-name = "prost-derive"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
-dependencies = [
- "anyhow",
- "itertools",
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "prost-types"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16"
-dependencies = [
- "prost",
-]
-
-[[package]]
-name = "quote"
-version = "1.0.40"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
-dependencies = [
- "proc-macro2",
-]
-
-[[package]]
-name = "rustc-demangle"
-version = "0.1.24"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
-
-[[package]]
-name = "syn"
-version = "2.0.101"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
-
-[[package]]
-name = "tokio"
-version = "1.44.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48"
-dependencies = [
- "backtrace",
- "pin-project-lite",
- "tokio-macros",
-]
-
-[[package]]
-name = "tokio-macros"
-version = "2.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "tokio-stream"
-version = "0.1.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
-dependencies = [
- "futures-core",
- "pin-project-lite",
- "tokio",
- "tokio-util",
-]
-
-[[package]]
-name = "tokio-util"
-version = "0.7.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df"
-dependencies = [
- "bytes",
- "futures-core",
- "futures-sink",
- "pin-project-lite",
- "tokio",
-]
-
-[[package]]
-name = "tonic"
-version = "0.13.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e581ba15a835f4d9ea06c55ab1bd4dce26fc53752c69a04aac00703bfb49ba9"
-dependencies = [
- "async-trait",
- "base64",
- "bytes",
- "http",
- "http-body",
- "http-body-util",
- "percent-encoding",
- "pin-project",
- "prost",
- "tokio-stream",
- "tower-layer",
- "tower-service",
- "tracing",
-]
-
-[[package]]
-name = "tonic-health"
-version = "0.13.1"
-dependencies = [
- "prost",
- "prost-types",
- "tokio",
- "tokio-stream",
- "tonic",
-]
-
-[[package]]
-name = "tower-layer"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
-
-[[package]]
-name = "tower-service"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
-
-[[package]]
-name = "tracing"
-version = "0.1.41"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
-dependencies = [
- "pin-project-lite",
- "tracing-attributes",
- "tracing-core",
-]
-
-[[package]]
-name = "tracing-attributes"
-version = "0.1.28"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "tracing-core"
-version = "0.1.33"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
-dependencies = [
- "once_cell",
-]
-
-[[package]]
-name = "unicode-ident"
-version = "1.0.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
-
-[[package]]
-name = "windows-targets"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
-dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_gnullvm",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
-]
-
-[[package]]
-name = "windows_aarch64_gnullvm"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
-
-[[package]]
-name = "windows_aarch64_msvc"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
-
-[[package]]
-name = "windows_i686_gnu"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
-
-[[package]]
-name = "windows_i686_gnullvm"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
-
-[[package]]
-name = "windows_i686_msvc"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
-
-[[package]]
-name = "windows_x86_64_gnu"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
-
-[[package]]
-name = "windows_x86_64_gnullvm"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
-
-[[package]]
-name = "windows_x86_64_msvc"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
diff --git a/vendor/tonic-health/Cargo.toml b/vendor/tonic-health/Cargo.toml
deleted file mode 100644
index a43ef39f..00000000
--- a/vendor/tonic-health/Cargo.toml
+++ /dev/null
@@ -1,97 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies.
-#
-# If you are reading this file be aware that the original Cargo.toml
-# will likely look very different (and much more reasonable).
-# See Cargo.toml.orig for the original contents.
-
-[package]
-edition = "2021"
-rust-version = "1.75"
-name = "tonic-health"
-version = "0.13.1"
-authors = ["James Nugent <james@jen20.com>"]
-build = false
-autolib = false
-autobins = false
-autoexamples = false
-autotests = false
-autobenches = false
-description = """
-Health Checking module of `tonic` gRPC implementation.
-"""
-homepage = "https://github.com/hyperium/tonic"
-readme = "README.md"
-keywords = [
- "rpc",
- "grpc",
- "async",
- "healthcheck",
-]
-categories = [
- "network-programming",
- "asynchronous",
-]
-license = "MIT"
-repository = "https://github.com/hyperium/tonic"
-
-[package.metadata.cargo_check_external_types]
-allowed_external_types = [
- "tonic::*",
- "bytes::*",
- "http::*",
- "http_body::*",
- "prost::*",
- "futures_core::stream::Stream",
- "tower_service::Service",
-]
-
-[lib]
-name = "tonic_health"
-path = "src/lib.rs"
-
-[dependencies.prost]
-version = "0.13"
-
-[dependencies.tokio]
-version = "1.0"
-features = ["sync"]
-
-[dependencies.tokio-stream]
-version = "0.1"
-features = ["sync"]
-default-features = false
-
-[dependencies.tonic]
-version = "0.13.0"
-features = [
- "codegen",
- "prost",
-]
-default-features = false
-
-[dev-dependencies.prost-types]
-version = "0.13.0"
-
-[dev-dependencies.tokio]
-version = "1.0"
-features = [
- "rt-multi-thread",
- "macros",
-]
-
-[lints.clippy]
-uninlined_format_args = "deny"
-
-[lints.rust]
-missing_debug_implementations = "warn"
-missing_docs = "warn"
-rust_2018_idioms = "warn"
-unreachable_pub = "warn"
-
-[lints.rustdoc]
-broken_intra_doc_links = "deny"
diff --git a/vendor/tonic-health/LICENSE b/vendor/tonic-health/LICENSE
deleted file mode 100644
index c7f571db..00000000
--- a/vendor/tonic-health/LICENSE
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2025 Lucio Franco
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/vendor/tonic-health/README.md b/vendor/tonic-health/README.md
deleted file mode 100644
index b386668d..00000000
--- a/vendor/tonic-health/README.md
+++ /dev/null
@@ -1,14 +0,0 @@
-# tonic-health
-
-A `tonic` based gRPC healthcheck implementation. It closely follows the official [health checking protocol](https://github.com/grpc/grpc/blob/master/doc/health-checking.md), although it may not implement all features described in the specs.
-
-Please follow the example in the [main repo](https://github.com/hyperium/tonic/tree/master/examples/src/health) to see how it works.
-
-## Features
-
-- transport: Provides the ability to set the service by using the type system and the
-`NamedService` trait. You can use it like that:
-```rust
- let conn = tonic::transport::Endpoint::new(dst)?.connect().await?;
- let client = HealthClient::new(conn);
-```
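The deleted README's snippet stops at constructing the client; a slightly fuller, hypothetical sketch of a health check call (assuming tonic's `transport` feature is enabled downstream, a tokio runtime, and the crate's usual `pb` module layout) looks like this:

```rust
use tonic_health::pb::health_client::HealthClient;
use tonic_health::pb::HealthCheckRequest;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical endpoint; any server exposing grpc.health.v1.Health works.
    let conn = tonic::transport::Endpoint::from_static("http://127.0.0.1:50051")
        .connect()
        .await?;
    let mut client = HealthClient::new(conn);

    // An empty service name asks about the server as a whole.
    let response = client
        .check(HealthCheckRequest { service: String::new() })
        .await?;
    println!("serving status: {:?}", response.into_inner().status);
    Ok(())
}
```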
diff --git a/vendor/tonic-health/proto/health.proto b/vendor/tonic-health/proto/health.proto
deleted file mode 100644
index 38843ff1..00000000
--- a/vendor/tonic-health/proto/health.proto
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2015 The gRPC Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// The canonical version of this proto can be found at
-// https://github.com/grpc/grpc-proto/blob/master/grpc/health/v1/health.proto
-
-syntax = "proto3";
-
-package grpc.health.v1;
-
-option csharp_namespace = "Grpc.Health.V1";
-option go_package = "google.golang.org/grpc/health/grpc_health_v1";
-option java_multiple_files = true;
-option java_outer_classname = "HealthProto";
-option java_package = "io.grpc.health.v1";
-
-message HealthCheckRequest {
- string service = 1;
-}
-
-message HealthCheckResponse {
- enum ServingStatus {
- UNKNOWN = 0;
- SERVING = 1;
- NOT_SERVING = 2;
- SERVICE_UNKNOWN = 3; // Used only by the Watch method.
- }
- ServingStatus status = 1;
-}
-
-service Health {
- // If the requested service is unknown, the call will fail with status
- // NOT_FOUND.
- rpc Check(HealthCheckRequest) returns (HealthCheckResponse);
-
- // Performs a watch for the serving status of the requested service.
- // The server will immediately send back a message indicating the current
- // serving status. It will then subsequently send a new message whenever
- // the service's serving status changes.
- //
- // If the requested service is unknown when the call is received, the
- // server will send a message setting the serving status to
- // SERVICE_UNKNOWN but will *not* terminate the call. If at some
- // future point, the serving status of the service becomes known, the
- // server will send a new message with the service's serving status.
- //
- // If the call terminates with status UNIMPLEMENTED, then clients
- // should assume this method is not supported and should not retry the
- // call. If the call terminates with any other status (including OK),
- // clients should retry the call with appropriate exponential backoff.
- rpc Watch(HealthCheckRequest) returns (stream HealthCheckResponse);
-}
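The Watch comments above describe a server-streaming RPC: one immediate status message, then another on every change, with UNIMPLEMENTED meaning "do not retry". A hedged client-side sketch of consuming that stream, reusing the hypothetical setup from the Check example and a made-up service name:

```rust
use tonic_health::pb::health_client::HealthClient;
use tonic_health::pb::HealthCheckRequest;

async fn watch_service(
    mut client: HealthClient<tonic::transport::Channel>,
) -> Result<(), Box<dyn std::error::Error>> {
    // The service name here is illustrative only.
    let mut stream = client
        .watch(HealthCheckRequest { service: "my.pkg.MyService".to_string() })
        .await?
        .into_inner();

    // The server sends the current status immediately, then again on each change.
    while let Some(update) = stream.message().await? {
        println!("serving status is now: {:?}", update.status);
    }
    Ok(())
}
```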
diff --git a/vendor/tonic-health/src/generated/grpc_health_v1.rs b/vendor/tonic-health/src/generated/grpc_health_v1.rs
deleted file mode 100644
index 67ec57c9..00000000
--- a/vendor/tonic-health/src/generated/grpc_health_v1.rs
+++ /dev/null
@@ -1,459 +0,0 @@
-// This file is @generated by prost-build.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct HealthCheckRequest {
- #[prost(string, tag = "1")]
- pub service: ::prost::alloc::string::String,
-}
-#[derive(Clone, Copy, PartialEq, ::prost::Message)]
-pub struct HealthCheckResponse {
- #[prost(enumeration = "health_check_response::ServingStatus", tag = "1")]
- pub status: i32,
-}
-/// Nested message and enum types in `HealthCheckResponse`.
-pub mod health_check_response {
- #[derive(
- Clone,
- Copy,
- Debug,
- PartialEq,
- Eq,
- Hash,
- PartialOrd,
- Ord,
- ::prost::Enumeration
- )]
- #[repr(i32)]
- pub enum ServingStatus {
- Unknown = 0,
- Serving = 1,
- NotServing = 2,
- /// Used only by the Watch method.
- ServiceUnknown = 3,
- }
- impl ServingStatus {
- /// String value of the enum field names used in the ProtoBuf definition.
- ///
- /// The values are not transformed in any way and thus are considered stable
- /// (if the ProtoBuf definition does not change) and safe for programmatic use.
- pub fn as_str_name(&self) -> &'static str {
- match self {
- Self::Unknown => "UNKNOWN",
- Self::Serving => "SERVING",
- Self::NotServing => "NOT_SERVING",
- Self::ServiceUnknown => "SERVICE_UNKNOWN",
- }
- }
- /// Creates an enum from field names used in the ProtoBuf definition.
- pub fn from_str_name(value: &str) -> ::core::option::Option<Self> {
- match value {
- "UNKNOWN" => Some(Self::Unknown),
- "SERVING" => Some(Self::Serving),
- "NOT_SERVING" => Some(Self::NotServing),
- "SERVICE_UNKNOWN" => Some(Self::ServiceUnknown),
- _ => None,
- }
- }
- }
-}
-/// Generated client implementations.
-pub mod health_client {
- #![allow(
- unused_variables,
- dead_code,
- missing_docs,
- clippy::wildcard_imports,
- clippy::let_unit_value,
- )]
- use tonic::codegen::*;
- use tonic::codegen::http::Uri;
- #[derive(Debug, Clone)]
- pub struct HealthClient<T> {
- inner: tonic::client::Grpc<T>,
- }
- impl<T> HealthClient<T>
- where
- T: tonic::client::GrpcService<tonic::body::Body>,
- T::Error: Into<StdError>,
- T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
- <T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
- {
- pub fn new(inner: T) -> Self {
- let inner = tonic::client::Grpc::new(inner);
- Self { inner }
- }
- pub fn with_origin(inner: T, origin: Uri) -> Self {
- let inner = tonic::client::Grpc::with_origin(inner, origin);
- Self { inner }
- }
- pub fn with_interceptor<F>(
- inner: T,
- interceptor: F,
- ) -> HealthClient<InterceptedService<T, F>>
- where
- F: tonic::service::Interceptor,
- T::ResponseBody: Default,
- T: tonic::codegen::Service<
- http::Request<tonic::body::Body>,
- Response = http::Response<
- <T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
- >,
- >,
- <T as tonic::codegen::Service<
- http::Request<tonic::body::Body>,
- >>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
- {
- HealthClient::new(InterceptedService::new(inner, interceptor))
- }
- /// Compress requests with the given encoding.
- ///
- /// This requires the server to support it otherwise it might respond with an
- /// error.
- #[must_use]
- pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.inner = self.inner.send_compressed(encoding);
- self
- }
- /// Enable decompressing responses.
- #[must_use]
- pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.inner = self.inner.accept_compressed(encoding);
- self
- }
- /// Limits the maximum size of a decoded message.
- ///
- /// Default: `4MB`
- #[must_use]
- pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
- self.inner = self.inner.max_decoding_message_size(limit);
- self
- }
- /// Limits the maximum size of an encoded message.
- ///
- /// Default: `usize::MAX`
- #[must_use]
- pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
- self.inner = self.inner.max_encoding_message_size(limit);
- self
- }
- /// If the requested service is unknown, the call will fail with status
- /// NOT_FOUND.
- pub async fn check(
- &mut self,
- request: impl tonic::IntoRequest<super::HealthCheckRequest>,
- ) -> std::result::Result<
- tonic::Response<super::HealthCheckResponse>,
- tonic::Status,
- > {
- self.inner
- .ready()
- .await
- .map_err(|e| {
- tonic::Status::unknown(
- format!("Service was not ready: {}", e.into()),
- )
- })?;
- let codec = tonic::codec::ProstCodec::default();
- let path = http::uri::PathAndQuery::from_static(
- "/grpc.health.v1.Health/Check",
- );
- let mut req = request.into_request();
- req.extensions_mut()
- .insert(GrpcMethod::new("grpc.health.v1.Health", "Check"));
- self.inner.unary(req, path, codec).await
- }
- /// Performs a watch for the serving status of the requested service.
- /// The server will immediately send back a message indicating the current
- /// serving status. It will then subsequently send a new message whenever
- /// the service's serving status changes.
- ///
- /// If the requested service is unknown when the call is received, the
- /// server will send a message setting the serving status to
- /// SERVICE_UNKNOWN but will *not* terminate the call. If at some
- /// future point, the serving status of the service becomes known, the
- /// server will send a new message with the service's serving status.
- ///
- /// If the call terminates with status UNIMPLEMENTED, then clients
- /// should assume this method is not supported and should not retry the
- /// call. If the call terminates with any other status (including OK),
- /// clients should retry the call with appropriate exponential backoff.
- pub async fn watch(
- &mut self,
- request: impl tonic::IntoRequest<super::HealthCheckRequest>,
- ) -> std::result::Result<
- tonic::Response<tonic::codec::Streaming<super::HealthCheckResponse>>,
- tonic::Status,
- > {
- self.inner
- .ready()
- .await
- .map_err(|e| {
- tonic::Status::unknown(
- format!("Service was not ready: {}", e.into()),
- )
- })?;
- let codec = tonic::codec::ProstCodec::default();
- let path = http::uri::PathAndQuery::from_static(
- "/grpc.health.v1.Health/Watch",
- );
- let mut req = request.into_request();
- req.extensions_mut()
- .insert(GrpcMethod::new("grpc.health.v1.Health", "Watch"));
- self.inner.server_streaming(req, path, codec).await
- }
- }
-}
-/// Generated server implementations.
-pub mod health_server {
- #![allow(
- unused_variables,
- dead_code,
- missing_docs,
- clippy::wildcard_imports,
- clippy::let_unit_value,
- )]
- use tonic::codegen::*;
- /// Generated trait containing gRPC methods that should be implemented for use with HealthServer.
- #[async_trait]
- pub trait Health: std::marker::Send + std::marker::Sync + 'static {
- /// If the requested service is unknown, the call will fail with status
- /// NOT_FOUND.
- async fn check(
- &self,
- request: tonic::Request<super::HealthCheckRequest>,
- ) -> std::result::Result<
- tonic::Response<super::HealthCheckResponse>,
- tonic::Status,
- >;
- /// Server streaming response type for the Watch method.
- type WatchStream: tonic::codegen::tokio_stream::Stream<
- Item = std::result::Result<super::HealthCheckResponse, tonic::Status>,
- >
- + std::marker::Send
- + 'static;
- /// Performs a watch for the serving status of the requested service.
- /// The server will immediately send back a message indicating the current
- /// serving status. It will then subsequently send a new message whenever
- /// the service's serving status changes.
- ///
- /// If the requested service is unknown when the call is received, the
- /// server will send a message setting the serving status to
- /// SERVICE_UNKNOWN but will *not* terminate the call. If at some
- /// future point, the serving status of the service becomes known, the
- /// server will send a new message with the service's serving status.
- ///
- /// If the call terminates with status UNIMPLEMENTED, then clients
- /// should assume this method is not supported and should not retry the
- /// call. If the call terminates with any other status (including OK),
- /// clients should retry the call with appropriate exponential backoff.
- async fn watch(
- &self,
- request: tonic::Request<super::HealthCheckRequest>,
- ) -> std::result::Result<tonic::Response<Self::WatchStream>, tonic::Status>;
- }
- #[derive(Debug)]
- pub struct HealthServer<T> {
- inner: Arc<T>,
- accept_compression_encodings: EnabledCompressionEncodings,
- send_compression_encodings: EnabledCompressionEncodings,
- max_decoding_message_size: Option<usize>,
- max_encoding_message_size: Option<usize>,
- }
- impl<T> HealthServer<T> {
- pub fn new(inner: T) -> Self {
- Self::from_arc(Arc::new(inner))
- }
- pub fn from_arc(inner: Arc<T>) -> Self {
- Self {
- inner,
- accept_compression_encodings: Default::default(),
- send_compression_encodings: Default::default(),
- max_decoding_message_size: None,
- max_encoding_message_size: None,
- }
- }
- pub fn with_interceptor<F>(
- inner: T,
- interceptor: F,
- ) -> InterceptedService<Self, F>
- where
- F: tonic::service::Interceptor,
- {
- InterceptedService::new(Self::new(inner), interceptor)
- }
- /// Enable decompressing requests with the given encoding.
- #[must_use]
- pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.accept_compression_encodings.enable(encoding);
- self
- }
- /// Compress responses with the given encoding, if the client supports it.
- #[must_use]
- pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.send_compression_encodings.enable(encoding);
- self
- }
- /// Limits the maximum size of a decoded message.
- ///
- /// Default: `4MB`
- #[must_use]
- pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
- self.max_decoding_message_size = Some(limit);
- self
- }
- /// Limits the maximum size of an encoded message.
- ///
- /// Default: `usize::MAX`
- #[must_use]
- pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
- self.max_encoding_message_size = Some(limit);
- self
- }
- }
- impl<T, B> tonic::codegen::Service<http::Request<B>> for HealthServer<T>
- where
- T: Health,
- B: Body + std::marker::Send + 'static,
- B::Error: Into<StdError> + std::marker::Send + 'static,
- {
- type Response = http::Response<tonic::body::Body>;
- type Error = std::convert::Infallible;
- type Future = BoxFuture<Self::Response, Self::Error>;
- fn poll_ready(
- &mut self,
- _cx: &mut Context<'_>,
- ) -> Poll<std::result::Result<(), Self::Error>> {
- Poll::Ready(Ok(()))
- }
- fn call(&mut self, req: http::Request<B>) -> Self::Future {
- match req.uri().path() {
- "/grpc.health.v1.Health/Check" => {
- #[allow(non_camel_case_types)]
- struct CheckSvc<T: Health>(pub Arc<T>);
- impl<
- T: Health,
- > tonic::server::UnaryService<super::HealthCheckRequest>
- for CheckSvc<T> {
- type Response = super::HealthCheckResponse;
- type Future = BoxFuture<
- tonic::Response<Self::Response>,
- tonic::Status,
- >;
- fn call(
- &mut self,
- request: tonic::Request<super::HealthCheckRequest>,
- ) -> Self::Future {
- let inner = Arc::clone(&self.0);
- let fut = async move {
- <T as Health>::check(&inner, request).await
- };
- Box::pin(fut)
- }
- }
- let accept_compression_encodings = self.accept_compression_encodings;
- let send_compression_encodings = self.send_compression_encodings;
- let max_decoding_message_size = self.max_decoding_message_size;
- let max_encoding_message_size = self.max_encoding_message_size;
- let inner = self.inner.clone();
- let fut = async move {
- let method = CheckSvc(inner);
- let codec = tonic::codec::ProstCodec::default();
- let mut grpc = tonic::server::Grpc::new(codec)
- .apply_compression_config(
- accept_compression_encodings,
- send_compression_encodings,
- )
- .apply_max_message_size_config(
- max_decoding_message_size,
- max_encoding_message_size,
- );
- let res = grpc.unary(method, req).await;
- Ok(res)
- };
- Box::pin(fut)
- }
- "/grpc.health.v1.Health/Watch" => {
- #[allow(non_camel_case_types)]
- struct WatchSvc<T: Health>(pub Arc<T>);
- impl<
- T: Health,
- > tonic::server::ServerStreamingService<super::HealthCheckRequest>
- for WatchSvc<T> {
- type Response = super::HealthCheckResponse;
- type ResponseStream = T::WatchStream;
- type Future = BoxFuture<
- tonic::Response<Self::ResponseStream>,
- tonic::Status,
- >;
- fn call(
- &mut self,
- request: tonic::Request<super::HealthCheckRequest>,
- ) -> Self::Future {
- let inner = Arc::clone(&self.0);
- let fut = async move {
- <T as Health>::watch(&inner, request).await
- };
- Box::pin(fut)
- }
- }
- let accept_compression_encodings = self.accept_compression_encodings;
- let send_compression_encodings = self.send_compression_encodings;
- let max_decoding_message_size = self.max_decoding_message_size;
- let max_encoding_message_size = self.max_encoding_message_size;
- let inner = self.inner.clone();
- let fut = async move {
- let method = WatchSvc(inner);
- let codec = tonic::codec::ProstCodec::default();
- let mut grpc = tonic::server::Grpc::new(codec)
- .apply_compression_config(
- accept_compression_encodings,
- send_compression_encodings,
- )
- .apply_max_message_size_config(
- max_decoding_message_size,
- max_encoding_message_size,
- );
- let res = grpc.server_streaming(method, req).await;
- Ok(res)
- };
- Box::pin(fut)
- }
- _ => {
- Box::pin(async move {
- let mut response = http::Response::new(
- tonic::body::Body::default(),
- );
- let headers = response.headers_mut();
- headers
- .insert(
- tonic::Status::GRPC_STATUS,
- (tonic::Code::Unimplemented as i32).into(),
- );
- headers
- .insert(
- http::header::CONTENT_TYPE,
- tonic::metadata::GRPC_CONTENT_TYPE,
- );
- Ok(response)
- })
- }
- }
- }
- }
- impl<T> Clone for HealthServer<T> {
- fn clone(&self) -> Self {
- let inner = self.inner.clone();
- Self {
- inner,
- accept_compression_encodings: self.accept_compression_encodings,
- send_compression_encodings: self.send_compression_encodings,
- max_decoding_message_size: self.max_decoding_message_size,
- max_encoding_message_size: self.max_encoding_message_size,
- }
- }
- }
- /// Generated gRPC service name
- pub const SERVICE_NAME: &str = "grpc.health.v1.Health";
- impl<T> tonic::server::NamedService for HealthServer<T> {
- const NAME: &'static str = SERVICE_NAME;
- }
-}
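For reference, a minimal sketch of how the generated client removed above is driven from an async context; the endpoint address is a placeholder and error handling is simplified:

    // Usage sketch for the generated HealthClient (endpoint is a placeholder).
    use tonic::transport::Channel;
    use tonic_health::pb::health_client::HealthClient;
    use tonic_health::pb::HealthCheckRequest;

    async fn probe_overall_health() -> Result<(), Box<dyn std::error::Error>> {
        let channel = Channel::from_static("http://127.0.0.1:50051").connect().await?;
        let mut client = HealthClient::new(channel);
        // The empty service name "" asks for the overall server health.
        let status = client
            .check(HealthCheckRequest { service: String::new() })
            .await?
            .into_inner()
            .status;
        println!("overall health: {status}");
        Ok(())
    }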
diff --git a/vendor/tonic-health/src/generated/grpc_health_v1_fds.rs b/vendor/tonic-health/src/generated/grpc_health_v1_fds.rs
deleted file mode 100644
index 45cc00bf..00000000
--- a/vendor/tonic-health/src/generated/grpc_health_v1_fds.rs
+++ /dev/null
@@ -1,63 +0,0 @@
-// This file is @generated by codegen.
-// Copyright 2015 The gRPC Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// The canonical version of this proto can be found at
-// https://github.com/grpc/grpc-proto/blob/master/grpc/health/v1/health.proto
-//
-/// Byte encoded FILE_DESCRIPTOR_SET.
-pub const FILE_DESCRIPTOR_SET: &[u8] = &[
- 10u8, 158u8, 4u8, 10u8, 12u8, 104u8, 101u8, 97u8, 108u8, 116u8, 104u8, 46u8, 112u8,
- 114u8, 111u8, 116u8, 111u8, 18u8, 14u8, 103u8, 114u8, 112u8, 99u8, 46u8, 104u8,
- 101u8, 97u8, 108u8, 116u8, 104u8, 46u8, 118u8, 49u8, 34u8, 46u8, 10u8, 18u8, 72u8,
- 101u8, 97u8, 108u8, 116u8, 104u8, 67u8, 104u8, 101u8, 99u8, 107u8, 82u8, 101u8,
- 113u8, 117u8, 101u8, 115u8, 116u8, 18u8, 24u8, 10u8, 7u8, 115u8, 101u8, 114u8, 118u8,
- 105u8, 99u8, 101u8, 24u8, 1u8, 32u8, 1u8, 40u8, 9u8, 82u8, 7u8, 115u8, 101u8, 114u8,
- 118u8, 105u8, 99u8, 101u8, 34u8, 177u8, 1u8, 10u8, 19u8, 72u8, 101u8, 97u8, 108u8,
- 116u8, 104u8, 67u8, 104u8, 101u8, 99u8, 107u8, 82u8, 101u8, 115u8, 112u8, 111u8,
- 110u8, 115u8, 101u8, 18u8, 73u8, 10u8, 6u8, 115u8, 116u8, 97u8, 116u8, 117u8, 115u8,
- 24u8, 1u8, 32u8, 1u8, 40u8, 14u8, 50u8, 49u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8,
- 104u8, 101u8, 97u8, 108u8, 116u8, 104u8, 46u8, 118u8, 49u8, 46u8, 72u8, 101u8, 97u8,
- 108u8, 116u8, 104u8, 67u8, 104u8, 101u8, 99u8, 107u8, 82u8, 101u8, 115u8, 112u8,
- 111u8, 110u8, 115u8, 101u8, 46u8, 83u8, 101u8, 114u8, 118u8, 105u8, 110u8, 103u8,
- 83u8, 116u8, 97u8, 116u8, 117u8, 115u8, 82u8, 6u8, 115u8, 116u8, 97u8, 116u8, 117u8,
- 115u8, 34u8, 79u8, 10u8, 13u8, 83u8, 101u8, 114u8, 118u8, 105u8, 110u8, 103u8, 83u8,
- 116u8, 97u8, 116u8, 117u8, 115u8, 18u8, 11u8, 10u8, 7u8, 85u8, 78u8, 75u8, 78u8,
- 79u8, 87u8, 78u8, 16u8, 0u8, 18u8, 11u8, 10u8, 7u8, 83u8, 69u8, 82u8, 86u8, 73u8,
- 78u8, 71u8, 16u8, 1u8, 18u8, 15u8, 10u8, 11u8, 78u8, 79u8, 84u8, 95u8, 83u8, 69u8,
- 82u8, 86u8, 73u8, 78u8, 71u8, 16u8, 2u8, 18u8, 19u8, 10u8, 15u8, 83u8, 69u8, 82u8,
- 86u8, 73u8, 67u8, 69u8, 95u8, 85u8, 78u8, 75u8, 78u8, 79u8, 87u8, 78u8, 16u8, 3u8,
- 50u8, 174u8, 1u8, 10u8, 6u8, 72u8, 101u8, 97u8, 108u8, 116u8, 104u8, 18u8, 80u8,
- 10u8, 5u8, 67u8, 104u8, 101u8, 99u8, 107u8, 18u8, 34u8, 46u8, 103u8, 114u8, 112u8,
- 99u8, 46u8, 104u8, 101u8, 97u8, 108u8, 116u8, 104u8, 46u8, 118u8, 49u8, 46u8, 72u8,
- 101u8, 97u8, 108u8, 116u8, 104u8, 67u8, 104u8, 101u8, 99u8, 107u8, 82u8, 101u8,
- 113u8, 117u8, 101u8, 115u8, 116u8, 26u8, 35u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8,
- 104u8, 101u8, 97u8, 108u8, 116u8, 104u8, 46u8, 118u8, 49u8, 46u8, 72u8, 101u8, 97u8,
- 108u8, 116u8, 104u8, 67u8, 104u8, 101u8, 99u8, 107u8, 82u8, 101u8, 115u8, 112u8,
- 111u8, 110u8, 115u8, 101u8, 18u8, 82u8, 10u8, 5u8, 87u8, 97u8, 116u8, 99u8, 104u8,
- 18u8, 34u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 104u8, 101u8, 97u8, 108u8, 116u8,
- 104u8, 46u8, 118u8, 49u8, 46u8, 72u8, 101u8, 97u8, 108u8, 116u8, 104u8, 67u8, 104u8,
- 101u8, 99u8, 107u8, 82u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 26u8, 35u8, 46u8,
- 103u8, 114u8, 112u8, 99u8, 46u8, 104u8, 101u8, 97u8, 108u8, 116u8, 104u8, 46u8,
- 118u8, 49u8, 46u8, 72u8, 101u8, 97u8, 108u8, 116u8, 104u8, 67u8, 104u8, 101u8, 99u8,
- 107u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 48u8, 1u8, 66u8, 97u8,
- 10u8, 17u8, 105u8, 111u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 104u8, 101u8, 97u8,
- 108u8, 116u8, 104u8, 46u8, 118u8, 49u8, 66u8, 11u8, 72u8, 101u8, 97u8, 108u8, 116u8,
- 104u8, 80u8, 114u8, 111u8, 116u8, 111u8, 80u8, 1u8, 90u8, 44u8, 103u8, 111u8, 111u8,
- 103u8, 108u8, 101u8, 46u8, 103u8, 111u8, 108u8, 97u8, 110u8, 103u8, 46u8, 111u8,
- 114u8, 103u8, 47u8, 103u8, 114u8, 112u8, 99u8, 47u8, 104u8, 101u8, 97u8, 108u8,
- 116u8, 104u8, 47u8, 103u8, 114u8, 112u8, 99u8, 95u8, 104u8, 101u8, 97u8, 108u8,
- 116u8, 104u8, 95u8, 118u8, 49u8, 170u8, 2u8, 14u8, 71u8, 114u8, 112u8, 99u8, 46u8,
- 72u8, 101u8, 97u8, 108u8, 116u8, 104u8, 46u8, 86u8, 49u8, 98u8, 6u8, 112u8, 114u8,
- 111u8, 116u8, 111u8, 51u8,
-];
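The descriptor bytes above are just a serialized FileDescriptorSet; the sketch below mirrors the crate's own validity test further down and only assumes prost and prost-types are available:

    // Decode the byte-encoded FileDescriptorSet and list the services it declares.
    use prost::Message as _;
    use tonic_health::pb::FILE_DESCRIPTOR_SET;

    fn health_service_names() -> Result<Vec<String>, prost::DecodeError> {
        let fds = prost_types::FileDescriptorSet::decode(FILE_DESCRIPTOR_SET)?;
        Ok(fds
            .file
            .iter()
            .flat_map(|file| file.service.iter())
            .map(|svc| svc.name().to_string())
            .collect())
    }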
diff --git a/vendor/tonic-health/src/lib.rs b/vendor/tonic-health/src/lib.rs
deleted file mode 100644
index 5884fd82..00000000
--- a/vendor/tonic-health/src/lib.rs
+++ /dev/null
@@ -1,76 +0,0 @@
-//! A `tonic` based gRPC healthcheck implementation.
-//!
-//! # Example
-//!
-//! An example can be found [here].
-//!
-//! [here]: https://github.com/hyperium/tonic/blob/master/examples/src/health/server.rs
-
-#![doc(
- html_logo_url = "https://raw.githubusercontent.com/tokio-rs/website/master/public/img/icons/tonic.svg"
-)]
-#![doc(issue_tracker_base_url = "https://github.com/hyperium/tonic/issues/")]
-#![doc(test(no_crate_inject, attr(deny(rust_2018_idioms))))]
-#![cfg_attr(docsrs, feature(doc_auto_cfg))]
-
-use std::fmt::{Display, Formatter};
-
-mod generated {
- #![allow(unreachable_pub)]
- #![allow(missing_docs)]
- #[rustfmt::skip]
- pub mod grpc_health_v1;
- #[rustfmt::skip]
- pub mod grpc_health_v1_fds;
-
- pub use grpc_health_v1_fds::FILE_DESCRIPTOR_SET;
-
- #[cfg(test)]
- mod tests {
- use super::FILE_DESCRIPTOR_SET;
- use prost::Message as _;
-
- #[test]
- fn file_descriptor_set_is_valid() {
- prost_types::FileDescriptorSet::decode(FILE_DESCRIPTOR_SET).unwrap();
- }
- }
-}
-
-/// Generated protobuf types from the `grpc.health.v1` package.
-pub mod pb {
- pub use crate::generated::{grpc_health_v1::*, FILE_DESCRIPTOR_SET};
-}
-
-pub mod server;
-
-/// An enumeration of values representing gRPC service health.
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum ServingStatus {
- /// Unknown status
- Unknown,
- /// The service is currently up and serving requests.
- Serving,
- /// The service is currently down and not serving requests.
- NotServing,
-}
-
-impl Display for ServingStatus {
- fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
- match self {
- ServingStatus::Unknown => f.write_str("Unknown"),
- ServingStatus::Serving => f.write_str("Serving"),
- ServingStatus::NotServing => f.write_str("NotServing"),
- }
- }
-}
-
-impl From<ServingStatus> for pb::health_check_response::ServingStatus {
- fn from(s: ServingStatus) -> Self {
- match s {
- ServingStatus::Unknown => pb::health_check_response::ServingStatus::Unknown,
- ServingStatus::Serving => pb::health_check_response::ServingStatus::Serving,
- ServingStatus::NotServing => pb::health_check_response::ServingStatus::NotServing,
- }
- }
-}
diff --git a/vendor/tonic-health/src/server.rs b/vendor/tonic-health/src/server.rs
deleted file mode 100644
index 1a4d73e7..00000000
--- a/vendor/tonic-health/src/server.rs
+++ /dev/null
@@ -1,353 +0,0 @@
-//! Contains all healthcheck based server utilities.
-
-use crate::pb::health_server::{Health, HealthServer};
-use crate::pb::{HealthCheckRequest, HealthCheckResponse};
-use crate::ServingStatus;
-use std::collections::HashMap;
-use std::fmt;
-use std::sync::Arc;
-use tokio::sync::{watch, RwLock};
-use tokio_stream::Stream;
-use tonic::{server::NamedService, Request, Response, Status};
-
-/// Creates a `HealthReporter` and a linked `HealthServer` pair. Together,
-/// these types can be used to serve the gRPC Health Checking service.
-///
-/// A `HealthReporter` is used to update the state of gRPC services.
-///
-/// A `HealthServer` is a Tonic gRPC server for the `grpc.health.v1.Health`,
-/// which can be added to a Tonic runtime using `add_service` on the runtime
-/// builder.
-pub fn health_reporter() -> (HealthReporter, HealthServer<impl Health>) {
- let reporter = HealthReporter::new();
- let service = HealthService::new(reporter.statuses.clone());
- let server = HealthServer::new(service);
-
- (reporter, server)
-}
-
-type StatusPair = (watch::Sender<ServingStatus>, watch::Receiver<ServingStatus>);
-
-/// A handle providing methods to update the health status of gRPC services. A
-/// `HealthReporter` is connected to a `HealthServer` which serves the statuses
-/// over the `grpc.health.v1.Health` service.
-#[derive(Clone, Debug)]
-pub struct HealthReporter {
- statuses: Arc<RwLock<HashMap<String, StatusPair>>>,
-}
-
-impl HealthReporter {
- /// Create a new HealthReporter with an initial service (named ""), corresponding to overall server health
- pub fn new() -> Self {
- // According to the gRPC Health Check specification, the empty service "" corresponds to the overall server health
- let server_status = ("".to_string(), watch::channel(ServingStatus::Serving));
-
- let statuses = Arc::new(RwLock::new(HashMap::from([server_status])));
-
- HealthReporter { statuses }
- }
-
- /// Sets the status of the service implemented by `S` to `Serving`. This notifies any watchers
- /// if there is a change in status.
- pub async fn set_serving<S>(&self)
- where
- S: NamedService,
- {
- let service_name = <S as NamedService>::NAME;
- self.set_service_status(service_name, ServingStatus::Serving)
- .await;
- }
-
- /// Sets the status of the service implemented by `S` to `NotServing`. This notifies any watchers
- /// if there is a change in status.
- pub async fn set_not_serving<S>(&self)
- where
- S: NamedService,
- {
- let service_name = <S as NamedService>::NAME;
- self.set_service_status(service_name, ServingStatus::NotServing)
- .await;
- }
-
- /// Sets the status of the service with `service_name` to `status`. This notifies any watchers
- /// if there is a change in status.
- pub async fn set_service_status<S>(&self, service_name: S, status: ServingStatus)
- where
- S: AsRef<str>,
- {
- let service_name = service_name.as_ref();
- let mut writer = self.statuses.write().await;
- match writer.get(service_name) {
- Some((tx, _)) => {
- // We only ever hand out clones of the receiver, so the originally-created
- // receiver should always be present, only being dropped when clearing the
- // service status. Consequently, `tx.send` should not fail, making use
- // of `expect` here safe.
- tx.send(status).expect("channel should not be closed");
- }
- None => {
- writer.insert(service_name.to_string(), watch::channel(status));
- }
- };
- }
-
- /// Clear the status of the given service.
- pub async fn clear_service_status(&mut self, service_name: &str) {
- let mut writer = self.statuses.write().await;
- let _ = writer.remove(service_name);
- }
-}
-
-impl Default for HealthReporter {
- fn default() -> Self {
- Self::new()
- }
-}
-
-/// A service providing implementations of gRPC health checking protocol.
-#[derive(Debug)]
-pub struct HealthService {
- statuses: Arc<RwLock<HashMap<String, StatusPair>>>,
-}
-
-impl HealthService {
- fn new(services: Arc<RwLock<HashMap<String, StatusPair>>>) -> Self {
- HealthService { statuses: services }
- }
-
- /// Create a HealthService, carrying across the statuses from an existing HealthReporter
- pub fn from_health_reporter(health_reporter: HealthReporter) -> Self {
- Self::new(health_reporter.statuses)
- }
-
- async fn service_health(&self, service_name: &str) -> Option<ServingStatus> {
- let reader = self.statuses.read().await;
- reader.get(service_name).map(|p| *p.1.borrow())
- }
-}
-
-#[tonic::async_trait]
-impl Health for HealthService {
- async fn check(
- &self,
- request: Request<HealthCheckRequest>,
- ) -> Result<Response<HealthCheckResponse>, Status> {
- let service_name = request.get_ref().service.as_str();
- let Some(status) = self.service_health(service_name).await else {
- return Err(Status::not_found("service not registered"));
- };
-
- Ok(Response::new(HealthCheckResponse::new(status)))
- }
-
- type WatchStream = WatchStream;
-
- async fn watch(
- &self,
- request: Request<HealthCheckRequest>,
- ) -> Result<Response<Self::WatchStream>, Status> {
- let service_name = request.get_ref().service.as_str();
- let status_rx = match self.statuses.read().await.get(service_name) {
- Some((_tx, rx)) => rx.clone(),
- None => return Err(Status::not_found("service not registered")),
- };
-
- Ok(Response::new(WatchStream::new(status_rx)))
- }
-}
-
-/// A watch stream for the health service.
-pub struct WatchStream {
- inner: tokio_stream::wrappers::WatchStream<ServingStatus>,
-}
-
-impl WatchStream {
- fn new(status_rx: watch::Receiver<ServingStatus>) -> Self {
- let inner = tokio_stream::wrappers::WatchStream::new(status_rx);
- Self { inner }
- }
-}
-
-impl Stream for WatchStream {
- type Item = Result<HealthCheckResponse, Status>;
-
- fn poll_next(
- mut self: std::pin::Pin<&mut Self>,
- cx: &mut std::task::Context<'_>,
- ) -> std::task::Poll<Option<Self::Item>> {
- std::pin::Pin::new(&mut self.inner)
- .poll_next(cx)
- .map(|opt| opt.map(|status| Ok(HealthCheckResponse::new(status))))
- }
-}
-
-impl fmt::Debug for WatchStream {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("WatchStream").finish()
- }
-}
-
-impl HealthCheckResponse {
- fn new(status: ServingStatus) -> Self {
- let status = crate::pb::health_check_response::ServingStatus::from(status) as i32;
- Self { status }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use crate::pb::health_server::Health;
- use crate::pb::HealthCheckRequest;
- use crate::server::{HealthReporter, HealthService};
- use crate::ServingStatus;
- use tokio::sync::watch;
- use tokio_stream::StreamExt;
- use tonic::{Code, Request, Status};
-
- fn assert_serving_status(wire: i32, expected: ServingStatus) {
- let expected = crate::pb::health_check_response::ServingStatus::from(expected) as i32;
- assert_eq!(wire, expected);
- }
-
- fn assert_grpc_status(wire: Option<Status>, expected: Code) {
- let wire = wire.expect("status is not None").code();
- assert_eq!(wire, expected);
- }
-
- async fn make_test_service() -> (HealthReporter, HealthService) {
- let health_reporter = HealthReporter::new();
-
- // insert test value
- {
- let mut statuses = health_reporter.statuses.write().await;
- statuses.insert(
- "TestService".to_string(),
- watch::channel(ServingStatus::Unknown),
- );
- }
-
- let health_service = HealthService::new(health_reporter.statuses.clone());
- (health_reporter, health_service)
- }
-
- #[tokio::test]
- async fn test_service_check() {
- let (reporter, service) = make_test_service().await;
-
- // Overall server health
- let resp = service
- .check(Request::new(HealthCheckRequest {
- service: "".to_string(),
- }))
- .await;
- assert!(resp.is_ok());
- let resp = resp.unwrap().into_inner();
- assert_serving_status(resp.status, ServingStatus::Serving);
-
- // Unregistered service
- let resp = service
- .check(Request::new(HealthCheckRequest {
- service: "Unregistered".to_string(),
- }))
- .await;
- assert!(resp.is_err());
- assert_grpc_status(resp.err(), Code::NotFound);
-
- // Registered service - initial state
- let resp = service
- .check(Request::new(HealthCheckRequest {
- service: "TestService".to_string(),
- }))
- .await;
- assert!(resp.is_ok());
- let resp = resp.unwrap().into_inner();
- assert_serving_status(resp.status, ServingStatus::Unknown);
-
- // Registered service - updated state
- reporter
- .set_service_status("TestService", ServingStatus::Serving)
- .await;
- let resp = service
- .check(Request::new(HealthCheckRequest {
- service: "TestService".to_string(),
- }))
- .await;
- assert!(resp.is_ok());
- let resp = resp.unwrap().into_inner();
- assert_serving_status(resp.status, ServingStatus::Serving);
- }
-
- #[tokio::test]
- async fn test_service_watch() {
- let (mut reporter, service) = make_test_service().await;
-
- // Overall server health
- let resp = service
- .watch(Request::new(HealthCheckRequest {
- service: "".to_string(),
- }))
- .await;
- assert!(resp.is_ok());
- let mut resp = resp.unwrap().into_inner();
- let item = resp
- .next()
- .await
- .expect("streamed response is Some")
- .expect("response is ok");
- assert_serving_status(item.status, ServingStatus::Serving);
-
- // Unregistered service
- let resp = service
- .watch(Request::new(HealthCheckRequest {
- service: "Unregistered".to_string(),
- }))
- .await;
- assert!(resp.is_err());
- assert_grpc_status(resp.err(), Code::NotFound);
-
- // Registered service
- let resp = service
- .watch(Request::new(HealthCheckRequest {
- service: "TestService".to_string(),
- }))
- .await;
- assert!(resp.is_ok());
- let mut resp = resp.unwrap().into_inner();
-
- // Registered service - initial state
- let item = resp
- .next()
- .await
- .expect("streamed response is Some")
- .expect("response is ok");
- assert_serving_status(item.status, ServingStatus::Unknown);
-
- // Registered service - updated state
- reporter
- .set_service_status("TestService", ServingStatus::NotServing)
- .await;
-
- let item = resp
- .next()
- .await
- .expect("streamed response is Some")
- .expect("response is ok");
- assert_serving_status(item.status, ServingStatus::NotServing);
-
- // Registered service - updated state
- reporter
- .set_service_status("TestService", ServingStatus::Serving)
- .await;
- let item = resp
- .next()
- .await
- .expect("streamed response is Some")
- .expect("response is ok");
- assert_serving_status(item.status, ServingStatus::Serving);
-
- // De-registered service
- reporter.clear_service_status("TestService").await;
- let item = resp.next().await;
- assert!(item.is_none());
- }
-}
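A minimal sketch of how the reporter/server pair above is typically wired into a tonic runtime; the service name, the commented-out application service, and the bind address are placeholders:

    // Hypothetical wiring of the health service next to an application service.
    use tonic::transport::Server;
    use tonic_health::ServingStatus;

    async fn serve() -> Result<(), Box<dyn std::error::Error>> {
        let (reporter, health_service) = tonic_health::server::health_reporter();

        // The empty service "" (overall health) already defaults to Serving, as shown
        // in HealthReporter::new above; named services are registered explicitly.
        reporter
            .set_service_status("authzd.Authorization", ServingStatus::Serving)
            .await;

        Server::builder()
            .add_service(health_service)
            // .add_service(authorization_service) // placeholder for the real service
            .serve("127.0.0.1:50051".parse()?)
            .await?;
        Ok(())
    }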
diff --git a/vendor/tonic-reflection/.cargo-checksum.json b/vendor/tonic-reflection/.cargo-checksum.json
deleted file mode 100644
index 9884b454..00000000
--- a/vendor/tonic-reflection/.cargo-checksum.json
+++ /dev/null
@@ -1 +0,0 @@
-{"files":{"Cargo.lock":"207196be6696256b109f40ef4a45a13e92c8369fffb15a7316759e5b831b6b61","Cargo.toml":"4ad6d66e3d73a3dca6be6450ee077aba3e350cfb7da7e029e4c300b342f47575","LICENSE":"e24a56698aa6feaf3a02272b3624f9dc255d982970c5ed97ac4525a95056a5b3","README.md":"86e302aa22d4df05f4f4272d25679741c59da3e4399fd51578b45924643eb1d2","proto/reflection_v1.proto":"92fe8aab9378b0b9807b081de1fc743f847262c1fb6d1ca16501f9e91d450b19","proto/reflection_v1alpha.proto":"7834b214ac40e2466295b7c4ddfa738e7e57f4f4cc7a3ee50033f408ea2176fa","src/generated/grpc_reflection_v1.rs":"8a4c51b467d887a406afa7bffd188bb6f11bc1865337e4650f1b17330c3c94ae","src/generated/grpc_reflection_v1alpha.rs":"4c0002b761f7ce7fd23d70dd91238993cd82d456d519eb8b39fa1d789f131323","src/generated/reflection_v1_fds.rs":"24fddd44c7a402972a75c7ebc4699a70d11deacd8938654b343024d6f7f1beec","src/generated/reflection_v1alpha1_fds.rs":"5ecb4122fb0b95322ced972ef57772655b364fcf3ece8f8576ac24e7030daf10","src/lib.rs":"56492411bf72bfe486d6455f0501df2ff7c092f19a52209e11dc5b81140fda01","src/server/mod.rs":"f121a12ec78c78fa40bccf82c376a6fe944adb5d92c56991f2c6f1038eec1e2f","src/server/v1.rs":"7425c63a1ef4aaa52445a4c57a44b8127ae0ba8446f0f8c9c09c016a4eb2c6e9","src/server/v1alpha.rs":"80a8fd0acec7d255a76872e616fe533ff4e898ee01a717265b58244f0c0e3f41","tests/server.rs":"5b237bfe3819b874a09d6466af00ce118eb307c5e000102b66e3608661fa7e61","tests/versions.rs":"c2e031afe3379df11bfaad151f4c11daebc5493b1ffc8624c3edf9052961de11"},"package":"f9687bd5bfeafebdded2356950f278bba8226f0b32109537c4253406e09aafe1"} \ No newline at end of file
diff --git a/vendor/tonic-reflection/Cargo.lock b/vendor/tonic-reflection/Cargo.lock
deleted file mode 100644
index 49e3543a..00000000
--- a/vendor/tonic-reflection/Cargo.lock
+++ /dev/null
@@ -1,728 +0,0 @@
-# This file is automatically @generated by Cargo.
-# It is not intended for manual editing.
-version = 3
-
-[[package]]
-name = "addr2line"
-version = "0.24.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
-dependencies = [
- "gimli",
-]
-
-[[package]]
-name = "adler2"
-version = "2.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
-
-[[package]]
-name = "anyhow"
-version = "1.0.98"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
-
-[[package]]
-name = "async-trait"
-version = "0.1.88"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "atomic-waker"
-version = "1.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
-
-[[package]]
-name = "autocfg"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
-
-[[package]]
-name = "backtrace"
-version = "0.3.74"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
-dependencies = [
- "addr2line",
- "cfg-if",
- "libc",
- "miniz_oxide",
- "object",
- "rustc-demangle",
- "windows-targets",
-]
-
-[[package]]
-name = "base64"
-version = "0.22.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
-
-[[package]]
-name = "bytes"
-version = "1.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
-
-[[package]]
-name = "cfg-if"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
-
-[[package]]
-name = "either"
-version = "1.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
-
-[[package]]
-name = "equivalent"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
-
-[[package]]
-name = "fnv"
-version = "1.0.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
-
-[[package]]
-name = "futures-channel"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
-dependencies = [
- "futures-core",
-]
-
-[[package]]
-name = "futures-core"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
-
-[[package]]
-name = "futures-sink"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
-
-[[package]]
-name = "futures-task"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
-
-[[package]]
-name = "futures-util"
-version = "0.3.31"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
-dependencies = [
- "futures-core",
- "futures-task",
- "pin-project-lite",
- "pin-utils",
-]
-
-[[package]]
-name = "gimli"
-version = "0.31.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
-
-[[package]]
-name = "h2"
-version = "0.4.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a9421a676d1b147b16b82c9225157dc629087ef8ec4d5e2960f9437a90dac0a5"
-dependencies = [
- "atomic-waker",
- "bytes",
- "fnv",
- "futures-core",
- "futures-sink",
- "http",
- "indexmap",
- "slab",
- "tokio",
- "tokio-util",
- "tracing",
-]
-
-[[package]]
-name = "hashbrown"
-version = "0.15.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3"
-
-[[package]]
-name = "http"
-version = "1.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
-dependencies = [
- "bytes",
- "fnv",
- "itoa",
-]
-
-[[package]]
-name = "http-body"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
-dependencies = [
- "bytes",
- "http",
-]
-
-[[package]]
-name = "http-body-util"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
-dependencies = [
- "bytes",
- "futures-core",
- "http",
- "http-body",
- "pin-project-lite",
-]
-
-[[package]]
-name = "httparse"
-version = "1.10.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
-
-[[package]]
-name = "httpdate"
-version = "1.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
-
-[[package]]
-name = "hyper"
-version = "1.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80"
-dependencies = [
- "bytes",
- "futures-channel",
- "futures-util",
- "h2",
- "http",
- "http-body",
- "httparse",
- "httpdate",
- "itoa",
- "pin-project-lite",
- "smallvec",
- "tokio",
- "want",
-]
-
-[[package]]
-name = "hyper-timeout"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
-dependencies = [
- "hyper",
- "hyper-util",
- "pin-project-lite",
- "tokio",
- "tower-service",
-]
-
-[[package]]
-name = "hyper-util"
-version = "0.1.11"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "497bbc33a26fdd4af9ed9c70d63f61cf56a938375fbb32df34db9b1cd6d643f2"
-dependencies = [
- "bytes",
- "futures-channel",
- "futures-util",
- "http",
- "http-body",
- "hyper",
- "libc",
- "pin-project-lite",
- "socket2",
- "tokio",
- "tower-service",
- "tracing",
-]
-
-[[package]]
-name = "indexmap"
-version = "2.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
-dependencies = [
- "equivalent",
- "hashbrown",
-]
-
-[[package]]
-name = "itertools"
-version = "0.14.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
-dependencies = [
- "either",
-]
-
-[[package]]
-name = "itoa"
-version = "1.0.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
-
-[[package]]
-name = "libc"
-version = "0.2.172"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa"
-
-[[package]]
-name = "memchr"
-version = "2.7.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
-
-[[package]]
-name = "miniz_oxide"
-version = "0.8.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a"
-dependencies = [
- "adler2",
-]
-
-[[package]]
-name = "mio"
-version = "1.0.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd"
-dependencies = [
- "libc",
- "wasi",
- "windows-sys",
-]
-
-[[package]]
-name = "object"
-version = "0.36.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
-dependencies = [
- "memchr",
-]
-
-[[package]]
-name = "once_cell"
-version = "1.21.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
-
-[[package]]
-name = "percent-encoding"
-version = "2.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
-
-[[package]]
-name = "pin-project"
-version = "1.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a"
-dependencies = [
- "pin-project-internal",
-]
-
-[[package]]
-name = "pin-project-internal"
-version = "1.1.10"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "pin-project-lite"
-version = "0.2.16"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
-
-[[package]]
-name = "pin-utils"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
-
-[[package]]
-name = "proc-macro2"
-version = "1.0.95"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
-dependencies = [
- "unicode-ident",
-]
-
-[[package]]
-name = "prost"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
-dependencies = [
- "bytes",
- "prost-derive",
-]
-
-[[package]]
-name = "prost-derive"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
-dependencies = [
- "anyhow",
- "itertools",
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "prost-types"
-version = "0.13.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16"
-dependencies = [
- "prost",
-]
-
-[[package]]
-name = "quote"
-version = "1.0.40"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
-dependencies = [
- "proc-macro2",
-]
-
-[[package]]
-name = "rustc-demangle"
-version = "0.1.24"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
-
-[[package]]
-name = "slab"
-version = "0.4.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
-dependencies = [
- "autocfg",
-]
-
-[[package]]
-name = "smallvec"
-version = "1.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9"
-
-[[package]]
-name = "socket2"
-version = "0.5.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef"
-dependencies = [
- "libc",
- "windows-sys",
-]
-
-[[package]]
-name = "syn"
-version = "2.0.101"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf"
-dependencies = [
- "proc-macro2",
- "quote",
- "unicode-ident",
-]
-
-[[package]]
-name = "sync_wrapper"
-version = "1.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
-
-[[package]]
-name = "tokio"
-version = "1.44.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48"
-dependencies = [
- "backtrace",
- "bytes",
- "libc",
- "mio",
- "pin-project-lite",
- "socket2",
- "tokio-macros",
- "windows-sys",
-]
-
-[[package]]
-name = "tokio-macros"
-version = "2.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "tokio-stream"
-version = "0.1.17"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047"
-dependencies = [
- "futures-core",
- "pin-project-lite",
- "tokio",
-]
-
-[[package]]
-name = "tokio-util"
-version = "0.7.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df"
-dependencies = [
- "bytes",
- "futures-core",
- "futures-sink",
- "pin-project-lite",
- "tokio",
-]
-
-[[package]]
-name = "tonic"
-version = "0.13.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e581ba15a835f4d9ea06c55ab1bd4dce26fc53752c69a04aac00703bfb49ba9"
-dependencies = [
- "async-trait",
- "base64",
- "bytes",
- "h2",
- "http",
- "http-body",
- "http-body-util",
- "hyper",
- "hyper-timeout",
- "hyper-util",
- "percent-encoding",
- "pin-project",
- "prost",
- "socket2",
- "tokio",
- "tokio-stream",
- "tower",
- "tower-layer",
- "tower-service",
- "tracing",
-]
-
-[[package]]
-name = "tonic-reflection"
-version = "0.13.1"
-dependencies = [
- "prost",
- "prost-types",
- "tokio",
- "tokio-stream",
- "tonic",
-]
-
-[[package]]
-name = "tower"
-version = "0.5.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
-dependencies = [
- "futures-core",
- "futures-util",
- "indexmap",
- "pin-project-lite",
- "slab",
- "sync_wrapper",
- "tokio",
- "tokio-util",
- "tower-layer",
- "tower-service",
- "tracing",
-]
-
-[[package]]
-name = "tower-layer"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
-
-[[package]]
-name = "tower-service"
-version = "0.3.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
-
-[[package]]
-name = "tracing"
-version = "0.1.41"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
-dependencies = [
- "pin-project-lite",
- "tracing-attributes",
- "tracing-core",
-]
-
-[[package]]
-name = "tracing-attributes"
-version = "0.1.28"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
-]
-
-[[package]]
-name = "tracing-core"
-version = "0.1.33"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
-dependencies = [
- "once_cell",
-]
-
-[[package]]
-name = "try-lock"
-version = "0.2.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
-
-[[package]]
-name = "unicode-ident"
-version = "1.0.18"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
-
-[[package]]
-name = "want"
-version = "0.3.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
-dependencies = [
- "try-lock",
-]
-
-[[package]]
-name = "wasi"
-version = "0.11.0+wasi-snapshot-preview1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
-
-[[package]]
-name = "windows-sys"
-version = "0.52.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
-dependencies = [
- "windows-targets",
-]
-
-[[package]]
-name = "windows-targets"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
-dependencies = [
- "windows_aarch64_gnullvm",
- "windows_aarch64_msvc",
- "windows_i686_gnu",
- "windows_i686_gnullvm",
- "windows_i686_msvc",
- "windows_x86_64_gnu",
- "windows_x86_64_gnullvm",
- "windows_x86_64_msvc",
-]
-
-[[package]]
-name = "windows_aarch64_gnullvm"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
-
-[[package]]
-name = "windows_aarch64_msvc"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
-
-[[package]]
-name = "windows_i686_gnu"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
-
-[[package]]
-name = "windows_i686_gnullvm"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
-
-[[package]]
-name = "windows_i686_msvc"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
-
-[[package]]
-name = "windows_x86_64_gnu"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
-
-[[package]]
-name = "windows_x86_64_gnullvm"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
-
-[[package]]
-name = "windows_x86_64_msvc"
-version = "0.52.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
diff --git a/vendor/tonic-reflection/Cargo.toml b/vendor/tonic-reflection/Cargo.toml
deleted file mode 100644
index 3d0ab5c2..00000000
--- a/vendor/tonic-reflection/Cargo.toml
+++ /dev/null
@@ -1,128 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g., crates.io) dependencies.
-#
-# If you are reading this file be aware that the original Cargo.toml
-# will likely look very different (and much more reasonable).
-# See Cargo.toml.orig for the original contents.
-
-[package]
-edition = "2021"
-rust-version = "1.75"
-name = "tonic-reflection"
-version = "0.13.1"
-authors = [
- "James Nugent <james@jen20.com>",
- "Samani G. Gikandi <samani@gojulas.com>",
-]
-build = false
-autolib = false
-autobins = false
-autoexamples = false
-autotests = false
-autobenches = false
-description = """
-Server Reflection module of `tonic` gRPC implementation.
-"""
-homepage = "https://github.com/hyperium/tonic"
-readme = "README.md"
-keywords = [
- "rpc",
- "grpc",
- "async",
- "reflection",
-]
-categories = [
- "network-programming",
- "asynchronous",
-]
-license = "MIT"
-repository = "https://github.com/hyperium/tonic"
-
-[package.metadata.cargo_check_external_types]
-allowed_external_types = [
- "tonic::*",
- "bytes::*",
- "http::*",
- "http_body::*",
- "prost::*",
- "prost_types::*",
- "futures_core::stream::Stream",
- "tower_service::Service",
-]
-
-[package.metadata.docs.rs]
-all-features = true
-
-[features]
-default = ["server"]
-server = [
- "dep:prost-types",
- "dep:tokio",
- "dep:tokio-stream",
-]
-
-[lib]
-name = "tonic_reflection"
-path = "src/lib.rs"
-
-[[test]]
-name = "server"
-path = "tests/server.rs"
-
-[[test]]
-name = "versions"
-path = "tests/versions.rs"
-
-[dependencies.prost]
-version = "0.13"
-
-[dependencies.prost-types]
-version = "0.13"
-optional = true
-
-[dependencies.tokio]
-version = "1.0"
-features = [
- "sync",
- "rt",
-]
-optional = true
-
-[dependencies.tokio-stream]
-version = "0.1"
-optional = true
-default-features = false
-
-[dependencies.tonic]
-version = "0.13.0"
-features = [
- "codegen",
- "prost",
-]
-default-features = false
-
-[dev-dependencies.tokio-stream]
-version = "0.1"
-features = ["net"]
-default-features = false
-
-[dev-dependencies.tonic]
-version = "0.13.0"
-features = ["transport"]
-default-features = false
-
-[lints.clippy]
-uninlined_format_args = "deny"
-
-[lints.rust]
-missing_debug_implementations = "warn"
-missing_docs = "warn"
-rust_2018_idioms = "warn"
-unreachable_pub = "warn"
-
-[lints.rustdoc]
-broken_intra_doc_links = "deny"
diff --git a/vendor/tonic-reflection/LICENSE b/vendor/tonic-reflection/LICENSE
deleted file mode 100644
index c7f571db..00000000
--- a/vendor/tonic-reflection/LICENSE
+++ /dev/null
@@ -1,19 +0,0 @@
-Copyright (c) 2025 Lucio Franco
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/vendor/tonic-reflection/README.md b/vendor/tonic-reflection/README.md
deleted file mode 100644
index b4f29f59..00000000
--- a/vendor/tonic-reflection/README.md
+++ /dev/null
@@ -1,3 +0,0 @@
-# tonic-reflection
-
-A `tonic` based gRPC reflection implementation.
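The reflection crate removed below is typically paired with descriptor sets such as the health one above; a rough sketch, assuming tonic-reflection's Builder API (configure, register_encoded_file_descriptor_set, and a build_v1 that returns a Result):

    // Rough sketch: expose the health proto over gRPC server reflection.
    // Method names are assumptions based on recent tonic-reflection versions.
    use tonic::transport::Server;

    async fn serve_with_reflection() -> Result<(), Box<dyn std::error::Error>> {
        let reflection = tonic_reflection::server::Builder::configure()
            .register_encoded_file_descriptor_set(tonic_health::pb::FILE_DESCRIPTOR_SET)
            .build_v1()?; // assumed to return a Result here

        let (_reporter, health_service) = tonic_health::server::health_reporter();

        Server::builder()
            .add_service(reflection)
            .add_service(health_service)
            .serve("127.0.0.1:50051".parse()?)
            .await?;
        Ok(())
    }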
diff --git a/vendor/tonic-reflection/proto/reflection_v1.proto b/vendor/tonic-reflection/proto/reflection_v1.proto
deleted file mode 100644
index 1a2ceedc..00000000
--- a/vendor/tonic-reflection/proto/reflection_v1.proto
+++ /dev/null
@@ -1,147 +0,0 @@
-// Copyright 2016 The gRPC Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Service exported by server reflection. A more complete description of how
-// server reflection works can be found at
-// https://github.com/grpc/grpc/blob/master/doc/server-reflection.md
-//
-// The canonical version of this proto can be found at
-// https://github.com/grpc/grpc-proto/blob/master/grpc/reflection/v1/reflection.proto
-
-syntax = "proto3";
-
-package grpc.reflection.v1;
-
-option go_package = "google.golang.org/grpc/reflection/grpc_reflection_v1";
-option java_multiple_files = true;
-option java_package = "io.grpc.reflection.v1";
-option java_outer_classname = "ServerReflectionProto";
-
-service ServerReflection {
- // The reflection service is structured as a bidirectional stream, ensuring
- // all related requests go to a single server.
- rpc ServerReflectionInfo(stream ServerReflectionRequest)
- returns (stream ServerReflectionResponse);
-}
-
-// The message sent by the client when calling ServerReflectionInfo method.
-message ServerReflectionRequest {
- string host = 1;
- // To use reflection service, the client should set one of the following
- // fields in message_request. The server distinguishes requests by their
- // defined field and then handles them using corresponding methods.
- oneof message_request {
- // Find a proto file by the file name.
- string file_by_filename = 3;
-
- // Find the proto file that declares the given fully-qualified symbol name.
- // This field should be a fully-qualified symbol name
- // (e.g. <package>.<service>[.<method>] or <package>.<type>).
- string file_containing_symbol = 4;
-
- // Find the proto file which defines an extension extending the given
- // message type with the given field number.
- ExtensionRequest file_containing_extension = 5;
-
- // Finds the tag numbers used by all known extensions of the given message
- // type, and appends them to ExtensionNumberResponse in an undefined order.
- // Its corresponding method is best-effort: it's not guaranteed that the
- // reflection service will implement this method, and it's not guaranteed
- // that this method will provide all extensions. Returns
- // StatusCode::UNIMPLEMENTED if it's not implemented.
- // This field should be a fully-qualified type name. The format is
- // <package>.<type>
- string all_extension_numbers_of_type = 6;
-
- // List the full names of registered services. The content will not be
- // checked.
- string list_services = 7;
- }
-}
-
-// The type name and extension number sent by the client when requesting
-// file_containing_extension.
-message ExtensionRequest {
- // Fully-qualified type name. The format should be <package>.<type>
- string containing_type = 1;
- int32 extension_number = 2;
-}
-
-// The message sent by the server to answer ServerReflectionInfo method.
-message ServerReflectionResponse {
- string valid_host = 1;
- ServerReflectionRequest original_request = 2;
- // The server sets one of the following fields according to the message_request
- // in the request.
- oneof message_response {
- // This message is used to answer file_by_filename, file_containing_symbol,
- // file_containing_extension requests with transitive dependencies.
- // As the repeated label is not allowed in oneof fields, we use a
- // FileDescriptorResponse message to encapsulate the repeated fields.
- // The reflection service is allowed to avoid sending FileDescriptorProtos
- // that were previously sent in response to earlier requests in the stream.
- FileDescriptorResponse file_descriptor_response = 4;
-
- // This message is used to answer all_extension_numbers_of_type requests.
- ExtensionNumberResponse all_extension_numbers_response = 5;
-
- // This message is used to answer list_services requests.
- ListServiceResponse list_services_response = 6;
-
- // This message is used when an error occurs.
- ErrorResponse error_response = 7;
- }
-}
-
-// Serialized FileDescriptorProto messages sent by the server answering
-// a file_by_filename, file_containing_symbol, or file_containing_extension
-// request.
-message FileDescriptorResponse {
- // Serialized FileDescriptorProto messages. We avoid taking a dependency on
- // descriptor.proto, which uses proto2 only features, by making them opaque
- // bytes instead.
- repeated bytes file_descriptor_proto = 1;
-}
-
-// A list of extension numbers sent by the server answering
-// all_extension_numbers_of_type request.
-message ExtensionNumberResponse {
- // Full name of the base type, including the package name. The format
- // is <package>.<type>
- string base_type_name = 1;
- repeated int32 extension_number = 2;
-}
-
-// A list of ServiceResponse sent by the server answering list_services request.
-message ListServiceResponse {
- // The information of each service may be expanded in the future, so we use
- // ServiceResponse message to encapsulate it.
- repeated ServiceResponse service = 1;
-}
-
-// The information of a single service used by ListServiceResponse to answer
-// list_services request.
-message ServiceResponse {
- // Full name of a registered service, including its package name. The format
- // is <package>.<service>
- string name = 1;
-}
-
-// The error code and error message sent by the server when an error occurs.
-message ErrorResponse {
- // This field uses the error codes defined in grpc::StatusCode.
- int32 error_code = 1;
- string error_message = 2;
-}
-
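The proto above defines the bidirectional ServerReflectionInfo stream that the vendored crate exposed. As a minimal sketch of how a client drives it, assuming the generated types are re-exported under `tonic_reflection::pb::v1` (the module path is not shown in this diff) and that a tokio runtime with `tokio-stream` is available:

```rust
use tonic::transport::Channel;
use tonic_reflection::pb::v1::{
    server_reflection_client::ServerReflectionClient,
    server_reflection_request::MessageRequest,
    server_reflection_response::MessageResponse,
    ServerReflectionRequest,
};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Endpoint is illustrative; any tonic server with reflection enabled works.
    let channel = Channel::from_static("http://127.0.0.1:50051").connect().await?;
    let mut client = ServerReflectionClient::new(channel);

    // Send a single list_services request on the bidirectional stream.
    let outbound = tokio_stream::once(ServerReflectionRequest {
        host: String::new(),
        message_request: Some(MessageRequest::ListServices(String::new())),
    });

    let mut inbound = client.server_reflection_info(outbound).await?.into_inner();
    while let Some(reply) = inbound.message().await? {
        if let Some(MessageResponse::ListServicesResponse(list)) = reply.message_response {
            for svc in list.service {
                println!("{}", svc.name);
            }
        }
    }
    Ok(())
}
```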
diff --git a/vendor/tonic-reflection/proto/reflection_v1alpha.proto b/vendor/tonic-reflection/proto/reflection_v1alpha.proto
deleted file mode 100644
index c2da3146..00000000
--- a/vendor/tonic-reflection/proto/reflection_v1alpha.proto
+++ /dev/null
@@ -1,136 +0,0 @@
-// Copyright 2016 gRPC authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Service exported by server reflection
-
-syntax = "proto3";
-
-package grpc.reflection.v1alpha;
-
-service ServerReflection {
- // The reflection service is structured as a bidirectional stream, ensuring
- // all related requests go to a single server.
- rpc ServerReflectionInfo(stream ServerReflectionRequest)
- returns (stream ServerReflectionResponse);
-}
-
-// The message sent by the client when calling ServerReflectionInfo method.
-message ServerReflectionRequest {
- string host = 1;
- // To use reflection service, the client should set one of the following
- // fields in message_request. The server distinguishes requests by their
- // defined field and then handles them using corresponding methods.
- oneof message_request {
- // Find a proto file by the file name.
- string file_by_filename = 3;
-
- // Find the proto file that declares the given fully-qualified symbol name.
- // This field should be a fully-qualified symbol name
- // (e.g. <package>.<service>[.<method>] or <package>.<type>).
- string file_containing_symbol = 4;
-
- // Find the proto file which defines an extension extending the given
- // message type with the given field number.
- ExtensionRequest file_containing_extension = 5;
-
- // Finds the tag numbers used by all known extensions of extendee_type, and
- // appends them to ExtensionNumberResponse in an undefined order.
- // Its corresponding method is best-effort: it's not guaranteed that the
- // reflection service will implement this method, and it's not guaranteed
- // that this method will provide all extensions. Returns
- // StatusCode::UNIMPLEMENTED if it's not implemented.
- // This field should be a fully-qualified type name. The format is
- // <package>.<type>
- string all_extension_numbers_of_type = 6;
-
- // List the full names of registered services. The content will not be
- // checked.
- string list_services = 7;
- }
-}
-
-// The type name and extension number sent by the client when requesting
-// file_containing_extension.
-message ExtensionRequest {
- // Fully-qualified type name. The format should be <package>.<type>
- string containing_type = 1;
- int32 extension_number = 2;
-}
-
-// The message sent by the server to answer ServerReflectionInfo method.
-message ServerReflectionResponse {
- string valid_host = 1;
- ServerReflectionRequest original_request = 2;
- // The server sets one of the following fields according to the
- // message_request in the request.
- oneof message_response {
- // This message is used to answer file_by_filename, file_containing_symbol,
- // file_containing_extension requests with transitive dependencies.
- // As the repeated label is not allowed in oneof fields, we use a
- // FileDescriptorResponse message to encapsulate the repeated fields.
- // The reflection service is allowed to avoid sending FileDescriptorProtos
- // that were previously sent in response to earlier requests in the stream.
- FileDescriptorResponse file_descriptor_response = 4;
-
- // This message is used to answer all_extension_numbers_of_type requests.
- ExtensionNumberResponse all_extension_numbers_response = 5;
-
- // This message is used to answer list_services requests.
- ListServiceResponse list_services_response = 6;
-
- // This message is used when an error occurs.
- ErrorResponse error_response = 7;
- }
-}
-
-// Serialized FileDescriptorProto messages sent by the server answering
-// a file_by_filename, file_containing_symbol, or file_containing_extension
-// request.
-message FileDescriptorResponse {
- // Serialized FileDescriptorProto messages. We avoid taking a dependency on
- // descriptor.proto, which uses proto2 only features, by making them opaque
- // bytes instead.
- repeated bytes file_descriptor_proto = 1;
-}
-
-// A list of extension numbers sent by the server answering
-// all_extension_numbers_of_type request.
-message ExtensionNumberResponse {
- // Full name of the base type, including the package name. The format
- // is <package>.<type>
- string base_type_name = 1;
- repeated int32 extension_number = 2;
-}
-
-// A list of ServiceResponse sent by the server answering list_services request.
-message ListServiceResponse {
- // The information of each service may be expanded in the future, so we use
- // ServiceResponse message to encapsulate it.
- repeated ServiceResponse service = 1;
-}
-
-// The information of a single service used by ListServiceResponse to answer
-// list_services request.
-message ServiceResponse {
- // Full name of a registered service, including its package name. The format
- // is <package>.<service>
- string name = 1;
-}
-
-// The error code and error message sent by the server when an error occurs.
-message ErrorResponse {
- // This field uses the error codes defined in grpc::StatusCode.
- int32 error_code = 1;
- string error_message = 2;
-}
\ No newline at end of file
diff --git a/vendor/tonic-reflection/src/generated/grpc_reflection_v1.rs b/vendor/tonic-reflection/src/generated/grpc_reflection_v1.rs
deleted file mode 100644
index 569ee671..00000000
--- a/vendor/tonic-reflection/src/generated/grpc_reflection_v1.rs
+++ /dev/null
@@ -1,461 +0,0 @@
-// This file is @generated by prost-build.
-/// The message sent by the client when calling ServerReflectionInfo method.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ServerReflectionRequest {
- #[prost(string, tag = "1")]
- pub host: ::prost::alloc::string::String,
- /// To use reflection service, the client should set one of the following
- /// fields in message_request. The server distinguishes requests by their
- /// defined field and then handles them using corresponding methods.
- #[prost(oneof = "server_reflection_request::MessageRequest", tags = "3, 4, 5, 6, 7")]
- pub message_request: ::core::option::Option<
- server_reflection_request::MessageRequest,
- >,
-}
-/// Nested message and enum types in `ServerReflectionRequest`.
-pub mod server_reflection_request {
- /// To use reflection service, the client should set one of the following
- /// fields in message_request. The server distinguishes requests by their
- /// defined field and then handles them using corresponding methods.
- #[derive(Clone, PartialEq, ::prost::Oneof)]
- pub enum MessageRequest {
- /// Find a proto file by the file name.
- #[prost(string, tag = "3")]
- FileByFilename(::prost::alloc::string::String),
- /// Find the proto file that declares the given fully-qualified symbol name.
- /// This field should be a fully-qualified symbol name
- /// (e.g. <package>.<service>\[.<method>\] or <package>.<type>).
- #[prost(string, tag = "4")]
- FileContainingSymbol(::prost::alloc::string::String),
- /// Find the proto file which defines an extension extending the given
- /// message type with the given field number.
- #[prost(message, tag = "5")]
- FileContainingExtension(super::ExtensionRequest),
- /// Finds the tag numbers used by all known extensions of the given message
- /// type, and appends them to ExtensionNumberResponse in an undefined order.
- /// Its corresponding method is best-effort: it's not guaranteed that the
- /// reflection service will implement this method, and it's not guaranteed
- /// that this method will provide all extensions. Returns
- /// StatusCode::UNIMPLEMENTED if it's not implemented.
- /// This field should be a fully-qualified type name. The format is
- /// <package>.<type>
- #[prost(string, tag = "6")]
- AllExtensionNumbersOfType(::prost::alloc::string::String),
- /// List the full names of registered services. The content will not be
- /// checked.
- #[prost(string, tag = "7")]
- ListServices(::prost::alloc::string::String),
- }
-}
-/// The type name and extension number sent by the client when requesting
-/// file_containing_extension.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ExtensionRequest {
- /// Fully-qualified type name. The format should be <package>.<type>
- #[prost(string, tag = "1")]
- pub containing_type: ::prost::alloc::string::String,
- #[prost(int32, tag = "2")]
- pub extension_number: i32,
-}
-/// The message sent by the server to answer ServerReflectionInfo method.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ServerReflectionResponse {
- #[prost(string, tag = "1")]
- pub valid_host: ::prost::alloc::string::String,
- #[prost(message, optional, tag = "2")]
- pub original_request: ::core::option::Option<ServerReflectionRequest>,
- /// The server sets one of the following fields according to the message_request
- /// in the request.
- #[prost(oneof = "server_reflection_response::MessageResponse", tags = "4, 5, 6, 7")]
- pub message_response: ::core::option::Option<
- server_reflection_response::MessageResponse,
- >,
-}
-/// Nested message and enum types in `ServerReflectionResponse`.
-pub mod server_reflection_response {
- /// The server sets one of the following fields according to the message_request
- /// in the request.
- #[derive(Clone, PartialEq, ::prost::Oneof)]
- pub enum MessageResponse {
- /// This message is used to answer file_by_filename, file_containing_symbol,
- /// file_containing_extension requests with transitive dependencies.
- /// As the repeated label is not allowed in oneof fields, we use a
- /// FileDescriptorResponse message to encapsulate the repeated fields.
- /// The reflection service is allowed to avoid sending FileDescriptorProtos
- /// that were previously sent in response to earlier requests in the stream.
- #[prost(message, tag = "4")]
- FileDescriptorResponse(super::FileDescriptorResponse),
- /// This message is used to answer all_extension_numbers_of_type requests.
- #[prost(message, tag = "5")]
- AllExtensionNumbersResponse(super::ExtensionNumberResponse),
- /// This message is used to answer list_services requests.
- #[prost(message, tag = "6")]
- ListServicesResponse(super::ListServiceResponse),
- /// This message is used when an error occurs.
- #[prost(message, tag = "7")]
- ErrorResponse(super::ErrorResponse),
- }
-}
-/// Serialized FileDescriptorProto messages sent by the server answering
-/// a file_by_filename, file_containing_symbol, or file_containing_extension
-/// request.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct FileDescriptorResponse {
- /// Serialized FileDescriptorProto messages. We avoid taking a dependency on
- /// descriptor.proto, which uses proto2 only features, by making them opaque
- /// bytes instead.
- #[prost(bytes = "vec", repeated, tag = "1")]
- pub file_descriptor_proto: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec<u8>>,
-}
-/// A list of extension numbers sent by the server answering
-/// all_extension_numbers_of_type request.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ExtensionNumberResponse {
- /// Full name of the base type, including the package name. The format
- /// is <package>.<type>
- #[prost(string, tag = "1")]
- pub base_type_name: ::prost::alloc::string::String,
- #[prost(int32, repeated, tag = "2")]
- pub extension_number: ::prost::alloc::vec::Vec<i32>,
-}
-/// A list of ServiceResponse sent by the server answering list_services request.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ListServiceResponse {
- /// The information of each service may be expanded in the future, so we use
- /// ServiceResponse message to encapsulate it.
- #[prost(message, repeated, tag = "1")]
- pub service: ::prost::alloc::vec::Vec<ServiceResponse>,
-}
-/// The information of a single service used by ListServiceResponse to answer
-/// list_services request.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ServiceResponse {
- /// Full name of a registered service, including its package name. The format
- /// is <package>.<service>
- #[prost(string, tag = "1")]
- pub name: ::prost::alloc::string::String,
-}
-/// The error code and error message sent by the server when an error occurs.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ErrorResponse {
- /// This field uses the error codes defined in grpc::StatusCode.
- #[prost(int32, tag = "1")]
- pub error_code: i32,
- #[prost(string, tag = "2")]
- pub error_message: ::prost::alloc::string::String,
-}
-/// Generated client implementations.
-pub mod server_reflection_client {
- #![allow(
- unused_variables,
- dead_code,
- missing_docs,
- clippy::wildcard_imports,
- clippy::let_unit_value,
- )]
- use tonic::codegen::*;
- use tonic::codegen::http::Uri;
- #[derive(Debug, Clone)]
- pub struct ServerReflectionClient<T> {
- inner: tonic::client::Grpc<T>,
- }
- impl<T> ServerReflectionClient<T>
- where
- T: tonic::client::GrpcService<tonic::body::Body>,
- T::Error: Into<StdError>,
- T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
- <T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
- {
- pub fn new(inner: T) -> Self {
- let inner = tonic::client::Grpc::new(inner);
- Self { inner }
- }
- pub fn with_origin(inner: T, origin: Uri) -> Self {
- let inner = tonic::client::Grpc::with_origin(inner, origin);
- Self { inner }
- }
- pub fn with_interceptor<F>(
- inner: T,
- interceptor: F,
- ) -> ServerReflectionClient<InterceptedService<T, F>>
- where
- F: tonic::service::Interceptor,
- T::ResponseBody: Default,
- T: tonic::codegen::Service<
- http::Request<tonic::body::Body>,
- Response = http::Response<
- <T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
- >,
- >,
- <T as tonic::codegen::Service<
- http::Request<tonic::body::Body>,
- >>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
- {
- ServerReflectionClient::new(InterceptedService::new(inner, interceptor))
- }
- /// Compress requests with the given encoding.
- ///
- /// This requires the server to support it otherwise it might respond with an
- /// error.
- #[must_use]
- pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.inner = self.inner.send_compressed(encoding);
- self
- }
- /// Enable decompressing responses.
- #[must_use]
- pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.inner = self.inner.accept_compressed(encoding);
- self
- }
- /// Limits the maximum size of a decoded message.
- ///
- /// Default: `4MB`
- #[must_use]
- pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
- self.inner = self.inner.max_decoding_message_size(limit);
- self
- }
- /// Limits the maximum size of an encoded message.
- ///
- /// Default: `usize::MAX`
- #[must_use]
- pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
- self.inner = self.inner.max_encoding_message_size(limit);
- self
- }
- /// The reflection service is structured as a bidirectional stream, ensuring
- /// all related requests go to a single server.
- pub async fn server_reflection_info(
- &mut self,
- request: impl tonic::IntoStreamingRequest<
- Message = super::ServerReflectionRequest,
- >,
- ) -> std::result::Result<
- tonic::Response<tonic::codec::Streaming<super::ServerReflectionResponse>>,
- tonic::Status,
- > {
- self.inner
- .ready()
- .await
- .map_err(|e| {
- tonic::Status::unknown(
- format!("Service was not ready: {}", e.into()),
- )
- })?;
- let codec = tonic::codec::ProstCodec::default();
- let path = http::uri::PathAndQuery::from_static(
- "/grpc.reflection.v1.ServerReflection/ServerReflectionInfo",
- );
- let mut req = request.into_streaming_request();
- req.extensions_mut()
- .insert(
- GrpcMethod::new(
- "grpc.reflection.v1.ServerReflection",
- "ServerReflectionInfo",
- ),
- );
- self.inner.streaming(req, path, codec).await
- }
- }
-}
-/// Generated server implementations.
-pub mod server_reflection_server {
- #![allow(
- unused_variables,
- dead_code,
- missing_docs,
- clippy::wildcard_imports,
- clippy::let_unit_value,
- )]
- use tonic::codegen::*;
- /// Generated trait containing gRPC methods that should be implemented for use with ServerReflectionServer.
- #[async_trait]
- pub trait ServerReflection: std::marker::Send + std::marker::Sync + 'static {
- /// Server streaming response type for the ServerReflectionInfo method.
- type ServerReflectionInfoStream: tonic::codegen::tokio_stream::Stream<
- Item = std::result::Result<
- super::ServerReflectionResponse,
- tonic::Status,
- >,
- >
- + std::marker::Send
- + 'static;
- /// The reflection service is structured as a bidirectional stream, ensuring
- /// all related requests go to a single server.
- async fn server_reflection_info(
- &self,
- request: tonic::Request<tonic::Streaming<super::ServerReflectionRequest>>,
- ) -> std::result::Result<
- tonic::Response<Self::ServerReflectionInfoStream>,
- tonic::Status,
- >;
- }
- #[derive(Debug)]
- pub struct ServerReflectionServer<T> {
- inner: Arc<T>,
- accept_compression_encodings: EnabledCompressionEncodings,
- send_compression_encodings: EnabledCompressionEncodings,
- max_decoding_message_size: Option<usize>,
- max_encoding_message_size: Option<usize>,
- }
- impl<T> ServerReflectionServer<T> {
- pub fn new(inner: T) -> Self {
- Self::from_arc(Arc::new(inner))
- }
- pub fn from_arc(inner: Arc<T>) -> Self {
- Self {
- inner,
- accept_compression_encodings: Default::default(),
- send_compression_encodings: Default::default(),
- max_decoding_message_size: None,
- max_encoding_message_size: None,
- }
- }
- pub fn with_interceptor<F>(
- inner: T,
- interceptor: F,
- ) -> InterceptedService<Self, F>
- where
- F: tonic::service::Interceptor,
- {
- InterceptedService::new(Self::new(inner), interceptor)
- }
- /// Enable decompressing requests with the given encoding.
- #[must_use]
- pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.accept_compression_encodings.enable(encoding);
- self
- }
- /// Compress responses with the given encoding, if the client supports it.
- #[must_use]
- pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.send_compression_encodings.enable(encoding);
- self
- }
- /// Limits the maximum size of a decoded message.
- ///
- /// Default: `4MB`
- #[must_use]
- pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
- self.max_decoding_message_size = Some(limit);
- self
- }
- /// Limits the maximum size of an encoded message.
- ///
- /// Default: `usize::MAX`
- #[must_use]
- pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
- self.max_encoding_message_size = Some(limit);
- self
- }
- }
- impl<T, B> tonic::codegen::Service<http::Request<B>> for ServerReflectionServer<T>
- where
- T: ServerReflection,
- B: Body + std::marker::Send + 'static,
- B::Error: Into<StdError> + std::marker::Send + 'static,
- {
- type Response = http::Response<tonic::body::Body>;
- type Error = std::convert::Infallible;
- type Future = BoxFuture<Self::Response, Self::Error>;
- fn poll_ready(
- &mut self,
- _cx: &mut Context<'_>,
- ) -> Poll<std::result::Result<(), Self::Error>> {
- Poll::Ready(Ok(()))
- }
- fn call(&mut self, req: http::Request<B>) -> Self::Future {
- match req.uri().path() {
- "/grpc.reflection.v1.ServerReflection/ServerReflectionInfo" => {
- #[allow(non_camel_case_types)]
- struct ServerReflectionInfoSvc<T: ServerReflection>(pub Arc<T>);
- impl<
- T: ServerReflection,
- > tonic::server::StreamingService<super::ServerReflectionRequest>
- for ServerReflectionInfoSvc<T> {
- type Response = super::ServerReflectionResponse;
- type ResponseStream = T::ServerReflectionInfoStream;
- type Future = BoxFuture<
- tonic::Response<Self::ResponseStream>,
- tonic::Status,
- >;
- fn call(
- &mut self,
- request: tonic::Request<
- tonic::Streaming<super::ServerReflectionRequest>,
- >,
- ) -> Self::Future {
- let inner = Arc::clone(&self.0);
- let fut = async move {
- <T as ServerReflection>::server_reflection_info(
- &inner,
- request,
- )
- .await
- };
- Box::pin(fut)
- }
- }
- let accept_compression_encodings = self.accept_compression_encodings;
- let send_compression_encodings = self.send_compression_encodings;
- let max_decoding_message_size = self.max_decoding_message_size;
- let max_encoding_message_size = self.max_encoding_message_size;
- let inner = self.inner.clone();
- let fut = async move {
- let method = ServerReflectionInfoSvc(inner);
- let codec = tonic::codec::ProstCodec::default();
- let mut grpc = tonic::server::Grpc::new(codec)
- .apply_compression_config(
- accept_compression_encodings,
- send_compression_encodings,
- )
- .apply_max_message_size_config(
- max_decoding_message_size,
- max_encoding_message_size,
- );
- let res = grpc.streaming(method, req).await;
- Ok(res)
- };
- Box::pin(fut)
- }
- _ => {
- Box::pin(async move {
- let mut response = http::Response::new(
- tonic::body::Body::default(),
- );
- let headers = response.headers_mut();
- headers
- .insert(
- tonic::Status::GRPC_STATUS,
- (tonic::Code::Unimplemented as i32).into(),
- );
- headers
- .insert(
- http::header::CONTENT_TYPE,
- tonic::metadata::GRPC_CONTENT_TYPE,
- );
- Ok(response)
- })
- }
- }
- }
- }
- impl<T> Clone for ServerReflectionServer<T> {
- fn clone(&self) -> Self {
- let inner = self.inner.clone();
- Self {
- inner,
- accept_compression_encodings: self.accept_compression_encodings,
- send_compression_encodings: self.send_compression_encodings,
- max_decoding_message_size: self.max_decoding_message_size,
- max_encoding_message_size: self.max_encoding_message_size,
- }
- }
- }
- /// Generated gRPC service name
- pub const SERVICE_NAME: &str = "grpc.reflection.v1.ServerReflection";
- impl<T> tonic::server::NamedService for ServerReflectionServer<T> {
- const NAME: &'static str = SERVICE_NAME;
- }
-}
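On the server side, the generated ServerReflectionServer is normally not implemented by hand; the crate's server::Builder wraps it around a registered descriptor set. A rough sketch, assuming the Builder API of the vendored version (`configure`, `register_encoded_file_descriptor_set`, `build_v1` are assumptions, not taken from this diff) and an application-provided descriptor set:

```rust
use tonic::transport::Server;

// Placeholder for the application's own serialized FileDescriptorSet
// (e.g. emitted by prost-build); it is not defined anywhere in this diff.
const APP_DESCRIPTOR_SET: &[u8] = &[];

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Builder method names are assumptions about the vendored tonic-reflection API.
    let reflection = tonic_reflection::server::Builder::configure()
        .register_encoded_file_descriptor_set(APP_DESCRIPTOR_SET)
        .build_v1()?;

    Server::builder()
        .add_service(reflection)
        .serve("127.0.0.1:50051".parse()?)
        .await?;
    Ok(())
}
```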
diff --git a/vendor/tonic-reflection/src/generated/grpc_reflection_v1alpha.rs b/vendor/tonic-reflection/src/generated/grpc_reflection_v1alpha.rs
deleted file mode 100644
index 685d9b0a..00000000
--- a/vendor/tonic-reflection/src/generated/grpc_reflection_v1alpha.rs
+++ /dev/null
@@ -1,461 +0,0 @@
-// This file is @generated by prost-build.
-/// The message sent by the client when calling ServerReflectionInfo method.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ServerReflectionRequest {
- #[prost(string, tag = "1")]
- pub host: ::prost::alloc::string::String,
- /// To use reflection service, the client should set one of the following
- /// fields in message_request. The server distinguishes requests by their
- /// defined field and then handles them using corresponding methods.
- #[prost(oneof = "server_reflection_request::MessageRequest", tags = "3, 4, 5, 6, 7")]
- pub message_request: ::core::option::Option<
- server_reflection_request::MessageRequest,
- >,
-}
-/// Nested message and enum types in `ServerReflectionRequest`.
-pub mod server_reflection_request {
- /// To use reflection service, the client should set one of the following
- /// fields in message_request. The server distinguishes requests by their
- /// defined field and then handles them using corresponding methods.
- #[derive(Clone, PartialEq, ::prost::Oneof)]
- pub enum MessageRequest {
- /// Find a proto file by the file name.
- #[prost(string, tag = "3")]
- FileByFilename(::prost::alloc::string::String),
- /// Find the proto file that declares the given fully-qualified symbol name.
- /// This field should be a fully-qualified symbol name
- /// (e.g. <package>.<service>\[.<method>\] or <package>.<type>).
- #[prost(string, tag = "4")]
- FileContainingSymbol(::prost::alloc::string::String),
- /// Find the proto file which defines an extension extending the given
- /// message type with the given field number.
- #[prost(message, tag = "5")]
- FileContainingExtension(super::ExtensionRequest),
- /// Finds the tag numbers used by all known extensions of extendee_type, and
- /// appends them to ExtensionNumberResponse in an undefined order.
- /// Its corresponding method is best-effort: it's not guaranteed that the
- /// reflection service will implement this method, and it's not guaranteed
- /// that this method will provide all extensions. Returns
- /// StatusCode::UNIMPLEMENTED if it's not implemented.
- /// This field should be a fully-qualified type name. The format is
- /// <package>.<type>
- #[prost(string, tag = "6")]
- AllExtensionNumbersOfType(::prost::alloc::string::String),
- /// List the full names of registered services. The content will not be
- /// checked.
- #[prost(string, tag = "7")]
- ListServices(::prost::alloc::string::String),
- }
-}
-/// The type name and extension number sent by the client when requesting
-/// file_containing_extension.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ExtensionRequest {
- /// Fully-qualified type name. The format should be <package>.<type>
- #[prost(string, tag = "1")]
- pub containing_type: ::prost::alloc::string::String,
- #[prost(int32, tag = "2")]
- pub extension_number: i32,
-}
-/// The message sent by the server to answer ServerReflectionInfo method.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ServerReflectionResponse {
- #[prost(string, tag = "1")]
- pub valid_host: ::prost::alloc::string::String,
- #[prost(message, optional, tag = "2")]
- pub original_request: ::core::option::Option<ServerReflectionRequest>,
- /// The server sets one of the following fields according to the
- /// message_request in the request.
- #[prost(oneof = "server_reflection_response::MessageResponse", tags = "4, 5, 6, 7")]
- pub message_response: ::core::option::Option<
- server_reflection_response::MessageResponse,
- >,
-}
-/// Nested message and enum types in `ServerReflectionResponse`.
-pub mod server_reflection_response {
- /// The server sets one of the following fields according to the
- /// message_request in the request.
- #[derive(Clone, PartialEq, ::prost::Oneof)]
- pub enum MessageResponse {
- /// This message is used to answer file_by_filename, file_containing_symbol,
- /// file_containing_extension requests with transitive dependencies.
- /// As the repeated label is not allowed in oneof fields, we use a
- /// FileDescriptorResponse message to encapsulate the repeated fields.
- /// The reflection service is allowed to avoid sending FileDescriptorProtos
- /// that were previously sent in response to earlier requests in the stream.
- #[prost(message, tag = "4")]
- FileDescriptorResponse(super::FileDescriptorResponse),
- /// This message is used to answer all_extension_numbers_of_type requests.
- #[prost(message, tag = "5")]
- AllExtensionNumbersResponse(super::ExtensionNumberResponse),
- /// This message is used to answer list_services requests.
- #[prost(message, tag = "6")]
- ListServicesResponse(super::ListServiceResponse),
- /// This message is used when an error occurs.
- #[prost(message, tag = "7")]
- ErrorResponse(super::ErrorResponse),
- }
-}
-/// Serialized FileDescriptorProto messages sent by the server answering
-/// a file_by_filename, file_containing_symbol, or file_containing_extension
-/// request.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct FileDescriptorResponse {
- /// Serialized FileDescriptorProto messages. We avoid taking a dependency on
- /// descriptor.proto, which uses proto2 only features, by making them opaque
- /// bytes instead.
- #[prost(bytes = "vec", repeated, tag = "1")]
- pub file_descriptor_proto: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec<u8>>,
-}
-/// A list of extension numbers sent by the server answering
-/// all_extension_numbers_of_type request.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ExtensionNumberResponse {
- /// Full name of the base type, including the package name. The format
- /// is <package>.<type>
- #[prost(string, tag = "1")]
- pub base_type_name: ::prost::alloc::string::String,
- #[prost(int32, repeated, tag = "2")]
- pub extension_number: ::prost::alloc::vec::Vec<i32>,
-}
-/// A list of ServiceResponse sent by the server answering list_services request.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ListServiceResponse {
- /// The information of each service may be expanded in the future, so we use
- /// ServiceResponse message to encapsulate it.
- #[prost(message, repeated, tag = "1")]
- pub service: ::prost::alloc::vec::Vec<ServiceResponse>,
-}
-/// The information of a single service used by ListServiceResponse to answer
-/// list_services request.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ServiceResponse {
- /// Full name of a registered service, including its package name. The format
- /// is <package>.<service>
- #[prost(string, tag = "1")]
- pub name: ::prost::alloc::string::String,
-}
-/// The error code and error message sent by the server when an error occurs.
-#[derive(Clone, PartialEq, ::prost::Message)]
-pub struct ErrorResponse {
- /// This field uses the error codes defined in grpc::StatusCode.
- #[prost(int32, tag = "1")]
- pub error_code: i32,
- #[prost(string, tag = "2")]
- pub error_message: ::prost::alloc::string::String,
-}
-/// Generated client implementations.
-pub mod server_reflection_client {
- #![allow(
- unused_variables,
- dead_code,
- missing_docs,
- clippy::wildcard_imports,
- clippy::let_unit_value,
- )]
- use tonic::codegen::*;
- use tonic::codegen::http::Uri;
- #[derive(Debug, Clone)]
- pub struct ServerReflectionClient<T> {
- inner: tonic::client::Grpc<T>,
- }
- impl<T> ServerReflectionClient<T>
- where
- T: tonic::client::GrpcService<tonic::body::Body>,
- T::Error: Into<StdError>,
- T::ResponseBody: Body<Data = Bytes> + std::marker::Send + 'static,
- <T::ResponseBody as Body>::Error: Into<StdError> + std::marker::Send,
- {
- pub fn new(inner: T) -> Self {
- let inner = tonic::client::Grpc::new(inner);
- Self { inner }
- }
- pub fn with_origin(inner: T, origin: Uri) -> Self {
- let inner = tonic::client::Grpc::with_origin(inner, origin);
- Self { inner }
- }
- pub fn with_interceptor<F>(
- inner: T,
- interceptor: F,
- ) -> ServerReflectionClient<InterceptedService<T, F>>
- where
- F: tonic::service::Interceptor,
- T::ResponseBody: Default,
- T: tonic::codegen::Service<
- http::Request<tonic::body::Body>,
- Response = http::Response<
- <T as tonic::client::GrpcService<tonic::body::Body>>::ResponseBody,
- >,
- >,
- <T as tonic::codegen::Service<
- http::Request<tonic::body::Body>,
- >>::Error: Into<StdError> + std::marker::Send + std::marker::Sync,
- {
- ServerReflectionClient::new(InterceptedService::new(inner, interceptor))
- }
- /// Compress requests with the given encoding.
- ///
- /// This requires the server to support it otherwise it might respond with an
- /// error.
- #[must_use]
- pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.inner = self.inner.send_compressed(encoding);
- self
- }
- /// Enable decompressing responses.
- #[must_use]
- pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.inner = self.inner.accept_compressed(encoding);
- self
- }
- /// Limits the maximum size of a decoded message.
- ///
- /// Default: `4MB`
- #[must_use]
- pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
- self.inner = self.inner.max_decoding_message_size(limit);
- self
- }
- /// Limits the maximum size of an encoded message.
- ///
- /// Default: `usize::MAX`
- #[must_use]
- pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
- self.inner = self.inner.max_encoding_message_size(limit);
- self
- }
- /// The reflection service is structured as a bidirectional stream, ensuring
- /// all related requests go to a single server.
- pub async fn server_reflection_info(
- &mut self,
- request: impl tonic::IntoStreamingRequest<
- Message = super::ServerReflectionRequest,
- >,
- ) -> std::result::Result<
- tonic::Response<tonic::codec::Streaming<super::ServerReflectionResponse>>,
- tonic::Status,
- > {
- self.inner
- .ready()
- .await
- .map_err(|e| {
- tonic::Status::unknown(
- format!("Service was not ready: {}", e.into()),
- )
- })?;
- let codec = tonic::codec::ProstCodec::default();
- let path = http::uri::PathAndQuery::from_static(
- "/grpc.reflection.v1alpha.ServerReflection/ServerReflectionInfo",
- );
- let mut req = request.into_streaming_request();
- req.extensions_mut()
- .insert(
- GrpcMethod::new(
- "grpc.reflection.v1alpha.ServerReflection",
- "ServerReflectionInfo",
- ),
- );
- self.inner.streaming(req, path, codec).await
- }
- }
-}
-/// Generated server implementations.
-pub mod server_reflection_server {
- #![allow(
- unused_variables,
- dead_code,
- missing_docs,
- clippy::wildcard_imports,
- clippy::let_unit_value,
- )]
- use tonic::codegen::*;
- /// Generated trait containing gRPC methods that should be implemented for use with ServerReflectionServer.
- #[async_trait]
- pub trait ServerReflection: std::marker::Send + std::marker::Sync + 'static {
- /// Server streaming response type for the ServerReflectionInfo method.
- type ServerReflectionInfoStream: tonic::codegen::tokio_stream::Stream<
- Item = std::result::Result<
- super::ServerReflectionResponse,
- tonic::Status,
- >,
- >
- + std::marker::Send
- + 'static;
- /// The reflection service is structured as a bidirectional stream, ensuring
- /// all related requests go to a single server.
- async fn server_reflection_info(
- &self,
- request: tonic::Request<tonic::Streaming<super::ServerReflectionRequest>>,
- ) -> std::result::Result<
- tonic::Response<Self::ServerReflectionInfoStream>,
- tonic::Status,
- >;
- }
- #[derive(Debug)]
- pub struct ServerReflectionServer<T> {
- inner: Arc<T>,
- accept_compression_encodings: EnabledCompressionEncodings,
- send_compression_encodings: EnabledCompressionEncodings,
- max_decoding_message_size: Option<usize>,
- max_encoding_message_size: Option<usize>,
- }
- impl<T> ServerReflectionServer<T> {
- pub fn new(inner: T) -> Self {
- Self::from_arc(Arc::new(inner))
- }
- pub fn from_arc(inner: Arc<T>) -> Self {
- Self {
- inner,
- accept_compression_encodings: Default::default(),
- send_compression_encodings: Default::default(),
- max_decoding_message_size: None,
- max_encoding_message_size: None,
- }
- }
- pub fn with_interceptor<F>(
- inner: T,
- interceptor: F,
- ) -> InterceptedService<Self, F>
- where
- F: tonic::service::Interceptor,
- {
- InterceptedService::new(Self::new(inner), interceptor)
- }
- /// Enable decompressing requests with the given encoding.
- #[must_use]
- pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.accept_compression_encodings.enable(encoding);
- self
- }
- /// Compress responses with the given encoding, if the client supports it.
- #[must_use]
- pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self {
- self.send_compression_encodings.enable(encoding);
- self
- }
- /// Limits the maximum size of a decoded message.
- ///
- /// Default: `4MB`
- #[must_use]
- pub fn max_decoding_message_size(mut self, limit: usize) -> Self {
- self.max_decoding_message_size = Some(limit);
- self
- }
- /// Limits the maximum size of an encoded message.
- ///
- /// Default: `usize::MAX`
- #[must_use]
- pub fn max_encoding_message_size(mut self, limit: usize) -> Self {
- self.max_encoding_message_size = Some(limit);
- self
- }
- }
- impl<T, B> tonic::codegen::Service<http::Request<B>> for ServerReflectionServer<T>
- where
- T: ServerReflection,
- B: Body + std::marker::Send + 'static,
- B::Error: Into<StdError> + std::marker::Send + 'static,
- {
- type Response = http::Response<tonic::body::Body>;
- type Error = std::convert::Infallible;
- type Future = BoxFuture<Self::Response, Self::Error>;
- fn poll_ready(
- &mut self,
- _cx: &mut Context<'_>,
- ) -> Poll<std::result::Result<(), Self::Error>> {
- Poll::Ready(Ok(()))
- }
- fn call(&mut self, req: http::Request<B>) -> Self::Future {
- match req.uri().path() {
- "/grpc.reflection.v1alpha.ServerReflection/ServerReflectionInfo" => {
- #[allow(non_camel_case_types)]
- struct ServerReflectionInfoSvc<T: ServerReflection>(pub Arc<T>);
- impl<
- T: ServerReflection,
- > tonic::server::StreamingService<super::ServerReflectionRequest>
- for ServerReflectionInfoSvc<T> {
- type Response = super::ServerReflectionResponse;
- type ResponseStream = T::ServerReflectionInfoStream;
- type Future = BoxFuture<
- tonic::Response<Self::ResponseStream>,
- tonic::Status,
- >;
- fn call(
- &mut self,
- request: tonic::Request<
- tonic::Streaming<super::ServerReflectionRequest>,
- >,
- ) -> Self::Future {
- let inner = Arc::clone(&self.0);
- let fut = async move {
- <T as ServerReflection>::server_reflection_info(
- &inner,
- request,
- )
- .await
- };
- Box::pin(fut)
- }
- }
- let accept_compression_encodings = self.accept_compression_encodings;
- let send_compression_encodings = self.send_compression_encodings;
- let max_decoding_message_size = self.max_decoding_message_size;
- let max_encoding_message_size = self.max_encoding_message_size;
- let inner = self.inner.clone();
- let fut = async move {
- let method = ServerReflectionInfoSvc(inner);
- let codec = tonic::codec::ProstCodec::default();
- let mut grpc = tonic::server::Grpc::new(codec)
- .apply_compression_config(
- accept_compression_encodings,
- send_compression_encodings,
- )
- .apply_max_message_size_config(
- max_decoding_message_size,
- max_encoding_message_size,
- );
- let res = grpc.streaming(method, req).await;
- Ok(res)
- };
- Box::pin(fut)
- }
- _ => {
- Box::pin(async move {
- let mut response = http::Response::new(
- tonic::body::Body::default(),
- );
- let headers = response.headers_mut();
- headers
- .insert(
- tonic::Status::GRPC_STATUS,
- (tonic::Code::Unimplemented as i32).into(),
- );
- headers
- .insert(
- http::header::CONTENT_TYPE,
- tonic::metadata::GRPC_CONTENT_TYPE,
- );
- Ok(response)
- })
- }
- }
- }
- }
- impl<T> Clone for ServerReflectionServer<T> {
- fn clone(&self) -> Self {
- let inner = self.inner.clone();
- Self {
- inner,
- accept_compression_encodings: self.accept_compression_encodings,
- send_compression_encodings: self.send_compression_encodings,
- max_decoding_message_size: self.max_decoding_message_size,
- max_encoding_message_size: self.max_encoding_message_size,
- }
- }
- }
- /// Generated gRPC service name
- pub const SERVICE_NAME: &str = "grpc.reflection.v1alpha.ServerReflection";
- impl<T> tonic::server::NamedService for ServerReflectionServer<T> {
- const NAME: &'static str = SERVICE_NAME;
- }
-}
diff --git a/vendor/tonic-reflection/src/generated/reflection_v1_fds.rs b/vendor/tonic-reflection/src/generated/reflection_v1_fds.rs
deleted file mode 100644
index 4edd5c20..00000000
--- a/vendor/tonic-reflection/src/generated/reflection_v1_fds.rs
+++ /dev/null
@@ -1,161 +0,0 @@
-// This file is @generated by codegen.
-// Copyright 2016 The gRPC Authors
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Service exported by server reflection. A more complete description of how
-// server reflection works can be found at
-// https://github.com/grpc/grpc/blob/master/doc/server-reflection.md
-//
-// The canonical version of this proto can be found at
-// https://github.com/grpc/grpc-proto/blob/master/grpc/reflection/v1/reflection.proto
-//
-/// Byte encoded FILE_DESCRIPTOR_SET.
-pub const FILE_DESCRIPTOR_SET: &[u8] = &[
- 10u8, 192u8, 13u8, 10u8, 19u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8,
- 111u8, 110u8, 95u8, 118u8, 49u8, 46u8, 112u8, 114u8, 111u8, 116u8, 111u8, 18u8, 18u8,
- 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8,
- 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 34u8, 243u8, 2u8, 10u8, 23u8, 83u8, 101u8,
- 114u8, 118u8, 101u8, 114u8, 82u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8,
- 111u8, 110u8, 82u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 18u8, 18u8, 10u8, 4u8,
- 104u8, 111u8, 115u8, 116u8, 24u8, 1u8, 32u8, 1u8, 40u8, 9u8, 82u8, 4u8, 104u8, 111u8,
- 115u8, 116u8, 18u8, 42u8, 10u8, 16u8, 102u8, 105u8, 108u8, 101u8, 95u8, 98u8, 121u8,
- 95u8, 102u8, 105u8, 108u8, 101u8, 110u8, 97u8, 109u8, 101u8, 24u8, 3u8, 32u8, 1u8,
- 40u8, 9u8, 72u8, 0u8, 82u8, 14u8, 102u8, 105u8, 108u8, 101u8, 66u8, 121u8, 70u8,
- 105u8, 108u8, 101u8, 110u8, 97u8, 109u8, 101u8, 18u8, 54u8, 10u8, 22u8, 102u8, 105u8,
- 108u8, 101u8, 95u8, 99u8, 111u8, 110u8, 116u8, 97u8, 105u8, 110u8, 105u8, 110u8,
- 103u8, 95u8, 115u8, 121u8, 109u8, 98u8, 111u8, 108u8, 24u8, 4u8, 32u8, 1u8, 40u8,
- 9u8, 72u8, 0u8, 82u8, 20u8, 102u8, 105u8, 108u8, 101u8, 67u8, 111u8, 110u8, 116u8,
- 97u8, 105u8, 110u8, 105u8, 110u8, 103u8, 83u8, 121u8, 109u8, 98u8, 111u8, 108u8,
- 18u8, 98u8, 10u8, 25u8, 102u8, 105u8, 108u8, 101u8, 95u8, 99u8, 111u8, 110u8, 116u8,
- 97u8, 105u8, 110u8, 105u8, 110u8, 103u8, 95u8, 101u8, 120u8, 116u8, 101u8, 110u8,
- 115u8, 105u8, 111u8, 110u8, 24u8, 5u8, 32u8, 1u8, 40u8, 11u8, 50u8, 36u8, 46u8,
- 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8,
- 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 46u8, 69u8, 120u8, 116u8, 101u8, 110u8,
- 115u8, 105u8, 111u8, 110u8, 82u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 72u8,
- 0u8, 82u8, 23u8, 102u8, 105u8, 108u8, 101u8, 67u8, 111u8, 110u8, 116u8, 97u8, 105u8,
- 110u8, 105u8, 110u8, 103u8, 69u8, 120u8, 116u8, 101u8, 110u8, 115u8, 105u8, 111u8,
- 110u8, 18u8, 66u8, 10u8, 29u8, 97u8, 108u8, 108u8, 95u8, 101u8, 120u8, 116u8, 101u8,
- 110u8, 115u8, 105u8, 111u8, 110u8, 95u8, 110u8, 117u8, 109u8, 98u8, 101u8, 114u8,
- 115u8, 95u8, 111u8, 102u8, 95u8, 116u8, 121u8, 112u8, 101u8, 24u8, 6u8, 32u8, 1u8,
- 40u8, 9u8, 72u8, 0u8, 82u8, 25u8, 97u8, 108u8, 108u8, 69u8, 120u8, 116u8, 101u8,
- 110u8, 115u8, 105u8, 111u8, 110u8, 78u8, 117u8, 109u8, 98u8, 101u8, 114u8, 115u8,
- 79u8, 102u8, 84u8, 121u8, 112u8, 101u8, 18u8, 37u8, 10u8, 13u8, 108u8, 105u8, 115u8,
- 116u8, 95u8, 115u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8, 115u8, 24u8, 7u8, 32u8,
- 1u8, 40u8, 9u8, 72u8, 0u8, 82u8, 12u8, 108u8, 105u8, 115u8, 116u8, 83u8, 101u8,
- 114u8, 118u8, 105u8, 99u8, 101u8, 115u8, 66u8, 17u8, 10u8, 15u8, 109u8, 101u8, 115u8,
- 115u8, 97u8, 103u8, 101u8, 95u8, 114u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8,
- 34u8, 102u8, 10u8, 16u8, 69u8, 120u8, 116u8, 101u8, 110u8, 115u8, 105u8, 111u8,
- 110u8, 82u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 18u8, 39u8, 10u8, 15u8, 99u8,
- 111u8, 110u8, 116u8, 97u8, 105u8, 110u8, 105u8, 110u8, 103u8, 95u8, 116u8, 121u8,
- 112u8, 101u8, 24u8, 1u8, 32u8, 1u8, 40u8, 9u8, 82u8, 14u8, 99u8, 111u8, 110u8, 116u8,
- 97u8, 105u8, 110u8, 105u8, 110u8, 103u8, 84u8, 121u8, 112u8, 101u8, 18u8, 41u8, 10u8,
- 16u8, 101u8, 120u8, 116u8, 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 95u8, 110u8,
- 117u8, 109u8, 98u8, 101u8, 114u8, 24u8, 2u8, 32u8, 1u8, 40u8, 5u8, 82u8, 15u8, 101u8,
- 120u8, 116u8, 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 78u8, 117u8, 109u8, 98u8,
- 101u8, 114u8, 34u8, 174u8, 4u8, 10u8, 24u8, 83u8, 101u8, 114u8, 118u8, 101u8, 114u8,
- 82u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 82u8, 101u8,
- 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 29u8, 10u8, 10u8, 118u8, 97u8, 108u8,
- 105u8, 100u8, 95u8, 104u8, 111u8, 115u8, 116u8, 24u8, 1u8, 32u8, 1u8, 40u8, 9u8,
- 82u8, 9u8, 118u8, 97u8, 108u8, 105u8, 100u8, 72u8, 111u8, 115u8, 116u8, 18u8, 86u8,
- 10u8, 16u8, 111u8, 114u8, 105u8, 103u8, 105u8, 110u8, 97u8, 108u8, 95u8, 114u8,
- 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 24u8, 2u8, 32u8, 1u8, 40u8, 11u8, 50u8,
- 43u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8,
- 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 46u8, 83u8, 101u8, 114u8, 118u8,
- 101u8, 114u8, 82u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8,
- 82u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 82u8, 15u8, 111u8, 114u8, 105u8,
- 103u8, 105u8, 110u8, 97u8, 108u8, 82u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8,
- 18u8, 102u8, 10u8, 24u8, 102u8, 105u8, 108u8, 101u8, 95u8, 100u8, 101u8, 115u8, 99u8,
- 114u8, 105u8, 112u8, 116u8, 111u8, 114u8, 95u8, 114u8, 101u8, 115u8, 112u8, 111u8,
- 110u8, 115u8, 101u8, 24u8, 4u8, 32u8, 1u8, 40u8, 11u8, 50u8, 42u8, 46u8, 103u8,
- 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8,
- 111u8, 110u8, 46u8, 118u8, 49u8, 46u8, 70u8, 105u8, 108u8, 101u8, 68u8, 101u8, 115u8,
- 99u8, 114u8, 105u8, 112u8, 116u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8, 111u8,
- 110u8, 115u8, 101u8, 72u8, 0u8, 82u8, 22u8, 102u8, 105u8, 108u8, 101u8, 68u8, 101u8,
- 115u8, 99u8, 114u8, 105u8, 112u8, 116u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8,
- 111u8, 110u8, 115u8, 101u8, 18u8, 114u8, 10u8, 30u8, 97u8, 108u8, 108u8, 95u8, 101u8,
- 120u8, 116u8, 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 95u8, 110u8, 117u8, 109u8,
- 98u8, 101u8, 114u8, 115u8, 95u8, 114u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8,
- 101u8, 24u8, 5u8, 32u8, 1u8, 40u8, 11u8, 50u8, 43u8, 46u8, 103u8, 114u8, 112u8, 99u8,
- 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8,
- 118u8, 49u8, 46u8, 69u8, 120u8, 116u8, 101u8, 110u8, 115u8, 105u8, 111u8, 110u8,
- 78u8, 117u8, 109u8, 98u8, 101u8, 114u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8,
- 115u8, 101u8, 72u8, 0u8, 82u8, 27u8, 97u8, 108u8, 108u8, 69u8, 120u8, 116u8, 101u8,
- 110u8, 115u8, 105u8, 111u8, 110u8, 78u8, 117u8, 109u8, 98u8, 101u8, 114u8, 115u8,
- 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 95u8, 10u8, 22u8, 108u8,
- 105u8, 115u8, 116u8, 95u8, 115u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8, 115u8,
- 95u8, 114u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 24u8, 6u8, 32u8, 1u8,
- 40u8, 11u8, 50u8, 39u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8,
- 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 46u8, 76u8, 105u8,
- 115u8, 116u8, 83u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8, 82u8, 101u8, 115u8,
- 112u8, 111u8, 110u8, 115u8, 101u8, 72u8, 0u8, 82u8, 20u8, 108u8, 105u8, 115u8, 116u8,
- 83u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8, 115u8, 82u8, 101u8, 115u8, 112u8,
- 111u8, 110u8, 115u8, 101u8, 18u8, 74u8, 10u8, 14u8, 101u8, 114u8, 114u8, 111u8,
- 114u8, 95u8, 114u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 24u8, 7u8, 32u8,
- 1u8, 40u8, 11u8, 50u8, 33u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8,
- 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 46u8, 69u8,
- 114u8, 114u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8,
- 72u8, 0u8, 82u8, 13u8, 101u8, 114u8, 114u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8,
- 111u8, 110u8, 115u8, 101u8, 66u8, 18u8, 10u8, 16u8, 109u8, 101u8, 115u8, 115u8, 97u8,
- 103u8, 101u8, 95u8, 114u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 34u8,
- 76u8, 10u8, 22u8, 70u8, 105u8, 108u8, 101u8, 68u8, 101u8, 115u8, 99u8, 114u8, 105u8,
- 112u8, 116u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8,
- 18u8, 50u8, 10u8, 21u8, 102u8, 105u8, 108u8, 101u8, 95u8, 100u8, 101u8, 115u8, 99u8,
- 114u8, 105u8, 112u8, 116u8, 111u8, 114u8, 95u8, 112u8, 114u8, 111u8, 116u8, 111u8,
- 24u8, 1u8, 32u8, 3u8, 40u8, 12u8, 82u8, 19u8, 102u8, 105u8, 108u8, 101u8, 68u8,
- 101u8, 115u8, 99u8, 114u8, 105u8, 112u8, 116u8, 111u8, 114u8, 80u8, 114u8, 111u8,
- 116u8, 111u8, 34u8, 106u8, 10u8, 23u8, 69u8, 120u8, 116u8, 101u8, 110u8, 115u8,
- 105u8, 111u8, 110u8, 78u8, 117u8, 109u8, 98u8, 101u8, 114u8, 82u8, 101u8, 115u8,
- 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 36u8, 10u8, 14u8, 98u8, 97u8, 115u8, 101u8,
- 95u8, 116u8, 121u8, 112u8, 101u8, 95u8, 110u8, 97u8, 109u8, 101u8, 24u8, 1u8, 32u8,
- 1u8, 40u8, 9u8, 82u8, 12u8, 98u8, 97u8, 115u8, 101u8, 84u8, 121u8, 112u8, 101u8,
- 78u8, 97u8, 109u8, 101u8, 18u8, 41u8, 10u8, 16u8, 101u8, 120u8, 116u8, 101u8, 110u8,
- 115u8, 105u8, 111u8, 110u8, 95u8, 110u8, 117u8, 109u8, 98u8, 101u8, 114u8, 24u8, 2u8,
- 32u8, 3u8, 40u8, 5u8, 82u8, 15u8, 101u8, 120u8, 116u8, 101u8, 110u8, 115u8, 105u8,
- 111u8, 110u8, 78u8, 117u8, 109u8, 98u8, 101u8, 114u8, 34u8, 84u8, 10u8, 19u8, 76u8,
- 105u8, 115u8, 116u8, 83u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8, 82u8, 101u8,
- 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 61u8, 10u8, 7u8, 115u8, 101u8, 114u8,
- 118u8, 105u8, 99u8, 101u8, 24u8, 1u8, 32u8, 3u8, 40u8, 11u8, 50u8, 35u8, 46u8, 103u8,
- 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8,
- 111u8, 110u8, 46u8, 118u8, 49u8, 46u8, 83u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8,
- 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 82u8, 7u8, 115u8, 101u8,
- 114u8, 118u8, 105u8, 99u8, 101u8, 34u8, 37u8, 10u8, 15u8, 83u8, 101u8, 114u8, 118u8,
- 105u8, 99u8, 101u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8,
- 18u8, 10u8, 4u8, 110u8, 97u8, 109u8, 101u8, 24u8, 1u8, 32u8, 1u8, 40u8, 9u8, 82u8,
- 4u8, 110u8, 97u8, 109u8, 101u8, 34u8, 83u8, 10u8, 13u8, 69u8, 114u8, 114u8, 111u8,
- 114u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 29u8, 10u8, 10u8,
- 101u8, 114u8, 114u8, 111u8, 114u8, 95u8, 99u8, 111u8, 100u8, 101u8, 24u8, 1u8, 32u8,
- 1u8, 40u8, 5u8, 82u8, 9u8, 101u8, 114u8, 114u8, 111u8, 114u8, 67u8, 111u8, 100u8,
- 101u8, 18u8, 35u8, 10u8, 13u8, 101u8, 114u8, 114u8, 111u8, 114u8, 95u8, 109u8, 101u8,
- 115u8, 115u8, 97u8, 103u8, 101u8, 24u8, 2u8, 32u8, 1u8, 40u8, 9u8, 82u8, 12u8, 101u8,
- 114u8, 114u8, 111u8, 114u8, 77u8, 101u8, 115u8, 115u8, 97u8, 103u8, 101u8, 50u8,
- 137u8, 1u8, 10u8, 16u8, 83u8, 101u8, 114u8, 118u8, 101u8, 114u8, 82u8, 101u8, 102u8,
- 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 18u8, 117u8, 10u8, 20u8, 83u8, 101u8,
- 114u8, 118u8, 101u8, 114u8, 82u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8,
- 111u8, 110u8, 73u8, 110u8, 102u8, 111u8, 18u8, 43u8, 46u8, 103u8, 114u8, 112u8, 99u8,
- 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8,
- 118u8, 49u8, 46u8, 83u8, 101u8, 114u8, 118u8, 101u8, 114u8, 82u8, 101u8, 102u8,
- 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 82u8, 101u8, 113u8, 117u8, 101u8,
- 115u8, 116u8, 26u8, 44u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8,
- 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 46u8, 83u8, 101u8,
- 114u8, 118u8, 101u8, 114u8, 82u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8,
- 111u8, 110u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 40u8, 1u8, 48u8,
- 1u8, 66u8, 102u8, 10u8, 21u8, 105u8, 111u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8,
- 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8,
- 49u8, 66u8, 21u8, 83u8, 101u8, 114u8, 118u8, 101u8, 114u8, 82u8, 101u8, 102u8, 108u8,
- 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 80u8, 114u8, 111u8, 116u8, 111u8, 80u8, 1u8,
- 90u8, 52u8, 103u8, 111u8, 111u8, 103u8, 108u8, 101u8, 46u8, 103u8, 111u8, 108u8,
- 97u8, 110u8, 103u8, 46u8, 111u8, 114u8, 103u8, 47u8, 103u8, 114u8, 112u8, 99u8, 47u8,
- 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 47u8, 103u8,
- 114u8, 112u8, 99u8, 95u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8,
- 111u8, 110u8, 95u8, 118u8, 49u8, 98u8, 6u8, 112u8, 114u8, 111u8, 116u8, 111u8, 51u8,
-];
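The byte array above is simply a serialized google.protobuf.FileDescriptorSet for reflection_v1.proto itself. As a sketch, it can be decoded with prost-types (also vendored in this repository); the re-export path for FILE_DESCRIPTOR_SET is an assumption, since lib.rs is not shown in this hunk:

```rust
use prost::Message;
use prost_types::FileDescriptorSet;

fn main() -> Result<(), prost::DecodeError> {
    // The path to the constant is assumed; it is the byte slice deleted above.
    let fds = FileDescriptorSet::decode(tonic_reflection::pb::v1::FILE_DESCRIPTOR_SET)?;
    for file in &fds.file {
        println!(
            "{}: {} message type(s), {} service(s)",
            file.name(),
            file.message_type.len(),
            file.service.len()
        );
    }
    Ok(())
}
```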
diff --git a/vendor/tonic-reflection/src/generated/reflection_v1alpha1_fds.rs b/vendor/tonic-reflection/src/generated/reflection_v1alpha1_fds.rs
deleted file mode 100644
index 240b4cb0..00000000
--- a/vendor/tonic-reflection/src/generated/reflection_v1alpha1_fds.rs
+++ /dev/null
@@ -1,153 +0,0 @@
-// This file is @generated by codegen.
-// Copyright 2016 gRPC authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Service exported by server reflection
-//
-/// Byte encoded FILE_DESCRIPTOR_SET.
-pub const FILE_DESCRIPTOR_SET: &[u8] = &[
- 10u8, 143u8, 13u8, 10u8, 24u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8,
- 111u8, 110u8, 95u8, 118u8, 49u8, 97u8, 108u8, 112u8, 104u8, 97u8, 46u8, 112u8, 114u8,
- 111u8, 116u8, 111u8, 18u8, 23u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8,
- 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 97u8,
- 108u8, 112u8, 104u8, 97u8, 34u8, 248u8, 2u8, 10u8, 23u8, 83u8, 101u8, 114u8, 118u8,
- 101u8, 114u8, 82u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8,
- 82u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 18u8, 18u8, 10u8, 4u8, 104u8, 111u8,
- 115u8, 116u8, 24u8, 1u8, 32u8, 1u8, 40u8, 9u8, 82u8, 4u8, 104u8, 111u8, 115u8, 116u8,
- 18u8, 42u8, 10u8, 16u8, 102u8, 105u8, 108u8, 101u8, 95u8, 98u8, 121u8, 95u8, 102u8,
- 105u8, 108u8, 101u8, 110u8, 97u8, 109u8, 101u8, 24u8, 3u8, 32u8, 1u8, 40u8, 9u8,
- 72u8, 0u8, 82u8, 14u8, 102u8, 105u8, 108u8, 101u8, 66u8, 121u8, 70u8, 105u8, 108u8,
- 101u8, 110u8, 97u8, 109u8, 101u8, 18u8, 54u8, 10u8, 22u8, 102u8, 105u8, 108u8, 101u8,
- 95u8, 99u8, 111u8, 110u8, 116u8, 97u8, 105u8, 110u8, 105u8, 110u8, 103u8, 95u8,
- 115u8, 121u8, 109u8, 98u8, 111u8, 108u8, 24u8, 4u8, 32u8, 1u8, 40u8, 9u8, 72u8, 0u8,
- 82u8, 20u8, 102u8, 105u8, 108u8, 101u8, 67u8, 111u8, 110u8, 116u8, 97u8, 105u8,
- 110u8, 105u8, 110u8, 103u8, 83u8, 121u8, 109u8, 98u8, 111u8, 108u8, 18u8, 103u8,
- 10u8, 25u8, 102u8, 105u8, 108u8, 101u8, 95u8, 99u8, 111u8, 110u8, 116u8, 97u8, 105u8,
- 110u8, 105u8, 110u8, 103u8, 95u8, 101u8, 120u8, 116u8, 101u8, 110u8, 115u8, 105u8,
- 111u8, 110u8, 24u8, 5u8, 32u8, 1u8, 40u8, 11u8, 50u8, 41u8, 46u8, 103u8, 114u8,
- 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8,
- 110u8, 46u8, 118u8, 49u8, 97u8, 108u8, 112u8, 104u8, 97u8, 46u8, 69u8, 120u8, 116u8,
- 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 82u8, 101u8, 113u8, 117u8, 101u8, 115u8,
- 116u8, 72u8, 0u8, 82u8, 23u8, 102u8, 105u8, 108u8, 101u8, 67u8, 111u8, 110u8, 116u8,
- 97u8, 105u8, 110u8, 105u8, 110u8, 103u8, 69u8, 120u8, 116u8, 101u8, 110u8, 115u8,
- 105u8, 111u8, 110u8, 18u8, 66u8, 10u8, 29u8, 97u8, 108u8, 108u8, 95u8, 101u8, 120u8,
- 116u8, 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 95u8, 110u8, 117u8, 109u8, 98u8,
- 101u8, 114u8, 115u8, 95u8, 111u8, 102u8, 95u8, 116u8, 121u8, 112u8, 101u8, 24u8, 6u8,
- 32u8, 1u8, 40u8, 9u8, 72u8, 0u8, 82u8, 25u8, 97u8, 108u8, 108u8, 69u8, 120u8, 116u8,
- 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 78u8, 117u8, 109u8, 98u8, 101u8, 114u8,
- 115u8, 79u8, 102u8, 84u8, 121u8, 112u8, 101u8, 18u8, 37u8, 10u8, 13u8, 108u8, 105u8,
- 115u8, 116u8, 95u8, 115u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8, 115u8, 24u8, 7u8,
- 32u8, 1u8, 40u8, 9u8, 72u8, 0u8, 82u8, 12u8, 108u8, 105u8, 115u8, 116u8, 83u8, 101u8,
- 114u8, 118u8, 105u8, 99u8, 101u8, 115u8, 66u8, 17u8, 10u8, 15u8, 109u8, 101u8, 115u8,
- 115u8, 97u8, 103u8, 101u8, 95u8, 114u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8,
- 34u8, 102u8, 10u8, 16u8, 69u8, 120u8, 116u8, 101u8, 110u8, 115u8, 105u8, 111u8,
- 110u8, 82u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 18u8, 39u8, 10u8, 15u8, 99u8,
- 111u8, 110u8, 116u8, 97u8, 105u8, 110u8, 105u8, 110u8, 103u8, 95u8, 116u8, 121u8,
- 112u8, 101u8, 24u8, 1u8, 32u8, 1u8, 40u8, 9u8, 82u8, 14u8, 99u8, 111u8, 110u8, 116u8,
- 97u8, 105u8, 110u8, 105u8, 110u8, 103u8, 84u8, 121u8, 112u8, 101u8, 18u8, 41u8, 10u8,
- 16u8, 101u8, 120u8, 116u8, 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 95u8, 110u8,
- 117u8, 109u8, 98u8, 101u8, 114u8, 24u8, 2u8, 32u8, 1u8, 40u8, 5u8, 82u8, 15u8, 101u8,
- 120u8, 116u8, 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 78u8, 117u8, 109u8, 98u8,
- 101u8, 114u8, 34u8, 199u8, 4u8, 10u8, 24u8, 83u8, 101u8, 114u8, 118u8, 101u8, 114u8,
- 82u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 82u8, 101u8,
- 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 29u8, 10u8, 10u8, 118u8, 97u8, 108u8,
- 105u8, 100u8, 95u8, 104u8, 111u8, 115u8, 116u8, 24u8, 1u8, 32u8, 1u8, 40u8, 9u8,
- 82u8, 9u8, 118u8, 97u8, 108u8, 105u8, 100u8, 72u8, 111u8, 115u8, 116u8, 18u8, 91u8,
- 10u8, 16u8, 111u8, 114u8, 105u8, 103u8, 105u8, 110u8, 97u8, 108u8, 95u8, 114u8,
- 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 24u8, 2u8, 32u8, 1u8, 40u8, 11u8, 50u8,
- 48u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8,
- 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 97u8, 108u8, 112u8, 104u8, 97u8, 46u8,
- 83u8, 101u8, 114u8, 118u8, 101u8, 114u8, 82u8, 101u8, 102u8, 108u8, 101u8, 99u8,
- 116u8, 105u8, 111u8, 110u8, 82u8, 101u8, 113u8, 117u8, 101u8, 115u8, 116u8, 82u8,
- 15u8, 111u8, 114u8, 105u8, 103u8, 105u8, 110u8, 97u8, 108u8, 82u8, 101u8, 113u8,
- 117u8, 101u8, 115u8, 116u8, 18u8, 107u8, 10u8, 24u8, 102u8, 105u8, 108u8, 101u8,
- 95u8, 100u8, 101u8, 115u8, 99u8, 114u8, 105u8, 112u8, 116u8, 111u8, 114u8, 95u8,
- 114u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 24u8, 4u8, 32u8, 1u8, 40u8,
- 11u8, 50u8, 47u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8,
- 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 97u8, 108u8, 112u8,
- 104u8, 97u8, 46u8, 70u8, 105u8, 108u8, 101u8, 68u8, 101u8, 115u8, 99u8, 114u8, 105u8,
- 112u8, 116u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8,
- 72u8, 0u8, 82u8, 22u8, 102u8, 105u8, 108u8, 101u8, 68u8, 101u8, 115u8, 99u8, 114u8,
- 105u8, 112u8, 116u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8,
- 101u8, 18u8, 119u8, 10u8, 30u8, 97u8, 108u8, 108u8, 95u8, 101u8, 120u8, 116u8, 101u8,
- 110u8, 115u8, 105u8, 111u8, 110u8, 95u8, 110u8, 117u8, 109u8, 98u8, 101u8, 114u8,
- 115u8, 95u8, 114u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 24u8, 5u8, 32u8,
- 1u8, 40u8, 11u8, 50u8, 48u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8,
- 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 97u8,
- 108u8, 112u8, 104u8, 97u8, 46u8, 69u8, 120u8, 116u8, 101u8, 110u8, 115u8, 105u8,
- 111u8, 110u8, 78u8, 117u8, 109u8, 98u8, 101u8, 114u8, 82u8, 101u8, 115u8, 112u8,
- 111u8, 110u8, 115u8, 101u8, 72u8, 0u8, 82u8, 27u8, 97u8, 108u8, 108u8, 69u8, 120u8,
- 116u8, 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 78u8, 117u8, 109u8, 98u8, 101u8,
- 114u8, 115u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 100u8,
- 10u8, 22u8, 108u8, 105u8, 115u8, 116u8, 95u8, 115u8, 101u8, 114u8, 118u8, 105u8,
- 99u8, 101u8, 115u8, 95u8, 114u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8,
- 24u8, 6u8, 32u8, 1u8, 40u8, 11u8, 50u8, 44u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8,
- 114u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8,
- 49u8, 97u8, 108u8, 112u8, 104u8, 97u8, 46u8, 76u8, 105u8, 115u8, 116u8, 83u8, 101u8,
- 114u8, 118u8, 105u8, 99u8, 101u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8,
- 101u8, 72u8, 0u8, 82u8, 20u8, 108u8, 105u8, 115u8, 116u8, 83u8, 101u8, 114u8, 118u8,
- 105u8, 99u8, 101u8, 115u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8,
- 18u8, 79u8, 10u8, 14u8, 101u8, 114u8, 114u8, 111u8, 114u8, 95u8, 114u8, 101u8, 115u8,
- 112u8, 111u8, 110u8, 115u8, 101u8, 24u8, 7u8, 32u8, 1u8, 40u8, 11u8, 50u8, 38u8,
- 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8, 101u8, 99u8,
- 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 97u8, 108u8, 112u8, 104u8, 97u8, 46u8,
- 69u8, 114u8, 114u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8,
- 101u8, 72u8, 0u8, 82u8, 13u8, 101u8, 114u8, 114u8, 111u8, 114u8, 82u8, 101u8, 115u8,
- 112u8, 111u8, 110u8, 115u8, 101u8, 66u8, 18u8, 10u8, 16u8, 109u8, 101u8, 115u8,
- 115u8, 97u8, 103u8, 101u8, 95u8, 114u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8,
- 101u8, 34u8, 76u8, 10u8, 22u8, 70u8, 105u8, 108u8, 101u8, 68u8, 101u8, 115u8, 99u8,
- 114u8, 105u8, 112u8, 116u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8,
- 115u8, 101u8, 18u8, 50u8, 10u8, 21u8, 102u8, 105u8, 108u8, 101u8, 95u8, 100u8, 101u8,
- 115u8, 99u8, 114u8, 105u8, 112u8, 116u8, 111u8, 114u8, 95u8, 112u8, 114u8, 111u8,
- 116u8, 111u8, 24u8, 1u8, 32u8, 3u8, 40u8, 12u8, 82u8, 19u8, 102u8, 105u8, 108u8,
- 101u8, 68u8, 101u8, 115u8, 99u8, 114u8, 105u8, 112u8, 116u8, 111u8, 114u8, 80u8,
- 114u8, 111u8, 116u8, 111u8, 34u8, 106u8, 10u8, 23u8, 69u8, 120u8, 116u8, 101u8,
- 110u8, 115u8, 105u8, 111u8, 110u8, 78u8, 117u8, 109u8, 98u8, 101u8, 114u8, 82u8,
- 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 36u8, 10u8, 14u8, 98u8, 97u8,
- 115u8, 101u8, 95u8, 116u8, 121u8, 112u8, 101u8, 95u8, 110u8, 97u8, 109u8, 101u8,
- 24u8, 1u8, 32u8, 1u8, 40u8, 9u8, 82u8, 12u8, 98u8, 97u8, 115u8, 101u8, 84u8, 121u8,
- 112u8, 101u8, 78u8, 97u8, 109u8, 101u8, 18u8, 41u8, 10u8, 16u8, 101u8, 120u8, 116u8,
- 101u8, 110u8, 115u8, 105u8, 111u8, 110u8, 95u8, 110u8, 117u8, 109u8, 98u8, 101u8,
- 114u8, 24u8, 2u8, 32u8, 3u8, 40u8, 5u8, 82u8, 15u8, 101u8, 120u8, 116u8, 101u8,
- 110u8, 115u8, 105u8, 111u8, 110u8, 78u8, 117u8, 109u8, 98u8, 101u8, 114u8, 34u8,
- 89u8, 10u8, 19u8, 76u8, 105u8, 115u8, 116u8, 83u8, 101u8, 114u8, 118u8, 105u8, 99u8,
- 101u8, 82u8, 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 66u8, 10u8, 7u8,
- 115u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8, 24u8, 1u8, 32u8, 3u8, 40u8, 11u8,
- 50u8, 40u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8, 101u8, 102u8, 108u8, 101u8,
- 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8, 97u8, 108u8, 112u8, 104u8, 97u8,
- 46u8, 83u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8, 82u8, 101u8, 115u8, 112u8,
- 111u8, 110u8, 115u8, 101u8, 82u8, 7u8, 115u8, 101u8, 114u8, 118u8, 105u8, 99u8,
- 101u8, 34u8, 37u8, 10u8, 15u8, 83u8, 101u8, 114u8, 118u8, 105u8, 99u8, 101u8, 82u8,
- 101u8, 115u8, 112u8, 111u8, 110u8, 115u8, 101u8, 18u8, 18u8, 10u8, 4u8, 110u8, 97u8,
- 109u8, 101u8, 24u8, 1u8, 32u8, 1u8, 40u8, 9u8, 82u8, 4u8, 110u8, 97u8, 109u8, 101u8,
- 34u8, 83u8, 10u8, 13u8, 69u8, 114u8, 114u8, 111u8, 114u8, 82u8, 101u8, 115u8, 112u8,
- 111u8, 110u8, 115u8, 101u8, 18u8, 29u8, 10u8, 10u8, 101u8, 114u8, 114u8, 111u8,
- 114u8, 95u8, 99u8, 111u8, 100u8, 101u8, 24u8, 1u8, 32u8, 1u8, 40u8, 5u8, 82u8, 9u8,
- 101u8, 114u8, 114u8, 111u8, 114u8, 67u8, 111u8, 100u8, 101u8, 18u8, 35u8, 10u8, 13u8,
- 101u8, 114u8, 114u8, 111u8, 114u8, 95u8, 109u8, 101u8, 115u8, 115u8, 97u8, 103u8,
- 101u8, 24u8, 2u8, 32u8, 1u8, 40u8, 9u8, 82u8, 12u8, 101u8, 114u8, 114u8, 111u8,
- 114u8, 77u8, 101u8, 115u8, 115u8, 97u8, 103u8, 101u8, 50u8, 147u8, 1u8, 10u8, 16u8,
- 83u8, 101u8, 114u8, 118u8, 101u8, 114u8, 82u8, 101u8, 102u8, 108u8, 101u8, 99u8,
- 116u8, 105u8, 111u8, 110u8, 18u8, 127u8, 10u8, 20u8, 83u8, 101u8, 114u8, 118u8,
- 101u8, 114u8, 82u8, 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8,
- 73u8, 110u8, 102u8, 111u8, 18u8, 48u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8,
- 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8,
- 97u8, 108u8, 112u8, 104u8, 97u8, 46u8, 83u8, 101u8, 114u8, 118u8, 101u8, 114u8, 82u8,
- 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 82u8, 101u8, 113u8,
- 117u8, 101u8, 115u8, 116u8, 26u8, 49u8, 46u8, 103u8, 114u8, 112u8, 99u8, 46u8, 114u8,
- 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 46u8, 118u8, 49u8,
- 97u8, 108u8, 112u8, 104u8, 97u8, 46u8, 83u8, 101u8, 114u8, 118u8, 101u8, 114u8, 82u8,
- 101u8, 102u8, 108u8, 101u8, 99u8, 116u8, 105u8, 111u8, 110u8, 82u8, 101u8, 115u8,
- 112u8, 111u8, 110u8, 115u8, 101u8, 40u8, 1u8, 48u8, 1u8, 98u8, 6u8, 112u8, 114u8,
- 111u8, 116u8, 111u8, 51u8,
-];
diff --git a/vendor/tonic-reflection/src/lib.rs b/vendor/tonic-reflection/src/lib.rs
deleted file mode 100644
index 97ea3165..00000000
--- a/vendor/tonic-reflection/src/lib.rs
+++ /dev/null
@@ -1,66 +0,0 @@
-//! A `tonic` based gRPC Server Reflection implementation.
-
-#![doc(
- html_logo_url = "https://github.com/hyperium/tonic/raw/master/.github/assets/tonic-docs.png"
-)]
-#![doc(issue_tracker_base_url = "https://github.com/hyperium/tonic/issues/")]
-#![doc(test(no_crate_inject, attr(deny(rust_2018_idioms))))]
-#![cfg_attr(docsrs, feature(doc_auto_cfg))]
-
-mod generated {
- #![allow(unreachable_pub)]
- #![allow(missing_docs)]
- #![allow(rustdoc::invalid_html_tags)]
-
- #[rustfmt::skip]
- pub mod grpc_reflection_v1alpha;
-
- #[rustfmt::skip]
- pub mod grpc_reflection_v1;
-
- #[rustfmt::skip]
- pub mod reflection_v1_fds;
-
- #[rustfmt::skip]
- pub mod reflection_v1alpha1_fds;
-
- pub use reflection_v1_fds::FILE_DESCRIPTOR_SET as FILE_DESCRIPTOR_SET_V1;
- pub use reflection_v1alpha1_fds::FILE_DESCRIPTOR_SET as FILE_DESCRIPTOR_SET_V1ALPHA;
-
- #[cfg(test)]
- mod tests {
- use super::{FILE_DESCRIPTOR_SET_V1, FILE_DESCRIPTOR_SET_V1ALPHA};
- use prost::Message as _;
-
- #[test]
- fn v1alpha_file_descriptor_set_is_valid() {
- prost_types::FileDescriptorSet::decode(FILE_DESCRIPTOR_SET_V1ALPHA).unwrap();
- }
-
- #[test]
- fn v1_file_descriptor_set_is_valid() {
- prost_types::FileDescriptorSet::decode(FILE_DESCRIPTOR_SET_V1).unwrap();
- }
- }
-}
-
-/// Generated protobuf types from the `grpc.reflection` namespace.
-pub mod pb {
- /// Generated protobuf types from the `grpc.reflection.v1` package.
- pub mod v1 {
- pub use crate::generated::{
- grpc_reflection_v1::*, FILE_DESCRIPTOR_SET_V1 as FILE_DESCRIPTOR_SET,
- };
- }
-
- /// Generated protobuf types from the `grpc.reflection.v1alpha` package.
- pub mod v1alpha {
- pub use crate::generated::{
- grpc_reflection_v1alpha::*, FILE_DESCRIPTOR_SET_V1ALPHA as FILE_DESCRIPTOR_SET,
- };
- }
-}
-
-/// Implementation of the server component of gRPC Server Reflection.
-#[cfg(feature = "server")]
-pub mod server;
diff --git a/vendor/tonic-reflection/src/server/mod.rs b/vendor/tonic-reflection/src/server/mod.rs
deleted file mode 100644
index 2b1a806a..00000000
--- a/vendor/tonic-reflection/src/server/mod.rs
+++ /dev/null
@@ -1,326 +0,0 @@
-use std::collections::HashMap;
-use std::fmt::{Display, Formatter};
-use std::sync::Arc;
-
-use prost::{DecodeError, Message};
-use prost_types::{
- DescriptorProto, EnumDescriptorProto, FieldDescriptorProto, FileDescriptorProto,
- FileDescriptorSet,
-};
-use tonic::Status;
-
-/// v1 interface for the gRPC Reflection Service server.
-pub mod v1;
-/// v1alpha interface for the gRPC Reflection Service server.
-pub mod v1alpha;
-
-/// A builder used to construct a gRPC Reflection Service.
-#[derive(Debug)]
-pub struct Builder<'b> {
- file_descriptor_sets: Vec<FileDescriptorSet>,
- encoded_file_descriptor_sets: Vec<&'b [u8]>,
- include_reflection_service: bool,
-
- service_names: Vec<String>,
- use_all_service_names: bool,
-}
-
-impl<'b> Builder<'b> {
- /// Create a new builder that can configure a gRPC Reflection Service.
- pub fn configure() -> Self {
- Builder {
- file_descriptor_sets: Vec::new(),
- encoded_file_descriptor_sets: Vec::new(),
- include_reflection_service: true,
-
- service_names: Vec::new(),
- use_all_service_names: true,
- }
- }
-
- /// Registers an instance of `prost_types::FileDescriptorSet` with the gRPC Reflection
- /// Service builder.
- pub fn register_file_descriptor_set(mut self, file_descriptor_set: FileDescriptorSet) -> Self {
- self.file_descriptor_sets.push(file_descriptor_set);
- self
- }
-
- /// Registers a byte slice containing an encoded `prost_types::FileDescriptorSet` with
- /// the gRPC Reflection Service builder.
- pub fn register_encoded_file_descriptor_set(
- mut self,
- encoded_file_descriptor_set: &'b [u8],
- ) -> Self {
- self.encoded_file_descriptor_sets
- .push(encoded_file_descriptor_set);
- self
- }
-
- /// Serve the gRPC Reflection Service descriptor via the Reflection Service. This is enabled
- /// by default - set `include` to false to disable.
- pub fn include_reflection_service(mut self, include: bool) -> Self {
- self.include_reflection_service = include;
- self
- }
-
- /// Advertise a fully-qualified gRPC service name.
- ///
- /// If not called, then all services present in the registered file descriptor sets
- /// will be advertised.
- pub fn with_service_name(mut self, name: impl Into<String>) -> Self {
- self.use_all_service_names = false;
- self.service_names.push(name.into());
- self
- }
-
- /// Build a v1 gRPC Reflection Service to be served via Tonic.
- pub fn build_v1(
- mut self,
- ) -> Result<v1::ServerReflectionServer<impl v1::ServerReflection>, Error> {
- if self.include_reflection_service {
- self = self.register_encoded_file_descriptor_set(crate::pb::v1::FILE_DESCRIPTOR_SET);
- }
-
- Ok(v1::ServerReflectionServer::new(
- v1::ReflectionService::from(ReflectionServiceState::new(
- self.service_names,
- self.encoded_file_descriptor_sets,
- self.file_descriptor_sets,
- self.use_all_service_names,
- )?),
- ))
- }
-
- /// Build a v1alpha gRPC Reflection Service to be served via Tonic.
- pub fn build_v1alpha(
- mut self,
- ) -> Result<v1alpha::ServerReflectionServer<impl v1alpha::ServerReflection>, Error> {
- if self.include_reflection_service {
- self =
- self.register_encoded_file_descriptor_set(crate::pb::v1alpha::FILE_DESCRIPTOR_SET);
- }
-
- Ok(v1alpha::ServerReflectionServer::new(
- v1alpha::ReflectionService::from(ReflectionServiceState::new(
- self.service_names,
- self.encoded_file_descriptor_sets,
- self.file_descriptor_sets,
- self.use_all_service_names,
- )?),
- ))
- }
-}
-
-#[derive(Debug)]
-struct ReflectionServiceState {
- service_names: Vec<String>,
- files: HashMap<String, Arc<FileDescriptorProto>>,
- symbols: HashMap<String, Arc<FileDescriptorProto>>,
-}
-
-impl ReflectionServiceState {
- fn new(
- service_names: Vec<String>,
- encoded_file_descriptor_sets: Vec<&[u8]>,
- mut file_descriptor_sets: Vec<FileDescriptorSet>,
- use_all_service_names: bool,
- ) -> Result<Self, Error> {
- for encoded in encoded_file_descriptor_sets {
- file_descriptor_sets.push(FileDescriptorSet::decode(encoded)?);
- }
-
- let mut state = ReflectionServiceState {
- service_names,
- files: HashMap::new(),
- symbols: HashMap::new(),
- };
-
- for fds in file_descriptor_sets {
- for fd in fds.file {
- let name = match fd.name.clone() {
- None => {
- return Err(Error::InvalidFileDescriptorSet("missing name".to_string()));
- }
- Some(n) => n,
- };
-
- if state.files.contains_key(&name) {
- continue;
- }
-
- let fd = Arc::new(fd);
- state.files.insert(name, fd.clone());
- state.process_file(fd, use_all_service_names)?;
- }
- }
-
- Ok(state)
- }
-
- fn process_file(
- &mut self,
- fd: Arc<FileDescriptorProto>,
- use_all_service_names: bool,
- ) -> Result<(), Error> {
- let prefix = &fd.package.clone().unwrap_or_default();
-
- for msg in &fd.message_type {
- self.process_message(fd.clone(), prefix, msg)?;
- }
-
- for en in &fd.enum_type {
- self.process_enum(fd.clone(), prefix, en)?;
- }
-
- for service in &fd.service {
- let service_name = extract_name(prefix, "service", service.name.as_ref())?;
- if use_all_service_names {
- self.service_names.push(service_name.clone());
- }
- self.symbols.insert(service_name.clone(), fd.clone());
-
- for method in &service.method {
- let method_name = extract_name(&service_name, "method", method.name.as_ref())?;
- self.symbols.insert(method_name, fd.clone());
- }
- }
-
- Ok(())
- }
-
- fn process_message(
- &mut self,
- fd: Arc<FileDescriptorProto>,
- prefix: &str,
- msg: &DescriptorProto,
- ) -> Result<(), Error> {
- let message_name = extract_name(prefix, "message", msg.name.as_ref())?;
- self.symbols.insert(message_name.clone(), fd.clone());
-
- for nested in &msg.nested_type {
- self.process_message(fd.clone(), &message_name, nested)?;
- }
-
- for en in &msg.enum_type {
- self.process_enum(fd.clone(), &message_name, en)?;
- }
-
- for field in &msg.field {
- self.process_field(fd.clone(), &message_name, field)?;
- }
-
- for oneof in &msg.oneof_decl {
- let oneof_name = extract_name(&message_name, "oneof", oneof.name.as_ref())?;
- self.symbols.insert(oneof_name, fd.clone());
- }
-
- Ok(())
- }
-
- fn process_enum(
- &mut self,
- fd: Arc<FileDescriptorProto>,
- prefix: &str,
- en: &EnumDescriptorProto,
- ) -> Result<(), Error> {
- let enum_name = extract_name(prefix, "enum", en.name.as_ref())?;
- self.symbols.insert(enum_name.clone(), fd.clone());
-
- for value in &en.value {
- let value_name = extract_name(&enum_name, "enum value", value.name.as_ref())?;
- self.symbols.insert(value_name, fd.clone());
- }
-
- Ok(())
- }
-
- fn process_field(
- &mut self,
- fd: Arc<FileDescriptorProto>,
- prefix: &str,
- field: &FieldDescriptorProto,
- ) -> Result<(), Error> {
- let field_name = extract_name(prefix, "field", field.name.as_ref())?;
- self.symbols.insert(field_name, fd);
- Ok(())
- }
-
- fn list_services(&self) -> &[String] {
- &self.service_names
- }
-
- fn symbol_by_name(&self, symbol: &str) -> Result<Vec<u8>, Status> {
- match self.symbols.get(symbol) {
- None => Err(Status::not_found(format!("symbol '{symbol}' not found"))),
- Some(fd) => {
- let mut encoded_fd = Vec::new();
- if fd.clone().encode(&mut encoded_fd).is_err() {
- return Err(Status::internal("encoding error"));
- };
-
- Ok(encoded_fd)
- }
- }
- }
-
- fn file_by_filename(&self, filename: &str) -> Result<Vec<u8>, Status> {
- match self.files.get(filename) {
- None => Err(Status::not_found(format!("file '{filename}' not found"))),
- Some(fd) => {
- let mut encoded_fd = Vec::new();
- if fd.clone().encode(&mut encoded_fd).is_err() {
- return Err(Status::internal("encoding error"));
- }
-
- Ok(encoded_fd)
- }
- }
- }
-}
-
-fn extract_name(
- prefix: &str,
- name_type: &str,
- maybe_name: Option<&String>,
-) -> Result<String, Error> {
- match maybe_name {
- None => Err(Error::InvalidFileDescriptorSet(format!(
- "missing {name_type} name"
- ))),
- Some(name) => {
- if prefix.is_empty() {
- Ok(name.to_string())
- } else {
- Ok(format!("{prefix}.{name}"))
- }
- }
- }
-}
-
-/// Represents an error in the construction of a gRPC Reflection Service.
-#[derive(Debug)]
-pub enum Error {
- /// An error was encountered decoding a `prost_types::FileDescriptorSet` from a buffer.
- DecodeError(prost::DecodeError),
- /// An invalid `prost_types::FileDescriptorProto` was encountered.
- InvalidFileDescriptorSet(String),
-}
-
-impl From<DecodeError> for Error {
- fn from(e: DecodeError) -> Self {
- Error::DecodeError(e)
- }
-}
-
-impl std::error::Error for Error {}
-
-impl Display for Error {
- fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
- match self {
- Error::DecodeError(_) => f.write_str("error decoding FileDescriptorSet from buffer"),
- Error::InvalidFileDescriptorSet(s) => {
- write!(f, "invalid FileDescriptorSet - {s}")
- }
- }
- }
-}
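
For orientation only (not part of this patch): a minimal sketch of how the Builder API from the vendored server/mod.rs removed above is typically wired into a tonic server, mirroring the usage in the deleted tests further down. It assumes the crates.io tonic-reflection release keeps the same server::Builder and pb::v1 surface as this vendored copy; the bind address is illustrative.

    // Sketch under the assumptions stated above; mirrors the deleted
    // tests/versions.rs usage rather than defining new behaviour.
    use tonic::transport::Server;
    use tonic_reflection::server::Builder;

    #[tokio::main]
    async fn main() -> Result<(), Box<dyn std::error::Error>> {
        // build_v1 serves grpc.reflection.v1; build_v1alpha exists for older
        // clients. The reflection service's own descriptor set is included by
        // default (include_reflection_service defaults to true).
        let reflection = Builder::configure()
            // Applications typically also register their own compiled
            // descriptor set here via register_encoded_file_descriptor_set.
            .build_v1()?;

        Server::builder()
            .add_service(reflection)
            .serve("127.0.0.1:50051".parse()?)
            .await?;

        Ok(())
    }
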
diff --git a/vendor/tonic-reflection/src/server/v1.rs b/vendor/tonic-reflection/src/server/v1.rs
deleted file mode 100644
index 6a5054f9..00000000
--- a/vendor/tonic-reflection/src/server/v1.rs
+++ /dev/null
@@ -1,138 +0,0 @@
-use std::{fmt, sync::Arc};
-
-use tokio::sync::mpsc;
-use tokio_stream::{Stream, StreamExt};
-use tonic::{Request, Response, Status, Streaming};
-
-use super::ReflectionServiceState;
-use crate::pb::v1::server_reflection_request::MessageRequest;
-use crate::pb::v1::server_reflection_response::MessageResponse;
-pub use crate::pb::v1::server_reflection_server::{ServerReflection, ServerReflectionServer};
-use crate::pb::v1::{
- ExtensionNumberResponse, FileDescriptorResponse, ListServiceResponse, ServerReflectionRequest,
- ServerReflectionResponse, ServiceResponse,
-};
-
-/// An implementation for `ServerReflection`.
-#[derive(Debug)]
-pub struct ReflectionService {
- state: Arc<ReflectionServiceState>,
-}
-
-#[tonic::async_trait]
-impl ServerReflection for ReflectionService {
- type ServerReflectionInfoStream = ServerReflectionInfoStream;
-
- async fn server_reflection_info(
- &self,
- req: Request<Streaming<ServerReflectionRequest>>,
- ) -> Result<Response<Self::ServerReflectionInfoStream>, Status> {
- let mut req_rx = req.into_inner();
- let (resp_tx, resp_rx) = mpsc::channel::<Result<ServerReflectionResponse, Status>>(1);
-
- let state = self.state.clone();
-
- tokio::spawn(async move {
- while let Some(req) = req_rx.next().await {
- let Ok(req) = req else {
- return;
- };
-
- let resp_msg = match req.message_request.clone() {
- None => Err(Status::invalid_argument("invalid MessageRequest")),
- Some(msg) => match msg {
- MessageRequest::FileByFilename(s) => state.file_by_filename(&s).map(|fd| {
- MessageResponse::FileDescriptorResponse(FileDescriptorResponse {
- file_descriptor_proto: vec![fd],
- })
- }),
- MessageRequest::FileContainingSymbol(s) => {
- state.symbol_by_name(&s).map(|fd| {
- MessageResponse::FileDescriptorResponse(FileDescriptorResponse {
- file_descriptor_proto: vec![fd],
- })
- })
- }
- MessageRequest::FileContainingExtension(_) => {
- Err(Status::not_found("extensions are not supported"))
- }
- MessageRequest::AllExtensionNumbersOfType(_) => {
- // NOTE: Workaround. Some grpc clients (e.g. grpcurl) expect this method not to fail.
- // https://github.com/hyperium/tonic/issues/1077
- Ok(MessageResponse::AllExtensionNumbersResponse(
- ExtensionNumberResponse::default(),
- ))
- }
- MessageRequest::ListServices(_) => {
- Ok(MessageResponse::ListServicesResponse(ListServiceResponse {
- service: state
- .list_services()
- .iter()
- .map(|s| ServiceResponse { name: s.clone() })
- .collect(),
- }))
- }
- },
- };
-
- match resp_msg {
- Ok(resp_msg) => {
- let resp = ServerReflectionResponse {
- valid_host: req.host.clone(),
- original_request: Some(req.clone()),
- message_response: Some(resp_msg),
- };
- resp_tx.send(Ok(resp)).await.expect("send");
- }
- Err(status) => {
- resp_tx.send(Err(status)).await.expect("send");
- return;
- }
- }
- }
- });
-
- Ok(Response::new(ServerReflectionInfoStream::new(resp_rx)))
- }
-}
-
-impl From<ReflectionServiceState> for ReflectionService {
- fn from(state: ReflectionServiceState) -> Self {
- Self {
- state: Arc::new(state),
- }
- }
-}
-
-/// A response stream.
-pub struct ServerReflectionInfoStream {
- inner: tokio_stream::wrappers::ReceiverStream<Result<ServerReflectionResponse, Status>>,
-}
-
-impl ServerReflectionInfoStream {
- fn new(resp_rx: mpsc::Receiver<Result<ServerReflectionResponse, Status>>) -> Self {
- let inner = tokio_stream::wrappers::ReceiverStream::new(resp_rx);
- Self { inner }
- }
-}
-
-impl Stream for ServerReflectionInfoStream {
- type Item = Result<ServerReflectionResponse, Status>;
-
- fn poll_next(
- mut self: std::pin::Pin<&mut Self>,
- cx: &mut std::task::Context<'_>,
- ) -> std::task::Poll<Option<Self::Item>> {
- std::pin::Pin::new(&mut self.inner).poll_next(cx)
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-
-impl fmt::Debug for ServerReflectionInfoStream {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("ServerReflectionInfoStream").finish()
- }
-}
diff --git a/vendor/tonic-reflection/src/server/v1alpha.rs b/vendor/tonic-reflection/src/server/v1alpha.rs
deleted file mode 100644
index b21d8d91..00000000
--- a/vendor/tonic-reflection/src/server/v1alpha.rs
+++ /dev/null
@@ -1,138 +0,0 @@
-use std::{fmt, sync::Arc};
-
-use tokio::sync::mpsc;
-use tokio_stream::{Stream, StreamExt};
-use tonic::{Request, Response, Status, Streaming};
-
-use super::ReflectionServiceState;
-use crate::pb::v1alpha::server_reflection_request::MessageRequest;
-use crate::pb::v1alpha::server_reflection_response::MessageResponse;
-pub use crate::pb::v1alpha::server_reflection_server::{ServerReflection, ServerReflectionServer};
-use crate::pb::v1alpha::{
- ExtensionNumberResponse, FileDescriptorResponse, ListServiceResponse, ServerReflectionRequest,
- ServerReflectionResponse, ServiceResponse,
-};
-
-/// An implementation for `ServerReflection`.
-#[derive(Debug)]
-pub struct ReflectionService {
- state: Arc<ReflectionServiceState>,
-}
-
-#[tonic::async_trait]
-impl ServerReflection for ReflectionService {
- type ServerReflectionInfoStream = ServerReflectionInfoStream;
-
- async fn server_reflection_info(
- &self,
- req: Request<Streaming<ServerReflectionRequest>>,
- ) -> Result<Response<Self::ServerReflectionInfoStream>, Status> {
- let mut req_rx = req.into_inner();
- let (resp_tx, resp_rx) = mpsc::channel::<Result<ServerReflectionResponse, Status>>(1);
-
- let state = self.state.clone();
-
- tokio::spawn(async move {
- while let Some(req) = req_rx.next().await {
- let Ok(req) = req else {
- return;
- };
-
- let resp_msg = match req.message_request.clone() {
- None => Err(Status::invalid_argument("invalid MessageRequest")),
- Some(msg) => match msg {
- MessageRequest::FileByFilename(s) => state.file_by_filename(&s).map(|fd| {
- MessageResponse::FileDescriptorResponse(FileDescriptorResponse {
- file_descriptor_proto: vec![fd],
- })
- }),
- MessageRequest::FileContainingSymbol(s) => {
- state.symbol_by_name(&s).map(|fd| {
- MessageResponse::FileDescriptorResponse(FileDescriptorResponse {
- file_descriptor_proto: vec![fd],
- })
- })
- }
- MessageRequest::FileContainingExtension(_) => {
- Err(Status::not_found("extensions are not supported"))
- }
- MessageRequest::AllExtensionNumbersOfType(_) => {
- // NOTE: Workaround. Some grpc clients (e.g. grpcurl) expect this method not to fail.
- // https://github.com/hyperium/tonic/issues/1077
- Ok(MessageResponse::AllExtensionNumbersResponse(
- ExtensionNumberResponse::default(),
- ))
- }
- MessageRequest::ListServices(_) => {
- Ok(MessageResponse::ListServicesResponse(ListServiceResponse {
- service: state
- .list_services()
- .iter()
- .map(|s| ServiceResponse { name: s.clone() })
- .collect(),
- }))
- }
- },
- };
-
- match resp_msg {
- Ok(resp_msg) => {
- let resp = ServerReflectionResponse {
- valid_host: req.host.clone(),
- original_request: Some(req.clone()),
- message_response: Some(resp_msg),
- };
- resp_tx.send(Ok(resp)).await.expect("send");
- }
- Err(status) => {
- resp_tx.send(Err(status)).await.expect("send");
- return;
- }
- }
- }
- });
-
- Ok(Response::new(ServerReflectionInfoStream::new(resp_rx)))
- }
-}
-
-impl From<ReflectionServiceState> for ReflectionService {
- fn from(state: ReflectionServiceState) -> Self {
- Self {
- state: Arc::new(state),
- }
- }
-}
-
-/// A response stream.
-pub struct ServerReflectionInfoStream {
- inner: tokio_stream::wrappers::ReceiverStream<Result<ServerReflectionResponse, Status>>,
-}
-
-impl ServerReflectionInfoStream {
- fn new(resp_rx: mpsc::Receiver<Result<ServerReflectionResponse, Status>>) -> Self {
- let inner = tokio_stream::wrappers::ReceiverStream::new(resp_rx);
- Self { inner }
- }
-}
-
-impl Stream for ServerReflectionInfoStream {
- type Item = Result<ServerReflectionResponse, Status>;
-
- fn poll_next(
- mut self: std::pin::Pin<&mut Self>,
- cx: &mut std::task::Context<'_>,
- ) -> std::task::Poll<Option<Self::Item>> {
- std::pin::Pin::new(&mut self.inner).poll_next(cx)
- }
-
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-
-impl fmt::Debug for ServerReflectionInfoStream {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_tuple("ServerReflectionInfoStream").finish()
- }
-}
diff --git a/vendor/tonic-reflection/tests/server.rs b/vendor/tonic-reflection/tests/server.rs
deleted file mode 100644
index 778e887e..00000000
--- a/vendor/tonic-reflection/tests/server.rs
+++ /dev/null
@@ -1,151 +0,0 @@
-#![allow(missing_docs)]
-
-use prost::Message;
-use std::net::SocketAddr;
-use tokio::sync::oneshot;
-use tokio_stream::{wrappers::TcpListenerStream, StreamExt};
-use tonic::{transport::Server, Request};
-use tonic_reflection::{
- pb::v1::{
- server_reflection_client::ServerReflectionClient,
- server_reflection_request::MessageRequest, server_reflection_response::MessageResponse,
- ServerReflectionRequest, ServiceResponse, FILE_DESCRIPTOR_SET,
- },
- server::Builder,
-};
-
-pub(crate) fn get_encoded_reflection_service_fd() -> Vec<u8> {
- let mut expected = Vec::new();
- prost_types::FileDescriptorSet::decode(FILE_DESCRIPTOR_SET)
- .expect("decode reflection service file descriptor set")
- .file[0]
- .encode(&mut expected)
- .expect("encode reflection service file descriptor");
- expected
-}
-
-#[tokio::test]
-async fn test_list_services() {
- let response = make_test_reflection_request(ServerReflectionRequest {
- host: "".to_string(),
- message_request: Some(MessageRequest::ListServices(String::new())),
- })
- .await;
-
- if let MessageResponse::ListServicesResponse(services) = response {
- assert_eq!(
- services.service,
- vec![ServiceResponse {
- name: String::from("grpc.reflection.v1.ServerReflection")
- }]
- );
- } else {
- panic!("Expected a ListServicesResponse variant");
- }
-}
-
-#[tokio::test]
-async fn test_file_by_filename() {
- let response = make_test_reflection_request(ServerReflectionRequest {
- host: "".to_string(),
- message_request: Some(MessageRequest::FileByFilename(String::from(
- "reflection_v1.proto",
- ))),
- })
- .await;
-
- if let MessageResponse::FileDescriptorResponse(descriptor) = response {
- let file_descriptor_proto = descriptor
- .file_descriptor_proto
- .first()
- .expect("descriptor");
- assert_eq!(
- file_descriptor_proto.as_ref(),
- get_encoded_reflection_service_fd()
- );
- } else {
- panic!("Expected a FileDescriptorResponse variant");
- }
-}
-
-#[tokio::test]
-async fn test_file_containing_symbol() {
- let response = make_test_reflection_request(ServerReflectionRequest {
- host: "".to_string(),
- message_request: Some(MessageRequest::FileContainingSymbol(String::from(
- "grpc.reflection.v1.ServerReflection",
- ))),
- })
- .await;
-
- if let MessageResponse::FileDescriptorResponse(descriptor) = response {
- let file_descriptor_proto = descriptor
- .file_descriptor_proto
- .first()
- .expect("descriptor");
- assert_eq!(
- file_descriptor_proto.as_ref(),
- get_encoded_reflection_service_fd()
- );
- } else {
- panic!("Expected a FileDescriptorResponse variant");
- }
-}
-
-async fn make_test_reflection_request(request: ServerReflectionRequest) -> MessageResponse {
- // Run a test server
- let (shutdown_tx, shutdown_rx) = oneshot::channel();
-
- let addr: SocketAddr = "127.0.0.1:0".parse().expect("SocketAddr parse");
- let listener = tokio::net::TcpListener::bind(addr).await.expect("bind");
- let local_addr = format!("http://{}", listener.local_addr().expect("local address"));
- let jh = tokio::spawn(async move {
- let service = Builder::configure()
- .register_encoded_file_descriptor_set(FILE_DESCRIPTOR_SET)
- .build_v1()
- .unwrap();
-
- Server::builder()
- .add_service(service)
- .serve_with_incoming_shutdown(TcpListenerStream::new(listener), async {
- drop(shutdown_rx.await)
- })
- .await
- .unwrap();
- });
-
- // Give the test server a few ms to become available
- tokio::time::sleep(std::time::Duration::from_millis(100)).await;
-
- // Construct client and send request, extract response
- let conn = tonic::transport::Endpoint::new(local_addr)
- .unwrap()
- .connect()
- .await
- .unwrap();
- let mut client = ServerReflectionClient::new(conn);
-
- let request = Request::new(tokio_stream::once(request));
- let mut inbound = client
- .server_reflection_info(request)
- .await
- .expect("request")
- .into_inner();
-
- let response = inbound
- .next()
- .await
-        .expect("streamed response")
- .expect("successful response")
- .message_response
- .expect("some MessageResponse");
-
- // We only expect one response per request
- assert!(inbound.next().await.is_none());
-
- // Shut down test server
- shutdown_tx.send(()).expect("send shutdown");
- jh.await.expect("server shutdown");
-
- response
-}
diff --git a/vendor/tonic-reflection/tests/versions.rs b/vendor/tonic-reflection/tests/versions.rs
deleted file mode 100644
index 9ab0858e..00000000
--- a/vendor/tonic-reflection/tests/versions.rs
+++ /dev/null
@@ -1,172 +0,0 @@
-#![allow(missing_docs)]
-
-use std::net::SocketAddr;
-
-use tokio::sync::oneshot;
-use tokio_stream::{wrappers::TcpListenerStream, StreamExt};
-use tonic::{transport::Server, Request};
-
-use tonic_reflection::pb::{v1, v1alpha};
-use tonic_reflection::server::Builder;
-
-#[tokio::test]
-async fn test_v1() {
- let response = make_v1_request(v1::ServerReflectionRequest {
- host: "".to_string(),
- message_request: Some(v1::server_reflection_request::MessageRequest::ListServices(
- String::new(),
- )),
- })
- .await;
-
- if let v1::server_reflection_response::MessageResponse::ListServicesResponse(services) =
- response
- {
- assert_eq!(
- services.service,
- vec![v1::ServiceResponse {
- name: String::from("grpc.reflection.v1.ServerReflection")
- }]
- );
- } else {
- panic!("Expected a ListServicesResponse variant");
- }
-}
-
-#[tokio::test]
-async fn test_v1alpha() {
- let response = make_v1alpha_request(v1alpha::ServerReflectionRequest {
- host: "".to_string(),
- message_request: Some(
- v1alpha::server_reflection_request::MessageRequest::ListServices(String::new()),
- ),
- })
- .await;
-
- if let v1alpha::server_reflection_response::MessageResponse::ListServicesResponse(services) =
- response
- {
- assert_eq!(
- services.service,
- vec![v1alpha::ServiceResponse {
- name: String::from("grpc.reflection.v1alpha.ServerReflection")
- }]
- );
- } else {
- panic!("Expected a ListServicesResponse variant");
- }
-}
-
-async fn make_v1_request(
- request: v1::ServerReflectionRequest,
-) -> v1::server_reflection_response::MessageResponse {
- // Run a test server
- let (shutdown_tx, shutdown_rx) = oneshot::channel();
-
- let addr: SocketAddr = "127.0.0.1:0".parse().expect("SocketAddr parse");
- let listener = tokio::net::TcpListener::bind(addr).await.expect("bind");
- let local_addr = format!("http://{}", listener.local_addr().expect("local address"));
- let jh = tokio::spawn(async move {
- let service = Builder::configure().build_v1().unwrap();
-
- Server::builder()
- .add_service(service)
- .serve_with_incoming_shutdown(TcpListenerStream::new(listener), async {
- drop(shutdown_rx.await)
- })
- .await
- .unwrap();
- });
-
- // Give the test server a few ms to become available
- tokio::time::sleep(std::time::Duration::from_millis(100)).await;
-
- // Construct client and send request, extract response
- let conn = tonic::transport::Endpoint::new(local_addr)
- .unwrap()
- .connect()
- .await
- .unwrap();
- let mut client = v1::server_reflection_client::ServerReflectionClient::new(conn);
-
- let request = Request::new(tokio_stream::once(request));
- let mut inbound = client
- .server_reflection_info(request)
- .await
- .expect("request")
- .into_inner();
-
- let response = inbound
- .next()
- .await
-        .expect("streamed response")
- .expect("successful response")
- .message_response
- .expect("some MessageResponse");
-
- // We only expect one response per request
- assert!(inbound.next().await.is_none());
-
- // Shut down test server
- shutdown_tx.send(()).expect("send shutdown");
- jh.await.expect("server shutdown");
-
- response
-}
-
-async fn make_v1alpha_request(
- request: v1alpha::ServerReflectionRequest,
-) -> v1alpha::server_reflection_response::MessageResponse {
- // Run a test server
- let (shutdown_tx, shutdown_rx) = oneshot::channel();
-
- let addr: SocketAddr = "127.0.0.1:0".parse().expect("SocketAddr parse");
- let listener = tokio::net::TcpListener::bind(addr).await.expect("bind");
- let local_addr = format!("http://{}", listener.local_addr().expect("local address"));
- let jh = tokio::spawn(async move {
- let service = Builder::configure().build_v1alpha().unwrap();
-
- Server::builder()
- .add_service(service)
- .serve_with_incoming_shutdown(TcpListenerStream::new(listener), async {
- drop(shutdown_rx.await)
- })
- .await
- .unwrap();
- });
-
- // Give the test server a few ms to become available
- tokio::time::sleep(std::time::Duration::from_millis(100)).await;
-
- // Construct client and send request, extract response
- let conn = tonic::transport::Endpoint::new(local_addr)
- .unwrap()
- .connect()
- .await
- .unwrap();
- let mut client = v1alpha::server_reflection_client::ServerReflectionClient::new(conn);
-
- let request = Request::new(tokio_stream::once(request));
- let mut inbound = client
- .server_reflection_info(request)
- .await
- .expect("request")
- .into_inner();
-
- let response = inbound
- .next()
- .await
-        .expect("streamed response")
- .expect("successful response")
- .message_response
- .expect("some MessageResponse");
-
- // We only expect one response per request
- assert!(inbound.next().await.is_none());
-
- // Shut down test server
- shutdown_tx.send(()).expect("send shutdown");
- jh.await.expect("server shutdown");
-
- response
-}