| author | mo khan <mo@mokhan.ca> | 2025-07-15 16:37:08 -0600 |
|---|---|---|
| committer | mo khan <mo@mokhan.ca> | 2025-07-17 16:30:22 -0600 |
| commit | 45df4d0d9b577fecee798d672695fe24ff57fb1b (patch) | |
| tree | 1b99bf645035b58e0d6db08c7a83521f41f7a75b /vendor/proc-macro2 | |
| parent | f94f79608393d4ab127db63cc41668445ef6b243 (diff) | |
feat: migrate from Cedar to SpiceDB authorization system
This is a major architectural change that replaces the Cedar policy-based
authorization system with SpiceDB's relation-based authorization.
Key changes:
- Migrate the implementation from Rust to Go
- Replace Cedar policies with SpiceDB schema and relationships
- Switch the Envoy `ext_authz` integration from Cedar policy evaluation to SpiceDB permission checks
- Update build system and dependencies for Go ecosystem
- Maintain Envoy integration for external authorization
This change enables more flexible permission modeling through SpiceDB's
Google Zanzibar-inspired, relation-based system, which supports complex
hierarchical permissions that were difficult to express in Cedar.
Breaking change: Existing Cedar policies and Rust-based configuration
will no longer work and must be migrated to the SpiceDB schema and
relationships.
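
For context, the sketch below shows what a relation-based permission check against SpiceDB's v1 API can look like from Go using the `authzed-go` client. The schema in the comment, the object types (`user`, `organization`, `document`), the endpoint, and the token are illustrative assumptions for this example only, not the schema or configuration introduced by this commit.

```go
package main

import (
	"context"
	"log"

	v1 "github.com/authzed/authzed-go/proto/authzed/api/v1"
	"github.com/authzed/authzed-go/v1"
	"github.com/authzed/grpcutil"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

// Illustrative schema that this check assumes (not the schema shipped by this commit):
//
//	definition user {}
//
//	definition organization {
//	    relation admin: user
//	}
//
//	definition document {
//	    relation org: organization
//	    relation viewer: user
//
//	    // hierarchical: direct viewers plus admins of the owning organization
//	    permission view = viewer + org->admin
//	}

func main() {
	// Endpoint and preshared key are placeholders; real values come from deployment config.
	client, err := authzed.NewClient(
		"localhost:50051",
		grpcutil.WithInsecureBearerToken("dev-preshared-key"),
		grpc.WithTransportCredentials(insecure.NewCredentials()),
	)
	if err != nil {
		log.Fatalf("unable to create SpiceDB client: %v", err)
	}

	// Ask SpiceDB whether user:alice may view document:readme. The answer is
	// derived from stored relationships rather than from evaluated policy rules.
	resp, err := client.CheckPermission(context.Background(), &v1.CheckPermissionRequest{
		Resource:   &v1.ObjectReference{ObjectType: "document", ObjectId: "readme"},
		Permission: "view",
		Subject: &v1.SubjectReference{
			Object: &v1.ObjectReference{ObjectType: "user", ObjectId: "alice"},
		},
	})
	if err != nil {
		log.Fatalf("permission check failed: %v", err)
	}

	allowed := resp.Permissionship == v1.CheckPermissionResponse_PERMISSIONSHIP_HAS_PERMISSION
	log.Printf("user:alice view document:readme -> allowed=%v", allowed)
}
```

In an Envoy `ext_authz` flow, a check along these lines would run inside the external authorization service, translating the incoming request's attributes into the resource, permission, and subject shown above.
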
Diffstat (limited to 'vendor/proc-macro2')
24 files changed, 0 insertions, 7180 deletions
diff --git a/vendor/proc-macro2/.cargo-checksum.json b/vendor/proc-macro2/.cargo-checksum.json deleted file mode 100644 index 72b547d0..00000000 --- a/vendor/proc-macro2/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{"Cargo.lock":"5c006588c9bc74f3f41e4aca25c4a6a28ed65b6dd4ea403bc5c9cedf99c0ae6c","Cargo.toml":"277a6ffb50768523faca5089df941e0d1baf2a7f91792beef3d30667bce985c8","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"c609b6865476d6c35879784e9155367a97a0da496aa5c3c61488440a20f59883","build.rs":"29344b7dbd94595f9fbcbc3f2be9b4bb52b5e8a26d21d4363714bab2a18f32ad","build/probe.rs":"df0d73191f20c207bb1051a4944fb6962e1f632d1e0535aba4b995aa7feba8d1","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/detection.rs":"ed9a5f9a979ab01247d7a68eeb1afa3c13209334c5bfff0f9289cb07e5bb4e8b","src/extra.rs":"29f094473279a29b71c3cc9f5fa27c2e2c30c670390cf7e4b7cf451486cc857e","src/fallback.rs":"9fa04ed4b93108271c65b7b99c0587697ffd1192bd022923b25f4ad9bb146a06","src/lib.rs":"96ed5707e1c61b14f0ad72558f3df69feed99925f8e09bdebd6deaee58557caf","src/location.rs":"9225c5a55f03b56cce42bc55ceb509e8216a5e0b24c94aa1cd071b04e3d6c15f","src/marker.rs":"c11c5a1be8bdf18be3fcd224393f350a9aae7ce282e19ce583c84910c6903a8f","src/parse.rs":"07e4e3f93ff3767e17b281b18867a7faa849399cf4081db2fd25d00416d36656","src/rcvec.rs":"a159d246cac59aae2d51b899471ce34766f51f3c11c376ac36ee501ee3f12a7a","src/wrapper.rs":"dbc8b042250101391b260646ccdd8ff3b544f9880eae369e9b1cb8eec9fd7de3","tests/comments.rs":"31115b3a56c83d93eef2fb4c9566bf4543e302560732986161b98aef504785ed","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"f16299460587d6c65603ed809f1a3b81853e4b99d6cb44d0b68bb07259d7e9f8","tests/test.rs":"17555666aea92454d65b8193ad7e8eebbfc0cd7cfabd582851f0af5e4de4515b","tests/test_fmt.rs":"b7743b612af65f2c88cbe109d50a093db7aa7e87f9e37bf45b7bbaeb240aa020","tests/test_size.rs":"62d8373ea46b669b87bc90a9c49b6d02f90ff4c21f9a25acebf60c9926e01fb7"},"package":"02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"}
\ No newline at end of file diff --git a/vendor/proc-macro2/Cargo.lock b/vendor/proc-macro2/Cargo.lock deleted file mode 100644 index 466b6841..00000000 --- a/vendor/proc-macro2/Cargo.lock +++ /dev/null @@ -1,303 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "adler2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" - -[[package]] -name = "bitflags" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "crc32fast" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "either" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" - -[[package]] -name = "errno" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" -dependencies = [ - "libc", - "windows-sys", -] - -[[package]] -name = "filetime" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" -dependencies = [ - "cfg-if", - "libc", - "libredox", - "windows-sys", -] - -[[package]] -name = "flate2" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - -[[package]] -name = "libc" -version = "0.2.172" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" - -[[package]] -name = "libredox" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags", - "libc", - "redox_syscall", -] - -[[package]] -name = "linux-raw-sys" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" - -[[package]] -name = "miniz_oxide" -version = "0.8.8" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" -dependencies = [ - "adler2", -] - -[[package]] -name = "proc-macro2" -version = "1.0.94" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "proc-macro2" -version = "1.0.95" -dependencies = [ - "flate2", - "quote", - "rayon", - "rustversion", - "tar", - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" -dependencies = [ - "proc-macro2 1.0.94", -] - -[[package]] -name = "rayon" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "redox_syscall" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2f103c6d277498fbceb16e84d317e2a400f160f46904d5f5410848c829511a3" -dependencies = [ - "bitflags", -] - -[[package]] -name = "rustix" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys", -] - -[[package]] -name = "rustversion" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" - -[[package]] -name = "tar" -version = "0.4.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" -dependencies = [ - "filetime", - "libc", - "xattr", -] - -[[package]] -name = "unicode-ident" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "xattr" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e" -dependencies = [ - "libc", - "rustix", -] diff --git a/vendor/proc-macro2/Cargo.toml b/vendor/proc-macro2/Cargo.toml deleted file mode 100644 index 45b0f432..00000000 --- a/vendor/proc-macro2/Cargo.toml +++ /dev/null @@ -1,104 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. - -[package] -edition = "2021" -rust-version = "1.56" -name = "proc-macro2" -version = "1.0.95" -authors = [ - "David Tolnay <dtolnay@gmail.com>", - "Alex Crichton <alex@alexcrichton.com>", -] -build = "build.rs" -autolib = false -autobins = false -autoexamples = false -autotests = false -autobenches = false -description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case." 
-documentation = "https://docs.rs/proc-macro2" -readme = "README.md" -keywords = [ - "macros", - "syn", -] -categories = ["development-tools::procedural-macro-helpers"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/dtolnay/proc-macro2" - -[package.metadata.docs.rs] -rustc-args = ["--cfg=procmacro2_semver_exempt"] -rustdoc-args = [ - "--cfg=procmacro2_semver_exempt", - "--generate-link-to-definition", - "--extern-html-root-url=core=https://doc.rust-lang.org", - "--extern-html-root-url=alloc=https://doc.rust-lang.org", - "--extern-html-root-url=std=https://doc.rust-lang.org", - "--extern-html-root-url=proc_macro=https://doc.rust-lang.org", -] -targets = ["x86_64-unknown-linux-gnu"] - -[package.metadata.playground] -features = ["span-locations"] - -[features] -default = ["proc-macro"] -nightly = [] -proc-macro = [] -span-locations = [] - -[lib] -name = "proc_macro2" -path = "src/lib.rs" - -[[test]] -name = "comments" -path = "tests/comments.rs" - -[[test]] -name = "features" -path = "tests/features.rs" - -[[test]] -name = "marker" -path = "tests/marker.rs" - -[[test]] -name = "test" -path = "tests/test.rs" - -[[test]] -name = "test_fmt" -path = "tests/test_fmt.rs" - -[[test]] -name = "test_size" -path = "tests/test_size.rs" - -[dependencies.unicode-ident] -version = "1.0" - -[dev-dependencies.flate2] -version = "1.0" - -[dev-dependencies.quote] -version = "1.0" -default-features = false - -[dev-dependencies.rayon] -version = "1.0" - -[dev-dependencies.rustversion] -version = "1" - -[dev-dependencies.tar] -version = "0.4" diff --git a/vendor/proc-macro2/LICENSE-APACHE b/vendor/proc-macro2/LICENSE-APACHE deleted file mode 100644 index 1b5ec8b7..00000000 --- a/vendor/proc-macro2/LICENSE-APACHE +++ /dev/null @@ -1,176 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS diff --git a/vendor/proc-macro2/LICENSE-MIT b/vendor/proc-macro2/LICENSE-MIT deleted file mode 100644 index 31aa7938..00000000 --- a/vendor/proc-macro2/LICENSE-MIT +++ /dev/null @@ -1,23 +0,0 @@ -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
diff --git a/vendor/proc-macro2/README.md b/vendor/proc-macro2/README.md deleted file mode 100644 index 3a29ce8b..00000000 --- a/vendor/proc-macro2/README.md +++ /dev/null @@ -1,94 +0,0 @@ -# proc-macro2 - -[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/proc--macro2-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/proc-macro2) -[<img alt="crates.io" src="https://img.shields.io/crates/v/proc-macro2.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/proc-macro2) -[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-proc--macro2-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/proc-macro2) -[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/proc-macro2/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/proc-macro2/actions?query=branch%3Amaster) - -A wrapper around the procedural macro API of the compiler's `proc_macro` crate. -This library serves two purposes: - -- **Bring proc-macro-like functionality to other contexts like build.rs and - main.rs.** Types from `proc_macro` are entirely specific to procedural macros - and cannot ever exist in code outside of a procedural macro. Meanwhile - `proc_macro2` types may exist anywhere including non-macro code. By developing - foundational libraries like [syn] and [quote] against `proc_macro2` rather - than `proc_macro`, the procedural macro ecosystem becomes easily applicable to - many other use cases and we avoid reimplementing non-macro equivalents of - those libraries. - -- **Make procedural macros unit testable.** As a consequence of being specific - to procedural macros, nothing that uses `proc_macro` can be executed from a - unit test. In order for helper libraries or components of a macro to be - testable in isolation, they must be implemented using `proc_macro2`. - -[syn]: https://github.com/dtolnay/syn -[quote]: https://github.com/dtolnay/quote - -## Usage - -```toml -[dependencies] -proc-macro2 = "1.0" -``` - -The skeleton of a typical procedural macro typically looks like this: - -```rust -extern crate proc_macro; - -#[proc_macro_derive(MyDerive)] -pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let input = proc_macro2::TokenStream::from(input); - - let output: proc_macro2::TokenStream = { - /* transform input */ - }; - - proc_macro::TokenStream::from(output) -} -``` - -If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate -parse errors correctly back to the compiler when parsing fails. - -[`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html - -## Unstable features - -The default feature set of proc-macro2 tracks the most recent stable compiler -API. Functionality in `proc_macro` that is not yet stable is not exposed by -proc-macro2 by default. - -To opt into the additional APIs available in the most recent nightly compiler, -the `procmacro2_semver_exempt` config flag must be passed to rustc. We will -polyfill those nightly-only APIs back to Rust 1.56.0. As these are unstable APIs -that track the nightly compiler, minor versions of proc-macro2 may make breaking -changes to them at any time. - -``` -RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build -``` - -Note that this must not only be done for your crate, but for any crate that -depends on your crate. 
This infectious nature is intentional, as it serves as a -reminder that you are outside of the normal semver guarantees. - -Semver exempt methods are marked as such in the proc-macro2 documentation. - -<br> - -#### License - -<sup> -Licensed under either of <a href="LICENSE-APACHE">Apache License, Version -2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option. -</sup> - -<br> - -<sub> -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in this crate by you, as defined in the Apache-2.0 license, shall -be dual licensed as above, without any additional terms or conditions. -</sub> diff --git a/vendor/proc-macro2/build.rs b/vendor/proc-macro2/build.rs deleted file mode 100644 index bced1e0a..00000000 --- a/vendor/proc-macro2/build.rs +++ /dev/null @@ -1,235 +0,0 @@ -#![allow(unknown_lints)] -#![allow(unexpected_cfgs)] - -use std::env; -use std::ffi::OsString; -use std::fs; -use std::io::ErrorKind; -use std::iter; -use std::path::Path; -use std::process::{self, Command, Stdio}; -use std::str; - -fn main() { - let rustc = rustc_minor_version().unwrap_or(u32::MAX); - - if rustc >= 80 { - println!("cargo:rustc-check-cfg=cfg(fuzzing)"); - println!("cargo:rustc-check-cfg=cfg(no_is_available)"); - println!("cargo:rustc-check-cfg=cfg(no_literal_byte_character)"); - println!("cargo:rustc-check-cfg=cfg(no_literal_c_string)"); - println!("cargo:rustc-check-cfg=cfg(no_source_text)"); - println!("cargo:rustc-check-cfg=cfg(proc_macro_span)"); - println!("cargo:rustc-check-cfg=cfg(procmacro2_backtrace)"); - println!("cargo:rustc-check-cfg=cfg(procmacro2_nightly_testing)"); - println!("cargo:rustc-check-cfg=cfg(procmacro2_semver_exempt)"); - println!("cargo:rustc-check-cfg=cfg(randomize_layout)"); - println!("cargo:rustc-check-cfg=cfg(span_locations)"); - println!("cargo:rustc-check-cfg=cfg(super_unstable)"); - println!("cargo:rustc-check-cfg=cfg(wrap_proc_macro)"); - } - - let docs_rs = env::var_os("DOCS_RS").is_some(); - let semver_exempt = cfg!(procmacro2_semver_exempt) || docs_rs; - if semver_exempt { - // https://github.com/dtolnay/proc-macro2/issues/147 - println!("cargo:rustc-cfg=procmacro2_semver_exempt"); - } - - if semver_exempt || cfg!(feature = "span-locations") { - // Provide methods Span::start and Span::end which give the line/column - // location of a token. This is behind a cfg because tracking location - // inside spans is a performance hit. - println!("cargo:rustc-cfg=span_locations"); - } - - if rustc < 57 { - // Do not use proc_macro::is_available() to detect whether the proc - // macro API is available vs needs to be polyfilled. Instead, use the - // proc macro API unconditionally and catch the panic that occurs if it - // isn't available. - println!("cargo:rustc-cfg=no_is_available"); - } - - if rustc < 66 { - // Do not call libproc_macro's Span::source_text. Always return None. - println!("cargo:rustc-cfg=no_source_text"); - } - - if rustc < 79 { - // Do not call Literal::byte_character nor Literal::c_string. They can - // be emulated by way of Literal::from_str. - println!("cargo:rustc-cfg=no_literal_byte_character"); - println!("cargo:rustc-cfg=no_literal_c_string"); - } - - if !cfg!(feature = "proc-macro") { - println!("cargo:rerun-if-changed=build.rs"); - return; - } - - println!("cargo:rerun-if-changed=build/probe.rs"); - - let proc_macro_span; - let consider_rustc_bootstrap; - if compile_probe(false) { - // This is a nightly or dev compiler, so it supports unstable features - // regardless of RUSTC_BOOTSTRAP. 
No need to rerun build script if - // RUSTC_BOOTSTRAP is changed. - proc_macro_span = true; - consider_rustc_bootstrap = false; - } else if let Some(rustc_bootstrap) = env::var_os("RUSTC_BOOTSTRAP") { - if compile_probe(true) { - // This is a stable or beta compiler for which the user has set - // RUSTC_BOOTSTRAP to turn on unstable features. Rerun build script - // if they change it. - proc_macro_span = true; - consider_rustc_bootstrap = true; - } else if rustc_bootstrap == "1" { - // This compiler does not support the proc macro Span API in the - // form that proc-macro2 expects. No need to pay attention to - // RUSTC_BOOTSTRAP. - proc_macro_span = false; - consider_rustc_bootstrap = false; - } else { - // This is a stable or beta compiler for which RUSTC_BOOTSTRAP is - // set to restrict the use of unstable features by this crate. - proc_macro_span = false; - consider_rustc_bootstrap = true; - } - } else { - // Without RUSTC_BOOTSTRAP, this compiler does not support the proc - // macro Span API in the form that proc-macro2 expects, but try again if - // the user turns on unstable features. - proc_macro_span = false; - consider_rustc_bootstrap = true; - } - - if proc_macro_span || !semver_exempt { - // Wrap types from libproc_macro rather than polyfilling the whole API. - // Enabled as long as procmacro2_semver_exempt is not set, because we - // can't emulate the unstable API without emulating everything else. - // Also enabled unconditionally on nightly, in which case the - // procmacro2_semver_exempt surface area is implemented by using the - // nightly-only proc_macro API. - println!("cargo:rustc-cfg=wrap_proc_macro"); - } - - if proc_macro_span { - // Enable non-dummy behavior of Span::start and Span::end methods which - // requires an unstable compiler feature. Enabled when building with - // nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable - // features. - println!("cargo:rustc-cfg=proc_macro_span"); - } - - if semver_exempt && proc_macro_span { - // Implement the semver exempt API in terms of the nightly-only - // proc_macro API. - println!("cargo:rustc-cfg=super_unstable"); - } - - if consider_rustc_bootstrap { - println!("cargo:rerun-if-env-changed=RUSTC_BOOTSTRAP"); - } -} - -fn compile_probe(rustc_bootstrap: bool) -> bool { - if env::var_os("RUSTC_STAGE").is_some() { - // We are running inside rustc bootstrap. This is a highly non-standard - // environment with issues such as: - // - // https://github.com/rust-lang/cargo/issues/11138 - // https://github.com/rust-lang/rust/issues/114839 - // - // Let's just not use nightly features here. 
- return false; - } - - let rustc = cargo_env_var("RUSTC"); - let out_dir = cargo_env_var("OUT_DIR"); - let out_subdir = Path::new(&out_dir).join("probe"); - let probefile = Path::new("build").join("probe.rs"); - - if let Err(err) = fs::create_dir(&out_subdir) { - if err.kind() != ErrorKind::AlreadyExists { - eprintln!("Failed to create {}: {}", out_subdir.display(), err); - process::exit(1); - } - } - - let rustc_wrapper = env::var_os("RUSTC_WRAPPER").filter(|wrapper| !wrapper.is_empty()); - let rustc_workspace_wrapper = - env::var_os("RUSTC_WORKSPACE_WRAPPER").filter(|wrapper| !wrapper.is_empty()); - let mut rustc = rustc_wrapper - .into_iter() - .chain(rustc_workspace_wrapper) - .chain(iter::once(rustc)); - let mut cmd = Command::new(rustc.next().unwrap()); - cmd.args(rustc); - - if !rustc_bootstrap { - cmd.env_remove("RUSTC_BOOTSTRAP"); - } - - cmd.stderr(Stdio::null()) - .arg("--edition=2021") - .arg("--crate-name=proc_macro2") - .arg("--crate-type=lib") - .arg("--cap-lints=allow") - .arg("--emit=dep-info,metadata") - .arg("--out-dir") - .arg(&out_subdir) - .arg(probefile); - - if let Some(target) = env::var_os("TARGET") { - cmd.arg("--target").arg(target); - } - - // If Cargo wants to set RUSTFLAGS, use that. - if let Ok(rustflags) = env::var("CARGO_ENCODED_RUSTFLAGS") { - if !rustflags.is_empty() { - for arg in rustflags.split('\x1f') { - cmd.arg(arg); - } - } - } - - let success = match cmd.status() { - Ok(status) => status.success(), - Err(_) => false, - }; - - // Clean up to avoid leaving nondeterministic absolute paths in the dep-info - // file in OUT_DIR, which causes nonreproducible builds in build systems - // that treat the entire OUT_DIR as an artifact. - if let Err(err) = fs::remove_dir_all(&out_subdir) { - if err.kind() != ErrorKind::NotFound { - eprintln!("Failed to clean up {}: {}", out_subdir.display(), err); - process::exit(1); - } - } - - success -} - -fn rustc_minor_version() -> Option<u32> { - let rustc = cargo_env_var("RUSTC"); - let output = Command::new(rustc).arg("--version").output().ok()?; - let version = str::from_utf8(&output.stdout).ok()?; - let mut pieces = version.split('.'); - if pieces.next() != Some("rustc 1") { - return None; - } - pieces.next()?.parse().ok() -} - -fn cargo_env_var(key: &str) -> OsString { - env::var_os(key).unwrap_or_else(|| { - eprintln!( - "Environment variable ${} is not set during execution of build script", - key, - ); - process::exit(1); - }) -} diff --git a/vendor/proc-macro2/build/probe.rs b/vendor/proc-macro2/build/probe.rs deleted file mode 100644 index 79c8ae23..00000000 --- a/vendor/proc-macro2/build/probe.rs +++ /dev/null @@ -1,41 +0,0 @@ -// This code exercises the surface area that we expect of Span's unstable API. -// If the current toolchain is able to compile it, then proc-macro2 is able to -// offer these APIs too. - -#![feature(proc_macro_span)] - -extern crate proc_macro; - -use core::ops::{Range, RangeBounds}; -use proc_macro::{Literal, Span}; - -pub fn byte_range(this: &Span) -> Range<usize> { - this.byte_range() -} - -pub fn start(this: &Span) -> Span { - this.start() -} - -pub fn end(this: &Span) -> Span { - this.end() -} - -pub fn line(this: &Span) -> usize { - this.line() -} - -pub fn column(this: &Span) -> usize { - this.column() -} - -pub fn join(this: &Span, other: Span) -> Option<Span> { - this.join(other) -} - -pub fn subspan<R: RangeBounds<usize>>(this: &Literal, range: R) -> Option<Span> { - this.subspan(range) -} - -// Include in sccache cache key. 
-const _: Option<&str> = option_env!("RUSTC_BOOTSTRAP"); diff --git a/vendor/proc-macro2/rust-toolchain.toml b/vendor/proc-macro2/rust-toolchain.toml deleted file mode 100644 index 20fe888c..00000000 --- a/vendor/proc-macro2/rust-toolchain.toml +++ /dev/null @@ -1,2 +0,0 @@ -[toolchain] -components = ["rust-src"] diff --git a/vendor/proc-macro2/src/detection.rs b/vendor/proc-macro2/src/detection.rs deleted file mode 100644 index beba7b23..00000000 --- a/vendor/proc-macro2/src/detection.rs +++ /dev/null @@ -1,75 +0,0 @@ -use core::sync::atomic::{AtomicUsize, Ordering}; -use std::sync::Once; - -static WORKS: AtomicUsize = AtomicUsize::new(0); -static INIT: Once = Once::new(); - -pub(crate) fn inside_proc_macro() -> bool { - match WORKS.load(Ordering::Relaxed) { - 1 => return false, - 2 => return true, - _ => {} - } - - INIT.call_once(initialize); - inside_proc_macro() -} - -pub(crate) fn force_fallback() { - WORKS.store(1, Ordering::Relaxed); -} - -pub(crate) fn unforce_fallback() { - initialize(); -} - -#[cfg(not(no_is_available))] -fn initialize() { - let available = proc_macro::is_available(); - WORKS.store(available as usize + 1, Ordering::Relaxed); -} - -// Swap in a null panic hook to avoid printing "thread panicked" to stderr, -// then use catch_unwind to determine whether the compiler's proc_macro is -// working. When proc-macro2 is used from outside of a procedural macro all -// of the proc_macro crate's APIs currently panic. -// -// The Once is to prevent the possibility of this ordering: -// -// thread 1 calls take_hook, gets the user's original hook -// thread 1 calls set_hook with the null hook -// thread 2 calls take_hook, thinks null hook is the original hook -// thread 2 calls set_hook with the null hook -// thread 1 calls set_hook with the actual original hook -// thread 2 calls set_hook with what it thinks is the original hook -// -// in which the user's hook has been lost. -// -// There is still a race condition where a panic in a different thread can -// happen during the interval that the user's original panic hook is -// unregistered such that their hook is incorrectly not called. This is -// sufficiently unlikely and less bad than printing panic messages to stderr -// on correct use of this crate. Maybe there is a libstd feature request -// here. For now, if a user needs to guarantee that this failure mode does -// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from -// the main thread before launching any other threads. -#[cfg(no_is_available)] -fn initialize() { - use std::panic::{self, PanicInfo}; - - type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static; - - let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ }); - let sanity_check = &*null_hook as *const PanicHook; - let original_hook = panic::take_hook(); - panic::set_hook(null_hook); - - let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok(); - WORKS.store(works as usize + 1, Ordering::Relaxed); - - let hopefully_null_hook = panic::take_hook(); - panic::set_hook(original_hook); - if sanity_check != &*hopefully_null_hook { - panic!("observed race condition in proc_macro2::inside_proc_macro"); - } -} diff --git a/vendor/proc-macro2/src/extra.rs b/vendor/proc-macro2/src/extra.rs deleted file mode 100644 index 522a90e1..00000000 --- a/vendor/proc-macro2/src/extra.rs +++ /dev/null @@ -1,151 +0,0 @@ -//! Items which do not have a correspondence to any API in the proc_macro crate, -//! but are necessary to include in proc-macro2. 
- -use crate::fallback; -use crate::imp; -use crate::marker::{ProcMacroAutoTraits, MARKER}; -use crate::Span; -use core::fmt::{self, Debug}; - -/// Invalidate any `proc_macro2::Span` that exist on the current thread. -/// -/// The implementation of `Span` uses thread-local data structures and this -/// function clears them. Calling any method on a `Span` on the current thread -/// created prior to the invalidation will return incorrect values or crash. -/// -/// This function is useful for programs that process more than 2<sup>32</sup> -/// bytes of Rust source code on the same thread. Just like rustc, proc-macro2 -/// uses 32-bit source locations, and these wrap around when the total source -/// code processed by the same thread exceeds 2<sup>32</sup> bytes (4 -/// gigabytes). After a wraparound, `Span` methods such as `source_text()` can -/// return wrong data. -/// -/// # Example -/// -/// As of late 2023, there is 200 GB of Rust code published on crates.io. -/// Looking at just the newest version of every crate, it is 16 GB of code. So a -/// workload that involves parsing it all would overflow a 32-bit source -/// location unless spans are being invalidated. -/// -/// ``` -/// use flate2::read::GzDecoder; -/// use std::ffi::OsStr; -/// use std::io::{BufReader, Read}; -/// use std::str::FromStr; -/// use tar::Archive; -/// -/// rayon::scope(|s| { -/// for krate in every_version_of_every_crate() { -/// s.spawn(move |_| { -/// proc_macro2::extra::invalidate_current_thread_spans(); -/// -/// let reader = BufReader::new(krate); -/// let tar = GzDecoder::new(reader); -/// let mut archive = Archive::new(tar); -/// for entry in archive.entries().unwrap() { -/// let mut entry = entry.unwrap(); -/// let path = entry.path().unwrap(); -/// if path.extension() != Some(OsStr::new("rs")) { -/// continue; -/// } -/// let mut content = String::new(); -/// entry.read_to_string(&mut content).unwrap(); -/// match proc_macro2::TokenStream::from_str(&content) { -/// Ok(tokens) => {/* ... */}, -/// Err(_) => continue, -/// } -/// } -/// }); -/// } -/// }); -/// # -/// # fn every_version_of_every_crate() -> Vec<std::fs::File> { -/// # Vec::new() -/// # } -/// ``` -/// -/// # Panics -/// -/// This function is not applicable to and will panic if called from a -/// procedural macro. -#[cfg(span_locations)] -#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] -pub fn invalidate_current_thread_spans() { - crate::imp::invalidate_current_thread_spans(); -} - -/// An object that holds a [`Group`]'s `span_open()` and `span_close()` together -/// in a more compact representation than holding those 2 spans individually. -/// -/// [`Group`]: crate::Group -#[derive(Copy, Clone)] -pub struct DelimSpan { - inner: DelimSpanEnum, - _marker: ProcMacroAutoTraits, -} - -#[derive(Copy, Clone)] -enum DelimSpanEnum { - #[cfg(wrap_proc_macro)] - Compiler { - join: proc_macro::Span, - open: proc_macro::Span, - close: proc_macro::Span, - }, - Fallback(fallback::Span), -} - -impl DelimSpan { - pub(crate) fn new(group: &imp::Group) -> Self { - #[cfg(wrap_proc_macro)] - let inner = match group { - imp::Group::Compiler(group) => DelimSpanEnum::Compiler { - join: group.span(), - open: group.span_open(), - close: group.span_close(), - }, - imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()), - }; - - #[cfg(not(wrap_proc_macro))] - let inner = DelimSpanEnum::Fallback(group.span()); - - DelimSpan { - inner, - _marker: MARKER, - } - } - - /// Returns a span covering the entire delimited group. 
- pub fn join(&self) -> Span { - match &self.inner { - #[cfg(wrap_proc_macro)] - DelimSpanEnum::Compiler { join, .. } => Span::_new(imp::Span::Compiler(*join)), - DelimSpanEnum::Fallback(span) => Span::_new_fallback(*span), - } - } - - /// Returns a span for the opening punctuation of the group only. - pub fn open(&self) -> Span { - match &self.inner { - #[cfg(wrap_proc_macro)] - DelimSpanEnum::Compiler { open, .. } => Span::_new(imp::Span::Compiler(*open)), - DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()), - } - } - - /// Returns a span for the closing punctuation of the group only. - pub fn close(&self) -> Span { - match &self.inner { - #[cfg(wrap_proc_macro)] - DelimSpanEnum::Compiler { close, .. } => Span::_new(imp::Span::Compiler(*close)), - DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()), - } - } -} - -impl Debug for DelimSpan { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.join(), f) - } -} diff --git a/vendor/proc-macro2/src/fallback.rs b/vendor/proc-macro2/src/fallback.rs deleted file mode 100644 index fbce9c40..00000000 --- a/vendor/proc-macro2/src/fallback.rs +++ /dev/null @@ -1,1251 +0,0 @@ -#[cfg(wrap_proc_macro)] -use crate::imp; -#[cfg(span_locations)] -use crate::location::LineColumn; -use crate::parse::{self, Cursor}; -use crate::rcvec::{RcVec, RcVecBuilder, RcVecIntoIter, RcVecMut}; -use crate::{Delimiter, Spacing, TokenTree}; -#[cfg(all(span_locations, not(fuzzing)))] -use alloc::collections::BTreeMap; -#[cfg(all(span_locations, not(fuzzing)))] -use core::cell::RefCell; -#[cfg(span_locations)] -use core::cmp; -use core::fmt::{self, Debug, Display, Write}; -use core::mem::ManuallyDrop; -#[cfg(span_locations)] -use core::ops::Range; -use core::ops::RangeBounds; -use core::ptr; -use core::str; -#[cfg(feature = "proc-macro")] -use core::str::FromStr; -use std::ffi::CStr; -#[cfg(wrap_proc_macro)] -use std::panic; -#[cfg(procmacro2_semver_exempt)] -use std::path::PathBuf; - -/// Force use of proc-macro2's fallback implementation of the API for now, even -/// if the compiler's implementation is available. -pub fn force() { - #[cfg(wrap_proc_macro)] - crate::detection::force_fallback(); -} - -/// Resume using the compiler's implementation of the proc macro API if it is -/// available. 
-pub fn unforce() { - #[cfg(wrap_proc_macro)] - crate::detection::unforce_fallback(); -} - -#[derive(Clone)] -pub(crate) struct TokenStream { - inner: RcVec<TokenTree>, -} - -#[derive(Debug)] -pub(crate) struct LexError { - pub(crate) span: Span, -} - -impl LexError { - pub(crate) fn span(&self) -> Span { - self.span - } - - pub(crate) fn call_site() -> Self { - LexError { - span: Span::call_site(), - } - } -} - -impl TokenStream { - pub(crate) fn new() -> Self { - TokenStream { - inner: RcVecBuilder::new().build(), - } - } - - pub(crate) fn from_str_checked(src: &str) -> Result<Self, LexError> { - // Create a dummy file & add it to the source map - let mut cursor = get_cursor(src); - - // Strip a byte order mark if present - const BYTE_ORDER_MARK: &str = "\u{feff}"; - if cursor.starts_with(BYTE_ORDER_MARK) { - cursor = cursor.advance(BYTE_ORDER_MARK.len()); - } - - parse::token_stream(cursor) - } - - #[cfg(feature = "proc-macro")] - pub(crate) fn from_str_unchecked(src: &str) -> Self { - Self::from_str_checked(src).unwrap() - } - - pub(crate) fn is_empty(&self) -> bool { - self.inner.len() == 0 - } - - fn take_inner(self) -> RcVecBuilder<TokenTree> { - let nodrop = ManuallyDrop::new(self); - unsafe { ptr::read(&nodrop.inner) }.make_owned() - } -} - -fn push_token_from_proc_macro(mut vec: RcVecMut<TokenTree>, token: TokenTree) { - // https://github.com/dtolnay/proc-macro2/issues/235 - match token { - TokenTree::Literal(crate::Literal { - #[cfg(wrap_proc_macro)] - inner: crate::imp::Literal::Fallback(literal), - #[cfg(not(wrap_proc_macro))] - inner: literal, - .. - }) if literal.repr.starts_with('-') => { - push_negative_literal(vec, literal); - } - _ => vec.push(token), - } - - #[cold] - fn push_negative_literal(mut vec: RcVecMut<TokenTree>, mut literal: Literal) { - literal.repr.remove(0); - let mut punct = crate::Punct::new('-', Spacing::Alone); - punct.set_span(crate::Span::_new_fallback(literal.span)); - vec.push(TokenTree::Punct(punct)); - vec.push(TokenTree::Literal(crate::Literal::_new_fallback(literal))); - } -} - -// Nonrecursive to prevent stack overflow. 
-impl Drop for TokenStream { - fn drop(&mut self) { - let mut stack = Vec::new(); - let mut current = match self.inner.get_mut() { - Some(inner) => inner.take().into_iter(), - None => return, - }; - loop { - while let Some(token) = current.next() { - let group = match token { - TokenTree::Group(group) => group.inner, - _ => continue, - }; - #[cfg(wrap_proc_macro)] - let group = match group { - crate::imp::Group::Fallback(group) => group, - crate::imp::Group::Compiler(_) => continue, - }; - let mut group = group; - if let Some(inner) = group.stream.inner.get_mut() { - stack.push(current); - current = inner.take().into_iter(); - } - } - match stack.pop() { - Some(next) => current = next, - None => return, - } - } - } -} - -pub(crate) struct TokenStreamBuilder { - inner: RcVecBuilder<TokenTree>, -} - -impl TokenStreamBuilder { - pub(crate) fn new() -> Self { - TokenStreamBuilder { - inner: RcVecBuilder::new(), - } - } - - pub(crate) fn with_capacity(cap: usize) -> Self { - TokenStreamBuilder { - inner: RcVecBuilder::with_capacity(cap), - } - } - - pub(crate) fn push_token_from_parser(&mut self, tt: TokenTree) { - self.inner.push(tt); - } - - pub(crate) fn build(self) -> TokenStream { - TokenStream { - inner: self.inner.build(), - } - } -} - -#[cfg(span_locations)] -fn get_cursor(src: &str) -> Cursor { - #[cfg(fuzzing)] - return Cursor { rest: src, off: 1 }; - - // Create a dummy file & add it to the source map - #[cfg(not(fuzzing))] - SOURCE_MAP.with(|sm| { - let mut sm = sm.borrow_mut(); - let span = sm.add_file(src); - Cursor { - rest: src, - off: span.lo, - } - }) -} - -#[cfg(not(span_locations))] -fn get_cursor(src: &str) -> Cursor { - Cursor { rest: src } -} - -impl Display for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("cannot parse string into token stream") - } -} - -impl Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut joint = false; - for (i, tt) in self.inner.iter().enumerate() { - if i != 0 && !joint { - write!(f, " ")?; - } - joint = false; - match tt { - TokenTree::Group(tt) => Display::fmt(tt, f), - TokenTree::Ident(tt) => Display::fmt(tt, f), - TokenTree::Punct(tt) => { - joint = tt.spacing() == Spacing::Joint; - Display::fmt(tt, f) - } - TokenTree::Literal(tt) => Display::fmt(tt, f), - }?; - } - - Ok(()) - } -} - -impl Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("TokenStream ")?; - f.debug_list().entries(self.clone()).finish() - } -} - -#[cfg(feature = "proc-macro")] -impl From<proc_macro::TokenStream> for TokenStream { - fn from(inner: proc_macro::TokenStream) -> Self { - TokenStream::from_str_unchecked(&inner.to_string()) - } -} - -#[cfg(feature = "proc-macro")] -impl From<TokenStream> for proc_macro::TokenStream { - fn from(inner: TokenStream) -> Self { - proc_macro::TokenStream::from_str_unchecked(&inner.to_string()) - } -} - -impl From<TokenTree> for TokenStream { - fn from(tree: TokenTree) -> Self { - let mut stream = RcVecBuilder::new(); - push_token_from_proc_macro(stream.as_mut(), tree); - TokenStream { - inner: stream.build(), - } - } -} - -impl FromIterator<TokenTree> for TokenStream { - fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self { - let mut stream = TokenStream::new(); - stream.extend(tokens); - stream - } -} - -impl FromIterator<TokenStream> for TokenStream { - fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self { - let mut v = RcVecBuilder::new(); - - for stream in streams { - 
v.extend(stream.take_inner()); - } - - TokenStream { inner: v.build() } - } -} - -impl Extend<TokenTree> for TokenStream { - fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) { - let mut vec = self.inner.make_mut(); - tokens - .into_iter() - .for_each(|token| push_token_from_proc_macro(vec.as_mut(), token)); - } -} - -impl Extend<TokenStream> for TokenStream { - fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) { - self.inner.make_mut().extend(streams.into_iter().flatten()); - } -} - -pub(crate) type TokenTreeIter = RcVecIntoIter<TokenTree>; - -impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = TokenTreeIter; - - fn into_iter(self) -> TokenTreeIter { - self.take_inner().into_iter() - } -} - -#[cfg(all(span_locations, not(fuzzing)))] -thread_local! { - static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap { - // Start with a single dummy file which all call_site() and def_site() - // spans reference. - files: vec![FileInfo { - source_text: String::new(), - span: Span { lo: 0, hi: 0 }, - lines: vec![0], - char_index_to_byte_offset: BTreeMap::new(), - }], - }); -} - -#[cfg(span_locations)] -pub(crate) fn invalidate_current_thread_spans() { - #[cfg(not(fuzzing))] - SOURCE_MAP.with(|sm| sm.borrow_mut().files.truncate(1)); -} - -#[cfg(all(span_locations, not(fuzzing)))] -struct FileInfo { - source_text: String, - span: Span, - lines: Vec<usize>, - char_index_to_byte_offset: BTreeMap<usize, usize>, -} - -#[cfg(all(span_locations, not(fuzzing)))] -impl FileInfo { - fn offset_line_column(&self, offset: usize) -> LineColumn { - assert!(self.span_within(Span { - lo: offset as u32, - hi: offset as u32, - })); - let offset = offset - self.span.lo as usize; - match self.lines.binary_search(&offset) { - Ok(found) => LineColumn { - line: found + 1, - column: 0, - }, - Err(idx) => LineColumn { - line: idx, - column: offset - self.lines[idx - 1], - }, - } - } - - fn span_within(&self, span: Span) -> bool { - span.lo >= self.span.lo && span.hi <= self.span.hi - } - - fn byte_range(&mut self, span: Span) -> Range<usize> { - let lo_char = (span.lo - self.span.lo) as usize; - - // Look up offset of the largest already-computed char index that is - // less than or equal to the current requested one. We resume counting - // chars from that point. - let (&last_char_index, &last_byte_offset) = self - .char_index_to_byte_offset - .range(..=lo_char) - .next_back() - .unwrap_or((&0, &0)); - - let lo_byte = if last_char_index == lo_char { - last_byte_offset - } else { - let total_byte_offset = match self.source_text[last_byte_offset..] 
- .char_indices() - .nth(lo_char - last_char_index) - { - Some((additional_offset, _ch)) => last_byte_offset + additional_offset, - None => self.source_text.len(), - }; - self.char_index_to_byte_offset - .insert(lo_char, total_byte_offset); - total_byte_offset - }; - - let trunc_lo = &self.source_text[lo_byte..]; - let char_len = (span.hi - span.lo) as usize; - lo_byte..match trunc_lo.char_indices().nth(char_len) { - Some((offset, _ch)) => lo_byte + offset, - None => self.source_text.len(), - } - } - - fn source_text(&mut self, span: Span) -> String { - let byte_range = self.byte_range(span); - self.source_text[byte_range].to_owned() - } -} - -/// Computes the offsets of each line in the given source string -/// and the total number of characters -#[cfg(all(span_locations, not(fuzzing)))] -fn lines_offsets(s: &str) -> (usize, Vec<usize>) { - let mut lines = vec![0]; - let mut total = 0; - - for ch in s.chars() { - total += 1; - if ch == '\n' { - lines.push(total); - } - } - - (total, lines) -} - -#[cfg(all(span_locations, not(fuzzing)))] -struct SourceMap { - files: Vec<FileInfo>, -} - -#[cfg(all(span_locations, not(fuzzing)))] -impl SourceMap { - fn next_start_pos(&self) -> u32 { - // Add 1 so there's always space between files. - // - // We'll always have at least 1 file, as we initialize our files list - // with a dummy file. - self.files.last().unwrap().span.hi + 1 - } - - fn add_file(&mut self, src: &str) -> Span { - let (len, lines) = lines_offsets(src); - let lo = self.next_start_pos(); - let span = Span { - lo, - hi: lo + (len as u32), - }; - - self.files.push(FileInfo { - source_text: src.to_owned(), - span, - lines, - // Populated lazily by source_text(). - char_index_to_byte_offset: BTreeMap::new(), - }); - - span - } - - #[cfg(procmacro2_semver_exempt)] - fn filepath(&self, span: Span) -> String { - for (i, file) in self.files.iter().enumerate() { - if file.span_within(span) { - return if i == 0 { - "<unspecified>".to_owned() - } else { - format!("<parsed string {}>", i) - }; - } - } - unreachable!("Invalid span with no related FileInfo!"); - } - - fn fileinfo(&self, span: Span) -> &FileInfo { - for file in &self.files { - if file.span_within(span) { - return file; - } - } - unreachable!("Invalid span with no related FileInfo!"); - } - - fn fileinfo_mut(&mut self, span: Span) -> &mut FileInfo { - for file in &mut self.files { - if file.span_within(span) { - return file; - } - } - unreachable!("Invalid span with no related FileInfo!"); - } -} - -#[derive(Clone, Copy, PartialEq, Eq)] -pub(crate) struct Span { - #[cfg(span_locations)] - pub(crate) lo: u32, - #[cfg(span_locations)] - pub(crate) hi: u32, -} - -impl Span { - #[cfg(not(span_locations))] - pub(crate) fn call_site() -> Self { - Span {} - } - - #[cfg(span_locations)] - pub(crate) fn call_site() -> Self { - Span { lo: 0, hi: 0 } - } - - pub(crate) fn mixed_site() -> Self { - Span::call_site() - } - - #[cfg(procmacro2_semver_exempt)] - pub(crate) fn def_site() -> Self { - Span::call_site() - } - - pub(crate) fn resolved_at(&self, _other: Span) -> Span { - // Stable spans consist only of line/column information, so - // `resolved_at` and `located_at` only select which span the - // caller wants line/column information from. 
- *self - } - - pub(crate) fn located_at(&self, other: Span) -> Span { - other - } - - #[cfg(span_locations)] - pub(crate) fn byte_range(&self) -> Range<usize> { - #[cfg(fuzzing)] - return 0..0; - - #[cfg(not(fuzzing))] - { - if self.is_call_site() { - 0..0 - } else { - SOURCE_MAP.with(|sm| sm.borrow_mut().fileinfo_mut(*self).byte_range(*self)) - } - } - } - - #[cfg(span_locations)] - pub(crate) fn start(&self) -> LineColumn { - #[cfg(fuzzing)] - return LineColumn { line: 0, column: 0 }; - - #[cfg(not(fuzzing))] - SOURCE_MAP.with(|sm| { - let sm = sm.borrow(); - let fi = sm.fileinfo(*self); - fi.offset_line_column(self.lo as usize) - }) - } - - #[cfg(span_locations)] - pub(crate) fn end(&self) -> LineColumn { - #[cfg(fuzzing)] - return LineColumn { line: 0, column: 0 }; - - #[cfg(not(fuzzing))] - SOURCE_MAP.with(|sm| { - let sm = sm.borrow(); - let fi = sm.fileinfo(*self); - fi.offset_line_column(self.hi as usize) - }) - } - - #[cfg(procmacro2_semver_exempt)] - pub(crate) fn file(&self) -> String { - #[cfg(fuzzing)] - return "<unspecified>".to_owned(); - - #[cfg(not(fuzzing))] - SOURCE_MAP.with(|sm| { - let sm = sm.borrow(); - sm.filepath(*self) - }) - } - - #[cfg(procmacro2_semver_exempt)] - pub(crate) fn local_file(&self) -> Option<PathBuf> { - None - } - - #[cfg(not(span_locations))] - pub(crate) fn join(&self, _other: Span) -> Option<Span> { - Some(Span {}) - } - - #[cfg(span_locations)] - pub(crate) fn join(&self, other: Span) -> Option<Span> { - #[cfg(fuzzing)] - return { - let _ = other; - None - }; - - #[cfg(not(fuzzing))] - SOURCE_MAP.with(|sm| { - let sm = sm.borrow(); - // If `other` is not within the same FileInfo as us, return None. - if !sm.fileinfo(*self).span_within(other) { - return None; - } - Some(Span { - lo: cmp::min(self.lo, other.lo), - hi: cmp::max(self.hi, other.hi), - }) - }) - } - - #[cfg(not(span_locations))] - pub(crate) fn source_text(&self) -> Option<String> { - None - } - - #[cfg(span_locations)] - pub(crate) fn source_text(&self) -> Option<String> { - #[cfg(fuzzing)] - return None; - - #[cfg(not(fuzzing))] - { - if self.is_call_site() { - None - } else { - Some(SOURCE_MAP.with(|sm| sm.borrow_mut().fileinfo_mut(*self).source_text(*self))) - } - } - } - - #[cfg(not(span_locations))] - pub(crate) fn first_byte(self) -> Self { - self - } - - #[cfg(span_locations)] - pub(crate) fn first_byte(self) -> Self { - Span { - lo: self.lo, - hi: cmp::min(self.lo.saturating_add(1), self.hi), - } - } - - #[cfg(not(span_locations))] - pub(crate) fn last_byte(self) -> Self { - self - } - - #[cfg(span_locations)] - pub(crate) fn last_byte(self) -> Self { - Span { - lo: cmp::max(self.hi.saturating_sub(1), self.lo), - hi: self.hi, - } - } - - #[cfg(span_locations)] - fn is_call_site(&self) -> bool { - self.lo == 0 && self.hi == 0 - } -} - -impl Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - #[cfg(span_locations)] - return write!(f, "bytes({}..{})", self.lo, self.hi); - - #[cfg(not(span_locations))] - write!(f, "Span") - } -} - -pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) { - #[cfg(span_locations)] - { - if span.is_call_site() { - return; - } - } - - if cfg!(span_locations) { - debug.field("span", &span); - } -} - -#[derive(Clone)] -pub(crate) struct Group { - delimiter: Delimiter, - stream: TokenStream, - span: Span, -} - -impl Group { - pub(crate) fn new(delimiter: Delimiter, stream: TokenStream) -> Self { - Group { - delimiter, - stream, - span: Span::call_site(), - } - } - - pub(crate) fn 
delimiter(&self) -> Delimiter { - self.delimiter - } - - pub(crate) fn stream(&self) -> TokenStream { - self.stream.clone() - } - - pub(crate) fn span(&self) -> Span { - self.span - } - - pub(crate) fn span_open(&self) -> Span { - self.span.first_byte() - } - - pub(crate) fn span_close(&self) -> Span { - self.span.last_byte() - } - - pub(crate) fn set_span(&mut self, span: Span) { - self.span = span; - } -} - -impl Display for Group { - // We attempt to match libproc_macro's formatting. - // Empty parens: () - // Nonempty parens: (...) - // Empty brackets: [] - // Nonempty brackets: [...] - // Empty braces: { } - // Nonempty braces: { ... } - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let (open, close) = match self.delimiter { - Delimiter::Parenthesis => ("(", ")"), - Delimiter::Brace => ("{ ", "}"), - Delimiter::Bracket => ("[", "]"), - Delimiter::None => ("", ""), - }; - - f.write_str(open)?; - Display::fmt(&self.stream, f)?; - if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() { - f.write_str(" ")?; - } - f.write_str(close)?; - - Ok(()) - } -} - -impl Debug for Group { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut debug = fmt.debug_struct("Group"); - debug.field("delimiter", &self.delimiter); - debug.field("stream", &self.stream); - debug_span_field_if_nontrivial(&mut debug, self.span); - debug.finish() - } -} - -#[derive(Clone)] -pub(crate) struct Ident { - sym: Box<str>, - span: Span, - raw: bool, -} - -impl Ident { - #[track_caller] - pub(crate) fn new_checked(string: &str, span: Span) -> Self { - validate_ident(string); - Ident::new_unchecked(string, span) - } - - pub(crate) fn new_unchecked(string: &str, span: Span) -> Self { - Ident { - sym: Box::from(string), - span, - raw: false, - } - } - - #[track_caller] - pub(crate) fn new_raw_checked(string: &str, span: Span) -> Self { - validate_ident_raw(string); - Ident::new_raw_unchecked(string, span) - } - - pub(crate) fn new_raw_unchecked(string: &str, span: Span) -> Self { - Ident { - sym: Box::from(string), - span, - raw: true, - } - } - - pub(crate) fn span(&self) -> Span { - self.span - } - - pub(crate) fn set_span(&mut self, span: Span) { - self.span = span; - } -} - -pub(crate) fn is_ident_start(c: char) -> bool { - c == '_' || unicode_ident::is_xid_start(c) -} - -pub(crate) fn is_ident_continue(c: char) -> bool { - unicode_ident::is_xid_continue(c) -} - -#[track_caller] -fn validate_ident(string: &str) { - if string.is_empty() { - panic!("Ident is not allowed to be empty; use Option<Ident>"); - } - - if string.bytes().all(|digit| b'0' <= digit && digit <= b'9') { - panic!("Ident cannot be a number; use Literal instead"); - } - - fn ident_ok(string: &str) -> bool { - let mut chars = string.chars(); - let first = chars.next().unwrap(); - if !is_ident_start(first) { - return false; - } - for ch in chars { - if !is_ident_continue(ch) { - return false; - } - } - true - } - - if !ident_ok(string) { - panic!("{:?} is not a valid Ident", string); - } -} - -#[track_caller] -fn validate_ident_raw(string: &str) { - validate_ident(string); - - match string { - "_" | "super" | "self" | "Self" | "crate" => { - panic!("`r#{}` cannot be a raw identifier", string); - } - _ => {} - } -} - -impl PartialEq for Ident { - fn eq(&self, other: &Ident) -> bool { - self.sym == other.sym && self.raw == other.raw - } -} - -impl<T> PartialEq<T> for Ident -where - T: ?Sized + AsRef<str>, -{ - fn eq(&self, other: &T) -> bool { - let other = other.as_ref(); - if self.raw { - other.starts_with("r#") && 
*self.sym == other[2..] - } else { - *self.sym == *other - } - } -} - -impl Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if self.raw { - f.write_str("r#")?; - } - Display::fmt(&self.sym, f) - } -} - -#[allow(clippy::missing_fields_in_debug)] -impl Debug for Ident { - // Ident(proc_macro), Ident(r#union) - #[cfg(not(span_locations))] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut debug = f.debug_tuple("Ident"); - debug.field(&format_args!("{}", self)); - debug.finish() - } - - // Ident { - // sym: proc_macro, - // span: bytes(128..138) - // } - #[cfg(span_locations)] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut debug = f.debug_struct("Ident"); - debug.field("sym", &format_args!("{}", self)); - debug_span_field_if_nontrivial(&mut debug, self.span); - debug.finish() - } -} - -#[derive(Clone)] -pub(crate) struct Literal { - pub(crate) repr: String, - span: Span, -} - -macro_rules! suffixed_numbers { - ($($name:ident => $kind:ident,)*) => ($( - pub(crate) fn $name(n: $kind) -> Literal { - Literal::_new(format!(concat!("{}", stringify!($kind)), n)) - } - )*) -} - -macro_rules! unsuffixed_numbers { - ($($name:ident => $kind:ident,)*) => ($( - pub(crate) fn $name(n: $kind) -> Literal { - Literal::_new(n.to_string()) - } - )*) -} - -impl Literal { - pub(crate) fn _new(repr: String) -> Self { - Literal { - repr, - span: Span::call_site(), - } - } - - pub(crate) fn from_str_checked(repr: &str) -> Result<Self, LexError> { - let mut cursor = get_cursor(repr); - #[cfg(span_locations)] - let lo = cursor.off; - - let negative = cursor.starts_with_char('-'); - if negative { - cursor = cursor.advance(1); - if !cursor.starts_with_fn(|ch| ch.is_ascii_digit()) { - return Err(LexError::call_site()); - } - } - - if let Ok((rest, mut literal)) = parse::literal(cursor) { - if rest.is_empty() { - if negative { - literal.repr.insert(0, '-'); - } - literal.span = Span { - #[cfg(span_locations)] - lo, - #[cfg(span_locations)] - hi: rest.off, - }; - return Ok(literal); - } - } - Err(LexError::call_site()) - } - - pub(crate) unsafe fn from_str_unchecked(repr: &str) -> Self { - Literal::_new(repr.to_owned()) - } - - suffixed_numbers! { - u8_suffixed => u8, - u16_suffixed => u16, - u32_suffixed => u32, - u64_suffixed => u64, - u128_suffixed => u128, - usize_suffixed => usize, - i8_suffixed => i8, - i16_suffixed => i16, - i32_suffixed => i32, - i64_suffixed => i64, - i128_suffixed => i128, - isize_suffixed => isize, - - f32_suffixed => f32, - f64_suffixed => f64, - } - - unsuffixed_numbers! 
{ - u8_unsuffixed => u8, - u16_unsuffixed => u16, - u32_unsuffixed => u32, - u64_unsuffixed => u64, - u128_unsuffixed => u128, - usize_unsuffixed => usize, - i8_unsuffixed => i8, - i16_unsuffixed => i16, - i32_unsuffixed => i32, - i64_unsuffixed => i64, - i128_unsuffixed => i128, - isize_unsuffixed => isize, - } - - pub(crate) fn f32_unsuffixed(f: f32) -> Literal { - let mut s = f.to_string(); - if !s.contains('.') { - s.push_str(".0"); - } - Literal::_new(s) - } - - pub(crate) fn f64_unsuffixed(f: f64) -> Literal { - let mut s = f.to_string(); - if !s.contains('.') { - s.push_str(".0"); - } - Literal::_new(s) - } - - pub(crate) fn string(string: &str) -> Literal { - let mut repr = String::with_capacity(string.len() + 2); - repr.push('"'); - escape_utf8(string, &mut repr); - repr.push('"'); - Literal::_new(repr) - } - - pub(crate) fn character(ch: char) -> Literal { - let mut repr = String::new(); - repr.push('\''); - if ch == '"' { - // escape_debug turns this into '\"' which is unnecessary. - repr.push(ch); - } else { - repr.extend(ch.escape_debug()); - } - repr.push('\''); - Literal::_new(repr) - } - - pub(crate) fn byte_character(byte: u8) -> Literal { - let mut repr = "b'".to_string(); - #[allow(clippy::match_overlapping_arm)] - match byte { - b'\0' => repr.push_str(r"\0"), - b'\t' => repr.push_str(r"\t"), - b'\n' => repr.push_str(r"\n"), - b'\r' => repr.push_str(r"\r"), - b'\'' => repr.push_str(r"\'"), - b'\\' => repr.push_str(r"\\"), - b'\x20'..=b'\x7E' => repr.push(byte as char), - _ => { - let _ = write!(repr, r"\x{:02X}", byte); - } - } - repr.push('\''); - Literal::_new(repr) - } - - pub(crate) fn byte_string(bytes: &[u8]) -> Literal { - let mut repr = "b\"".to_string(); - let mut bytes = bytes.iter(); - while let Some(&b) = bytes.next() { - #[allow(clippy::match_overlapping_arm)] - match b { - b'\0' => repr.push_str(match bytes.as_slice().first() { - // circumvent clippy::octal_escapes lint - Some(b'0'..=b'7') => r"\x00", - _ => r"\0", - }), - b'\t' => repr.push_str(r"\t"), - b'\n' => repr.push_str(r"\n"), - b'\r' => repr.push_str(r"\r"), - b'"' => repr.push_str("\\\""), - b'\\' => repr.push_str(r"\\"), - b'\x20'..=b'\x7E' => repr.push(b as char), - _ => { - let _ = write!(repr, r"\x{:02X}", b); - } - } - } - repr.push('"'); - Literal::_new(repr) - } - - pub(crate) fn c_string(string: &CStr) -> Literal { - let mut repr = "c\"".to_string(); - let mut bytes = string.to_bytes(); - while !bytes.is_empty() { - let (valid, invalid) = match str::from_utf8(bytes) { - Ok(all_valid) => { - bytes = b""; - (all_valid, bytes) - } - Err(utf8_error) => { - let (valid, rest) = bytes.split_at(utf8_error.valid_up_to()); - let valid = str::from_utf8(valid).unwrap(); - let invalid = utf8_error - .error_len() - .map_or(rest, |error_len| &rest[..error_len]); - bytes = &bytes[valid.len() + invalid.len()..]; - (valid, invalid) - } - }; - escape_utf8(valid, &mut repr); - for &byte in invalid { - let _ = write!(repr, r"\x{:02X}", byte); - } - } - repr.push('"'); - Literal::_new(repr) - } - - pub(crate) fn span(&self) -> Span { - self.span - } - - pub(crate) fn set_span(&mut self, span: Span) { - self.span = span; - } - - pub(crate) fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> { - #[cfg(not(span_locations))] - { - let _ = range; - None - } - - #[cfg(span_locations)] - { - use core::ops::Bound; - - let lo = match range.start_bound() { - Bound::Included(start) => { - let start = u32::try_from(*start).ok()?; - self.span.lo.checked_add(start)? 
- } - Bound::Excluded(start) => { - let start = u32::try_from(*start).ok()?; - self.span.lo.checked_add(start)?.checked_add(1)? - } - Bound::Unbounded => self.span.lo, - }; - let hi = match range.end_bound() { - Bound::Included(end) => { - let end = u32::try_from(*end).ok()?; - self.span.lo.checked_add(end)?.checked_add(1)? - } - Bound::Excluded(end) => { - let end = u32::try_from(*end).ok()?; - self.span.lo.checked_add(end)? - } - Bound::Unbounded => self.span.hi, - }; - if lo <= hi && hi <= self.span.hi { - Some(Span { lo, hi }) - } else { - None - } - } - } -} - -impl Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.repr, f) - } -} - -impl Debug for Literal { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut debug = fmt.debug_struct("Literal"); - debug.field("lit", &format_args!("{}", self.repr)); - debug_span_field_if_nontrivial(&mut debug, self.span); - debug.finish() - } -} - -fn escape_utf8(string: &str, repr: &mut String) { - let mut chars = string.chars(); - while let Some(ch) = chars.next() { - if ch == '\0' { - repr.push_str( - if chars - .as_str() - .starts_with(|next| '0' <= next && next <= '7') - { - // circumvent clippy::octal_escapes lint - r"\x00" - } else { - r"\0" - }, - ); - } else if ch == '\'' { - // escape_debug turns this into "\'" which is unnecessary. - repr.push(ch); - } else { - repr.extend(ch.escape_debug()); - } - } -} - -#[cfg(feature = "proc-macro")] -pub(crate) trait FromStr2: FromStr<Err = proc_macro::LexError> { - #[cfg(wrap_proc_macro)] - fn valid(src: &str) -> bool; - - #[cfg(wrap_proc_macro)] - fn from_str_checked(src: &str) -> Result<Self, imp::LexError> { - // Validate using fallback parser, because rustc is incapable of - // returning a recoverable Err for certain invalid token streams, and - // will instead permanently poison the compilation. - if !Self::valid(src) { - return Err(imp::LexError::CompilerPanic); - } - - // Catch panic to work around https://github.com/rust-lang/rust/issues/58736. - match panic::catch_unwind(|| Self::from_str(src)) { - Ok(Ok(ok)) => Ok(ok), - Ok(Err(lex)) => Err(imp::LexError::Compiler(lex)), - Err(_panic) => Err(imp::LexError::CompilerPanic), - } - } - - fn from_str_unchecked(src: &str) -> Self { - Self::from_str(src).unwrap() - } -} - -#[cfg(feature = "proc-macro")] -impl FromStr2 for proc_macro::TokenStream { - #[cfg(wrap_proc_macro)] - fn valid(src: &str) -> bool { - TokenStream::from_str_checked(src).is_ok() - } -} - -#[cfg(feature = "proc-macro")] -impl FromStr2 for proc_macro::Literal { - #[cfg(wrap_proc_macro)] - fn valid(src: &str) -> bool { - Literal::from_str_checked(src).is_ok() - } -} diff --git a/vendor/proc-macro2/src/lib.rs b/vendor/proc-macro2/src/lib.rs deleted file mode 100644 index 6f830377..00000000 --- a/vendor/proc-macro2/src/lib.rs +++ /dev/null @@ -1,1351 +0,0 @@ -//! [![github]](https://github.com/dtolnay/proc-macro2) [![crates-io]](https://crates.io/crates/proc-macro2) [![docs-rs]](crate) -//! -//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github -//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust -//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs -//! -//! <br> -//! -//! A wrapper around the procedural macro API of the compiler's [`proc_macro`] -//! crate. This library serves two purposes: -//! -//! 
- **Bring proc-macro-like functionality to other contexts like build.rs and -//! main.rs.** Types from `proc_macro` are entirely specific to procedural -//! macros and cannot ever exist in code outside of a procedural macro. -//! Meanwhile `proc_macro2` types may exist anywhere including non-macro code. -//! By developing foundational libraries like [syn] and [quote] against -//! `proc_macro2` rather than `proc_macro`, the procedural macro ecosystem -//! becomes easily applicable to many other use cases and we avoid -//! reimplementing non-macro equivalents of those libraries. -//! -//! - **Make procedural macros unit testable.** As a consequence of being -//! specific to procedural macros, nothing that uses `proc_macro` can be -//! executed from a unit test. In order for helper libraries or components of -//! a macro to be testable in isolation, they must be implemented using -//! `proc_macro2`. -//! -//! [syn]: https://github.com/dtolnay/syn -//! [quote]: https://github.com/dtolnay/quote -//! -//! # Usage -//! -//! The skeleton of a typical procedural macro typically looks like this: -//! -//! ``` -//! extern crate proc_macro; -//! -//! # const IGNORE: &str = stringify! { -//! #[proc_macro_derive(MyDerive)] -//! # }; -//! # #[cfg(wrap_proc_macro)] -//! pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { -//! let input = proc_macro2::TokenStream::from(input); -//! -//! let output: proc_macro2::TokenStream = { -//! /* transform input */ -//! # input -//! }; -//! -//! proc_macro::TokenStream::from(output) -//! } -//! ``` -//! -//! If parsing with [Syn], you'll use [`parse_macro_input!`] instead to -//! propagate parse errors correctly back to the compiler when parsing fails. -//! -//! [`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html -//! -//! # Unstable features -//! -//! The default feature set of proc-macro2 tracks the most recent stable -//! compiler API. Functionality in `proc_macro` that is not yet stable is not -//! exposed by proc-macro2 by default. -//! -//! To opt into the additional APIs available in the most recent nightly -//! compiler, the `procmacro2_semver_exempt` config flag must be passed to -//! rustc. We will polyfill those nightly-only APIs back to Rust 1.56.0. As -//! these are unstable APIs that track the nightly compiler, minor versions of -//! proc-macro2 may make breaking changes to them at any time. -//! -//! ```sh -//! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build -//! ``` -//! -//! Note that this must not only be done for your crate, but for any crate that -//! depends on your crate. This infectious nature is intentional, as it serves -//! as a reminder that you are outside of the normal semver guarantees. -//! -//! Semver exempt methods are marked as such in the proc-macro2 documentation. -//! -//! # Thread-Safety -//! -//! Most types in this crate are `!Sync` because the underlying compiler -//! types make use of thread-local memory, meaning they cannot be accessed from -//! a different thread. - -// Proc-macro2 types in rustdoc of other crates get linked to here. 
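As a concrete illustration of the unit-testability point above, here is a minimal sketch of exercising token manipulation from an ordinary test, assuming `proc-macro2` is declared as a regular dependency:

```rust
use proc_macro2::{TokenStream, TokenTree};

// An ordinary test, not a proc macro: proc_macro2 types work here,
// whereas proc_macro types could not exist outside a macro invocation.
#[test]
fn splits_into_expected_token_trees() {
    let tokens: TokenStream = "struct Demo { field: u32 }".parse().expect("valid tokens");

    let kinds: Vec<&str> = tokens
        .into_iter()
        .map(|tree| match tree {
            TokenTree::Ident(_) => "ident",
            TokenTree::Group(_) => "group",
            TokenTree::Punct(_) => "punct",
            TokenTree::Literal(_) => "literal",
        })
        .collect();

    // `struct`, `Demo`, and the braced body.
    assert_eq!(kinds, ["ident", "ident", "group"]);
}
```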
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.95")] -#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))] -#![cfg_attr(super_unstable, feature(proc_macro_def_site))] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![deny(unsafe_op_in_unsafe_fn)] -#![allow( - clippy::cast_lossless, - clippy::cast_possible_truncation, - clippy::checked_conversions, - clippy::doc_markdown, - clippy::elidable_lifetime_names, - clippy::incompatible_msrv, - clippy::items_after_statements, - clippy::iter_without_into_iter, - clippy::let_underscore_untyped, - clippy::manual_assert, - clippy::manual_range_contains, - clippy::missing_panics_doc, - clippy::missing_safety_doc, - clippy::must_use_candidate, - clippy::needless_doctest_main, - clippy::needless_lifetimes, - clippy::new_without_default, - clippy::return_self_not_must_use, - clippy::shadow_unrelated, - clippy::trivially_copy_pass_by_ref, - clippy::unnecessary_wraps, - clippy::unused_self, - clippy::used_underscore_binding, - clippy::vec_init_then_push -)] - -#[cfg(all(procmacro2_semver_exempt, wrap_proc_macro, not(super_unstable)))] -compile_error! {"\ - Something is not right. If you've tried to turn on \ - procmacro2_semver_exempt, you need to ensure that it \ - is turned on for the compilation of the proc-macro2 \ - build script as well. -"} - -#[cfg(all( - procmacro2_nightly_testing, - feature = "proc-macro", - not(proc_macro_span) -))] -compile_error! {"\ - Build script probe failed to compile. -"} - -extern crate alloc; - -#[cfg(feature = "proc-macro")] -extern crate proc_macro; - -mod marker; -mod parse; -mod rcvec; - -#[cfg(wrap_proc_macro)] -mod detection; - -// Public for proc_macro2::fallback::force() and unforce(), but those are quite -// a niche use case so we omit it from rustdoc. -#[doc(hidden)] -pub mod fallback; - -pub mod extra; - -#[cfg(not(wrap_proc_macro))] -use crate::fallback as imp; -#[path = "wrapper.rs"] -#[cfg(wrap_proc_macro)] -mod imp; - -#[cfg(span_locations)] -mod location; - -use crate::extra::DelimSpan; -use crate::marker::{ProcMacroAutoTraits, MARKER}; -use core::cmp::Ordering; -use core::fmt::{self, Debug, Display}; -use core::hash::{Hash, Hasher}; -#[cfg(span_locations)] -use core::ops::Range; -use core::ops::RangeBounds; -use core::str::FromStr; -use std::error::Error; -use std::ffi::CStr; -#[cfg(procmacro2_semver_exempt)] -use std::path::PathBuf; - -#[cfg(span_locations)] -#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] -pub use crate::location::LineColumn; - -/// An abstract stream of tokens, or more concretely a sequence of token trees. -/// -/// This type provides interfaces for iterating over token trees and for -/// collecting token trees into one stream. -/// -/// Token stream is both the input and output of `#[proc_macro]`, -/// `#[proc_macro_attribute]` and `#[proc_macro_derive]` definitions. -#[derive(Clone)] -pub struct TokenStream { - inner: imp::TokenStream, - _marker: ProcMacroAutoTraits, -} - -/// Error returned from `TokenStream::from_str`. -pub struct LexError { - inner: imp::LexError, - _marker: ProcMacroAutoTraits, -} - -impl TokenStream { - fn _new(inner: imp::TokenStream) -> Self { - TokenStream { - inner, - _marker: MARKER, - } - } - - fn _new_fallback(inner: fallback::TokenStream) -> Self { - TokenStream { - inner: imp::TokenStream::from(inner), - _marker: MARKER, - } - } - - /// Returns an empty `TokenStream` containing no token trees. - pub fn new() -> Self { - TokenStream::_new(imp::TokenStream::new()) - } - - /// Checks if this `TokenStream` is empty. 
- pub fn is_empty(&self) -> bool { - self.inner.is_empty() - } -} - -/// `TokenStream::default()` returns an empty stream, -/// i.e. this is equivalent with `TokenStream::new()`. -impl Default for TokenStream { - fn default() -> Self { - TokenStream::new() - } -} - -/// Attempts to break the string into tokens and parse those tokens into a token -/// stream. -/// -/// May fail for a number of reasons, for example, if the string contains -/// unbalanced delimiters or characters not existing in the language. -/// -/// NOTE: Some errors may cause panics instead of returning `LexError`. We -/// reserve the right to change these errors into `LexError`s later. -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result<TokenStream, LexError> { - match imp::TokenStream::from_str_checked(src) { - Ok(tokens) => Ok(TokenStream::_new(tokens)), - Err(lex) => Err(LexError { - inner: lex, - _marker: MARKER, - }), - } - } -} - -#[cfg(feature = "proc-macro")] -#[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))] -impl From<proc_macro::TokenStream> for TokenStream { - fn from(inner: proc_macro::TokenStream) -> Self { - TokenStream::_new(imp::TokenStream::from(inner)) - } -} - -#[cfg(feature = "proc-macro")] -#[cfg_attr(docsrs, doc(cfg(feature = "proc-macro")))] -impl From<TokenStream> for proc_macro::TokenStream { - fn from(inner: TokenStream) -> Self { - proc_macro::TokenStream::from(inner.inner) - } -} - -impl From<TokenTree> for TokenStream { - fn from(token: TokenTree) -> Self { - TokenStream::_new(imp::TokenStream::from(token)) - } -} - -impl Extend<TokenTree> for TokenStream { - fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) { - self.inner.extend(streams); - } -} - -impl Extend<TokenStream> for TokenStream { - fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) { - self.inner - .extend(streams.into_iter().map(|stream| stream.inner)); - } -} - -/// Collects a number of token trees into a single stream. -impl FromIterator<TokenTree> for TokenStream { - fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self { - TokenStream::_new(streams.into_iter().collect()) - } -} -impl FromIterator<TokenStream> for TokenStream { - fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self { - TokenStream::_new(streams.into_iter().map(|i| i.inner).collect()) - } -} - -/// Prints the token stream as a string that is supposed to be losslessly -/// convertible back into the same token stream (modulo spans), except for -/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative -/// numeric literals. -impl Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, f) - } -} - -/// Prints token in a form convenient for debugging. -impl Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -impl LexError { - pub fn span(&self) -> Span { - Span::_new(self.inner.span()) - } -} - -impl Debug for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -impl Display for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, f) - } -} - -impl Error for LexError {} - -/// A region of source code, along with macro expansion information. 
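A brief sketch of how the conversion and collection impls above compose in ordinary (non-macro) code; the token contents are arbitrary:

```rust
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};

fn main() {
    // FromIterator<TokenTree>: collect individual tokens into a stream.
    let mut tokens: TokenStream = vec![
        TokenTree::Ident(Ident::new("answer", Span::call_site())),
        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
        TokenTree::Literal(Literal::u32_unsuffixed(42)),
    ]
    .into_iter()
    .collect();

    // Extend<TokenTree>: append more tokens in place.
    tokens.extend([TokenTree::Punct(Punct::new(';', Spacing::Alone))]);

    // Wrap the whole thing in braces via a Group, then lift it back
    // into a TokenStream through From<TokenTree>.
    let block = TokenTree::Group(Group::new(Delimiter::Brace, tokens));
    let stream = TokenStream::from(block);

    // Display renders a string that reparses to the same tokens (modulo spans).
    let reparsed: TokenStream = stream.to_string().parse().expect("round-trips");
    assert_eq!(stream.to_string(), reparsed.to_string());
}
```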
-#[derive(Copy, Clone)] -pub struct Span { - inner: imp::Span, - _marker: ProcMacroAutoTraits, -} - -impl Span { - fn _new(inner: imp::Span) -> Self { - Span { - inner, - _marker: MARKER, - } - } - - fn _new_fallback(inner: fallback::Span) -> Self { - Span { - inner: imp::Span::from(inner), - _marker: MARKER, - } - } - - /// The span of the invocation of the current procedural macro. - /// - /// Identifiers created with this span will be resolved as if they were - /// written directly at the macro call location (call-site hygiene) and - /// other code at the macro call site will be able to refer to them as well. - pub fn call_site() -> Self { - Span::_new(imp::Span::call_site()) - } - - /// The span located at the invocation of the procedural macro, but with - /// local variables, labels, and `$crate` resolved at the definition site - /// of the macro. This is the same hygiene behavior as `macro_rules`. - pub fn mixed_site() -> Self { - Span::_new(imp::Span::mixed_site()) - } - - /// A span that resolves at the macro definition site. - /// - /// This method is semver exempt and not exposed by default. - #[cfg(procmacro2_semver_exempt)] - #[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))] - pub fn def_site() -> Self { - Span::_new(imp::Span::def_site()) - } - - /// Creates a new span with the same line/column information as `self` but - /// that resolves symbols as though it were at `other`. - pub fn resolved_at(&self, other: Span) -> Span { - Span::_new(self.inner.resolved_at(other.inner)) - } - - /// Creates a new span with the same name resolution behavior as `self` but - /// with the line/column information of `other`. - pub fn located_at(&self, other: Span) -> Span { - Span::_new(self.inner.located_at(other.inner)) - } - - /// Convert `proc_macro2::Span` to `proc_macro::Span`. - /// - /// This method is available when building with a nightly compiler, or when - /// building with rustc 1.29+ *without* semver exempt features. - /// - /// # Panics - /// - /// Panics if called from outside of a procedural macro. Unlike - /// `proc_macro2::Span`, the `proc_macro::Span` type can only exist within - /// the context of a procedural macro invocation. - #[cfg(wrap_proc_macro)] - pub fn unwrap(self) -> proc_macro::Span { - self.inner.unwrap() - } - - // Soft deprecated. Please use Span::unwrap. - #[cfg(wrap_proc_macro)] - #[doc(hidden)] - pub fn unstable(self) -> proc_macro::Span { - self.unwrap() - } - - /// Returns the span's byte position range in the source file. - /// - /// This method requires the `"span-locations"` feature to be enabled. - /// - /// When executing in a procedural macro context, the returned range is only - /// accurate if compiled with a nightly toolchain. The stable toolchain does - /// not have this information available. When executing outside of a - /// procedural macro, such as main.rs or build.rs, the byte range is always - /// accurate regardless of toolchain. - #[cfg(span_locations)] - #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] - pub fn byte_range(&self) -> Range<usize> { - self.inner.byte_range() - } - - /// Get the starting line/column in the source file for this span. - /// - /// This method requires the `"span-locations"` feature to be enabled. - /// - /// When executing in a procedural macro context, the returned line/column - /// are only meaningful if compiled with a nightly toolchain. The stable - /// toolchain does not have this information available. 
When executing - /// outside of a procedural macro, such as main.rs or build.rs, the - /// line/column are always meaningful regardless of toolchain. - #[cfg(span_locations)] - #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] - pub fn start(&self) -> LineColumn { - self.inner.start() - } - - /// Get the ending line/column in the source file for this span. - /// - /// This method requires the `"span-locations"` feature to be enabled. - /// - /// When executing in a procedural macro context, the returned line/column - /// are only meaningful if compiled with a nightly toolchain. The stable - /// toolchain does not have this information available. When executing - /// outside of a procedural macro, such as main.rs or build.rs, the - /// line/column are always meaningful regardless of toolchain. - #[cfg(span_locations)] - #[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] - pub fn end(&self) -> LineColumn { - self.inner.end() - } - - /// The path to the source file in which this span occurs, for display - /// purposes. - /// - /// This might not correspond to a valid file system path. It might be - /// remapped, or might be an artificial path such as `"<macro expansion>"`. - /// - /// This method is semver exempt and not exposed by default. - #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] - #[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))] - pub fn file(&self) -> String { - self.inner.file() - } - - /// The path to the source file in which this span occurs on disk. - /// - /// This is the actual path on disk. It is unaffected by path remapping. - /// - /// This path should not be embedded in the output of the macro; prefer - /// `file()` instead. - /// - /// This method is semver exempt and not exposed by default. - #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] - #[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))] - pub fn local_file(&self) -> Option<PathBuf> { - self.inner.local_file() - } - - /// Create a new span encompassing `self` and `other`. - /// - /// Returns `None` if `self` and `other` are from different files. - /// - /// Warning: the underlying [`proc_macro::Span::join`] method is - /// nightly-only. When called from within a procedural macro not using a - /// nightly compiler, this method will always return `None`. - pub fn join(&self, other: Span) -> Option<Span> { - self.inner.join(other.inner).map(Span::_new) - } - - /// Compares two spans to see if they're equal. - /// - /// This method is semver exempt and not exposed by default. - #[cfg(procmacro2_semver_exempt)] - #[cfg_attr(docsrs, doc(cfg(procmacro2_semver_exempt)))] - pub fn eq(&self, other: &Span) -> bool { - self.inner.eq(&other.inner) - } - - /// Returns the source text behind a span. This preserves the original - /// source code, including spaces and comments. It only returns a result if - /// the span corresponds to real source code. - /// - /// Note: The observable result of a macro should only rely on the tokens - /// and not on this source text. The result of this function is a best - /// effort to be used for diagnostics only. - pub fn source_text(&self) -> Option<String> { - self.inner.source_text() - } -} - -/// Prints a span in a form convenient for debugging. -impl Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`). 
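Before the token-tree types that follow, a short sketch of the span accessors above in use. It assumes the `span-locations` feature is enabled and runs outside a procedural macro, where the fallback spans carry real positions:

```rust
// Requires proc-macro2 built with the "span-locations" feature.
use proc_macro2::TokenStream;

fn main() {
    let tokens: TokenStream = "let answer = 42;".parse().expect("valid tokens");

    for tree in tokens {
        let span = tree.span();
        // start()/end() use 1-indexed lines and 0-indexed columns;
        // source_text() recovers the original text behind the span.
        println!(
            "{:?} at {}:{}..{}:{} -> {:?}",
            tree.to_string(),
            span.start().line,
            span.start().column,
            span.end().line,
            span.end().column,
            span.source_text(),
        );
    }
}
```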
-#[derive(Clone)] -pub enum TokenTree { - /// A token stream surrounded by bracket delimiters. - Group(Group), - /// An identifier. - Ident(Ident), - /// A single punctuation character (`+`, `,`, `$`, etc.). - Punct(Punct), - /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc. - Literal(Literal), -} - -impl TokenTree { - /// Returns the span of this tree, delegating to the `span` method of - /// the contained token or a delimited stream. - pub fn span(&self) -> Span { - match self { - TokenTree::Group(t) => t.span(), - TokenTree::Ident(t) => t.span(), - TokenTree::Punct(t) => t.span(), - TokenTree::Literal(t) => t.span(), - } - } - - /// Configures the span for *only this token*. - /// - /// Note that if this token is a `Group` then this method will not configure - /// the span of each of the internal tokens, this will simply delegate to - /// the `set_span` method of each variant. - pub fn set_span(&mut self, span: Span) { - match self { - TokenTree::Group(t) => t.set_span(span), - TokenTree::Ident(t) => t.set_span(span), - TokenTree::Punct(t) => t.set_span(span), - TokenTree::Literal(t) => t.set_span(span), - } - } -} - -impl From<Group> for TokenTree { - fn from(g: Group) -> Self { - TokenTree::Group(g) - } -} - -impl From<Ident> for TokenTree { - fn from(g: Ident) -> Self { - TokenTree::Ident(g) - } -} - -impl From<Punct> for TokenTree { - fn from(g: Punct) -> Self { - TokenTree::Punct(g) - } -} - -impl From<Literal> for TokenTree { - fn from(g: Literal) -> Self { - TokenTree::Literal(g) - } -} - -/// Prints the token tree as a string that is supposed to be losslessly -/// convertible back into the same token tree (modulo spans), except for -/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative -/// numeric literals. -impl Display for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - TokenTree::Group(t) => Display::fmt(t, f), - TokenTree::Ident(t) => Display::fmt(t, f), - TokenTree::Punct(t) => Display::fmt(t, f), - TokenTree::Literal(t) => Display::fmt(t, f), - } - } -} - -/// Prints token tree in a form convenient for debugging. -impl Debug for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // Each of these has the name in the struct type in the derived debug, - // so don't bother with an extra layer of indirection - match self { - TokenTree::Group(t) => Debug::fmt(t, f), - TokenTree::Ident(t) => { - let mut debug = f.debug_struct("Ident"); - debug.field("sym", &format_args!("{}", t)); - imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner); - debug.finish() - } - TokenTree::Punct(t) => Debug::fmt(t, f), - TokenTree::Literal(t) => Debug::fmt(t, f), - } - } -} - -/// A delimited token stream. -/// -/// A `Group` internally contains a `TokenStream` which is surrounded by -/// `Delimiter`s. -#[derive(Clone)] -pub struct Group { - inner: imp::Group, -} - -/// Describes how a sequence of token trees is delimited. -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Delimiter { - /// `( ... )` - Parenthesis, - /// `{ ... }` - Brace, - /// `[ ... ]` - Bracket, - /// `∅ ... ∅` - /// - /// An invisible delimiter, that may, for example, appear around tokens - /// coming from a "macro variable" `$var`. It is important to preserve - /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`. - /// Invisible delimiters may not survive roundtrip of a token stream through - /// a string. 
- /// - /// <div class="warning"> - /// - /// Note: rustc currently can ignore the grouping of tokens delimited by `None` in the output - /// of a proc_macro. Only `None`-delimited groups created by a macro_rules macro in the input - /// of a proc_macro macro are preserved, and only in very specific circumstances. - /// Any `None`-delimited groups (re)created by a proc_macro will therefore not preserve - /// operator priorities as indicated above. The other `Delimiter` variants should be used - /// instead in this context. This is a rustc bug. For details, see - /// [rust-lang/rust#67062](https://github.com/rust-lang/rust/issues/67062). - /// - /// </div> - None, -} - -impl Group { - fn _new(inner: imp::Group) -> Self { - Group { inner } - } - - fn _new_fallback(inner: fallback::Group) -> Self { - Group { - inner: imp::Group::from(inner), - } - } - - /// Creates a new `Group` with the given delimiter and token stream. - /// - /// This constructor will set the span for this group to - /// `Span::call_site()`. To change the span you can use the `set_span` - /// method below. - pub fn new(delimiter: Delimiter, stream: TokenStream) -> Self { - Group { - inner: imp::Group::new(delimiter, stream.inner), - } - } - - /// Returns the punctuation used as the delimiter for this group: a set of - /// parentheses, square brackets, or curly braces. - pub fn delimiter(&self) -> Delimiter { - self.inner.delimiter() - } - - /// Returns the `TokenStream` of tokens that are delimited in this `Group`. - /// - /// Note that the returned token stream does not include the delimiter - /// returned above. - pub fn stream(&self) -> TokenStream { - TokenStream::_new(self.inner.stream()) - } - - /// Returns the span for the delimiters of this token stream, spanning the - /// entire `Group`. - /// - /// ```text - /// pub fn span(&self) -> Span { - /// ^^^^^^^ - /// ``` - pub fn span(&self) -> Span { - Span::_new(self.inner.span()) - } - - /// Returns the span pointing to the opening delimiter of this group. - /// - /// ```text - /// pub fn span_open(&self) -> Span { - /// ^ - /// ``` - pub fn span_open(&self) -> Span { - Span::_new(self.inner.span_open()) - } - - /// Returns the span pointing to the closing delimiter of this group. - /// - /// ```text - /// pub fn span_close(&self) -> Span { - /// ^ - /// ``` - pub fn span_close(&self) -> Span { - Span::_new(self.inner.span_close()) - } - - /// Returns an object that holds this group's `span_open()` and - /// `span_close()` together (in a more compact representation than holding - /// those 2 spans individually). - pub fn delim_span(&self) -> DelimSpan { - DelimSpan::new(&self.inner) - } - - /// Configures the span for this `Group`'s delimiters, but not its internal - /// tokens. - /// - /// This method will **not** set the span of all the internal tokens spanned - /// by this group, but rather it will only set the span of the delimiter - /// tokens at the level of the `Group`. - pub fn set_span(&mut self, span: Span) { - self.inner.set_span(span.inner); - } -} - -/// Prints the group as a string that should be losslessly convertible back -/// into the same group (modulo spans), except for possibly `TokenTree::Group`s -/// with `Delimiter::None` delimiters. 
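A small sketch of building a `Group` by hand, also using `Punct` and `Spacing` (introduced just below); spans are left at `call_site()` and the printed form is only indicative:

```rust
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};

fn main() {
    // `+=` must be two Puncts: the first Joint (glued to the next punct),
    // the second Alone.
    let inner: TokenStream = vec![
        TokenTree::Ident(Ident::new("total", Span::call_site())),
        TokenTree::Punct(Punct::new('+', Spacing::Joint)),
        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
        TokenTree::Ident(Ident::new("delta", Span::call_site())),
    ]
    .into_iter()
    .collect();

    // Wrap the statement in braces; the delimiter and inner stream
    // remain recoverable from the Group afterwards.
    let group = Group::new(Delimiter::Brace, inner);
    assert_eq!(group.delimiter(), Delimiter::Brace);
    println!("{}", group); // prints something like `{ total += delta }`
}
```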
-impl Display for Group { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, formatter) - } -} - -impl Debug for Group { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, formatter) - } -} - -/// A `Punct` is a single punctuation character like `+`, `-` or `#`. -/// -/// Multicharacter operators like `+=` are represented as two instances of -/// `Punct` with different forms of `Spacing` returned. -#[derive(Clone)] -pub struct Punct { - ch: char, - spacing: Spacing, - span: Span, -} - -/// Whether a `Punct` is followed immediately by another `Punct` or followed by -/// another token or whitespace. -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum Spacing { - /// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`. - Alone, - /// E.g. `+` is `Joint` in `+=` or `'` is `Joint` in `'#`. - /// - /// Additionally, single quote `'` can join with identifiers to form - /// lifetimes `'ident`. - Joint, -} - -impl Punct { - /// Creates a new `Punct` from the given character and spacing. - /// - /// The `ch` argument must be a valid punctuation character permitted by the - /// language, otherwise the function will panic. - /// - /// The returned `Punct` will have the default span of `Span::call_site()` - /// which can be further configured with the `set_span` method below. - pub fn new(ch: char, spacing: Spacing) -> Self { - if let '!' | '#' | '$' | '%' | '&' | '\'' | '*' | '+' | ',' | '-' | '.' | '/' | ':' | ';' - | '<' | '=' | '>' | '?' | '@' | '^' | '|' | '~' = ch - { - Punct { - ch, - spacing, - span: Span::call_site(), - } - } else { - panic!("unsupported proc macro punctuation character {:?}", ch); - } - } - - /// Returns the value of this punctuation character as `char`. - pub fn as_char(&self) -> char { - self.ch - } - - /// Returns the spacing of this punctuation character, indicating whether - /// it's immediately followed by another `Punct` in the token stream, so - /// they can potentially be combined into a multicharacter operator - /// (`Joint`), or it's followed by some other token or whitespace (`Alone`) - /// so the operator has certainly ended. - pub fn spacing(&self) -> Spacing { - self.spacing - } - - /// Returns the span for this punctuation character. - pub fn span(&self) -> Span { - self.span - } - - /// Configure the span for this punctuation character. - pub fn set_span(&mut self, span: Span) { - self.span = span; - } -} - -/// Prints the punctuation character as a string that should be losslessly -/// convertible back into the same character. -impl Display for Punct { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.ch, f) - } -} - -impl Debug for Punct { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut debug = fmt.debug_struct("Punct"); - debug.field("char", &self.ch); - debug.field("spacing", &self.spacing); - imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner); - debug.finish() - } -} - -/// A word of Rust code, which may be a keyword or legal variable name. -/// -/// An identifier consists of at least one Unicode code point, the first of -/// which has the XID_Start property and the rest of which have the XID_Continue -/// property. -/// -/// - The empty string is not an identifier. Use `Option<Ident>`. -/// - A lifetime is not an identifier. Use `syn::Lifetime` instead. 
-/// -/// An identifier constructed with `Ident::new` is permitted to be a Rust -/// keyword, though parsing one through its [`Parse`] implementation rejects -/// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the -/// behaviour of `Ident::new`. -/// -/// [`Parse`]: https://docs.rs/syn/2.0/syn/parse/trait.Parse.html -/// -/// # Examples -/// -/// A new ident can be created from a string using the `Ident::new` function. -/// A span must be provided explicitly which governs the name resolution -/// behavior of the resulting identifier. -/// -/// ``` -/// use proc_macro2::{Ident, Span}; -/// -/// fn main() { -/// let call_ident = Ident::new("calligraphy", Span::call_site()); -/// -/// println!("{}", call_ident); -/// } -/// ``` -/// -/// An ident can be interpolated into a token stream using the `quote!` macro. -/// -/// ``` -/// use proc_macro2::{Ident, Span}; -/// use quote::quote; -/// -/// fn main() { -/// let ident = Ident::new("demo", Span::call_site()); -/// -/// // Create a variable binding whose name is this ident. -/// let expanded = quote! { let #ident = 10; }; -/// -/// // Create a variable binding with a slightly different name. -/// let temp_ident = Ident::new(&format!("new_{}", ident), Span::call_site()); -/// let expanded = quote! { let #temp_ident = 10; }; -/// } -/// ``` -/// -/// A string representation of the ident is available through the `to_string()` -/// method. -/// -/// ``` -/// # use proc_macro2::{Ident, Span}; -/// # -/// # let ident = Ident::new("another_identifier", Span::call_site()); -/// # -/// // Examine the ident as a string. -/// let ident_string = ident.to_string(); -/// if ident_string.len() > 60 { -/// println!("Very long identifier: {}", ident_string) -/// } -/// ``` -#[derive(Clone)] -pub struct Ident { - inner: imp::Ident, - _marker: ProcMacroAutoTraits, -} - -impl Ident { - fn _new(inner: imp::Ident) -> Self { - Ident { - inner, - _marker: MARKER, - } - } - - fn _new_fallback(inner: fallback::Ident) -> Self { - Ident { - inner: imp::Ident::from(inner), - _marker: MARKER, - } - } - - /// Creates a new `Ident` with the given `string` as well as the specified - /// `span`. - /// - /// The `string` argument must be a valid identifier permitted by the - /// language, otherwise the function will panic. - /// - /// Note that `span`, currently in rustc, configures the hygiene information - /// for this identifier. - /// - /// As of this time `Span::call_site()` explicitly opts-in to "call-site" - /// hygiene meaning that identifiers created with this span will be resolved - /// as if they were written directly at the location of the macro call, and - /// other code at the macro call site will be able to refer to them as well. - /// - /// Later spans like `Span::def_site()` will allow to opt-in to - /// "definition-site" hygiene meaning that identifiers created with this - /// span will be resolved at the location of the macro definition and other - /// code at the macro call site will not be able to refer to them. - /// - /// Due to the current importance of hygiene this constructor, unlike other - /// tokens, requires a `Span` to be specified at construction. - /// - /// # Panics - /// - /// Panics if the input string is neither a keyword nor a legal variable - /// name. 
If you are not sure whether the string contains an identifier and - /// need to handle an error case, use - /// <a href="https://docs.rs/syn/2.0/syn/fn.parse_str.html"><code - /// style="padding-right:0;">syn::parse_str</code></a><code - /// style="padding-left:0;">::<Ident></code> - /// rather than `Ident::new`. - #[track_caller] - pub fn new(string: &str, span: Span) -> Self { - Ident::_new(imp::Ident::new_checked(string, span.inner)) - } - - /// Same as `Ident::new`, but creates a raw identifier (`r#ident`). The - /// `string` argument must be a valid identifier permitted by the language - /// (including keywords, e.g. `fn`). Keywords which are usable in path - /// segments (e.g. `self`, `super`) are not supported, and will cause a - /// panic. - #[track_caller] - pub fn new_raw(string: &str, span: Span) -> Self { - Ident::_new(imp::Ident::new_raw_checked(string, span.inner)) - } - - /// Returns the span of this `Ident`. - pub fn span(&self) -> Span { - Span::_new(self.inner.span()) - } - - /// Configures the span of this `Ident`, possibly changing its hygiene - /// context. - pub fn set_span(&mut self, span: Span) { - self.inner.set_span(span.inner); - } -} - -impl PartialEq for Ident { - fn eq(&self, other: &Ident) -> bool { - self.inner == other.inner - } -} - -impl<T> PartialEq<T> for Ident -where - T: ?Sized + AsRef<str>, -{ - fn eq(&self, other: &T) -> bool { - self.inner == other - } -} - -impl Eq for Ident {} - -impl PartialOrd for Ident { - fn partial_cmp(&self, other: &Ident) -> Option<Ordering> { - Some(self.cmp(other)) - } -} - -impl Ord for Ident { - fn cmp(&self, other: &Ident) -> Ordering { - self.to_string().cmp(&other.to_string()) - } -} - -impl Hash for Ident { - fn hash<H: Hasher>(&self, hasher: &mut H) { - self.to_string().hash(hasher); - } -} - -/// Prints the identifier as a string that should be losslessly convertible back -/// into the same identifier. -impl Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, f) - } -} - -impl Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -/// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`), -/// byte character (`b'a'`), an integer or floating point number with or without -/// a suffix (`1`, `1u8`, `2.3`, `2.3f32`). -/// -/// Boolean literals like `true` and `false` do not belong here, they are -/// `Ident`s. -#[derive(Clone)] -pub struct Literal { - inner: imp::Literal, - _marker: ProcMacroAutoTraits, -} - -macro_rules! suffixed_int_literals { - ($($name:ident => $kind:ident,)*) => ($( - /// Creates a new suffixed integer literal with the specified value. - /// - /// This function will create an integer like `1u32` where the integer - /// value specified is the first part of the token and the integral is - /// also suffixed at the end. Literals created from negative numbers may - /// not survive roundtrips through `TokenStream` or strings and may be - /// broken into two tokens (`-` and positive literal). - /// - /// Literals created through this method have the `Span::call_site()` - /// span by default, which can be configured with the `set_span` method - /// below. - pub fn $name(n: $kind) -> Literal { - Literal::_new(imp::Literal::$name(n)) - } - )*) -} - -macro_rules! unsuffixed_int_literals { - ($($name:ident => $kind:ident,)*) => ($( - /// Creates a new unsuffixed integer literal with the specified value. 
- /// - /// This function will create an integer like `1` where the integer - /// value specified is the first part of the token. No suffix is - /// specified on this token, meaning that invocations like - /// `Literal::i8_unsuffixed(1)` are equivalent to - /// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers - /// may not survive roundtrips through `TokenStream` or strings and may - /// be broken into two tokens (`-` and positive literal). - /// - /// Literals created through this method have the `Span::call_site()` - /// span by default, which can be configured with the `set_span` method - /// below. - pub fn $name(n: $kind) -> Literal { - Literal::_new(imp::Literal::$name(n)) - } - )*) -} - -impl Literal { - fn _new(inner: imp::Literal) -> Self { - Literal { - inner, - _marker: MARKER, - } - } - - fn _new_fallback(inner: fallback::Literal) -> Self { - Literal { - inner: imp::Literal::from(inner), - _marker: MARKER, - } - } - - suffixed_int_literals! { - u8_suffixed => u8, - u16_suffixed => u16, - u32_suffixed => u32, - u64_suffixed => u64, - u128_suffixed => u128, - usize_suffixed => usize, - i8_suffixed => i8, - i16_suffixed => i16, - i32_suffixed => i32, - i64_suffixed => i64, - i128_suffixed => i128, - isize_suffixed => isize, - } - - unsuffixed_int_literals! { - u8_unsuffixed => u8, - u16_unsuffixed => u16, - u32_unsuffixed => u32, - u64_unsuffixed => u64, - u128_unsuffixed => u128, - usize_unsuffixed => usize, - i8_unsuffixed => i8, - i16_unsuffixed => i16, - i32_unsuffixed => i32, - i64_unsuffixed => i64, - i128_unsuffixed => i128, - isize_unsuffixed => isize, - } - - /// Creates a new unsuffixed floating-point literal. - /// - /// This constructor is similar to those like `Literal::i8_unsuffixed` where - /// the float's value is emitted directly into the token but no suffix is - /// used, so it may be inferred to be a `f64` later in the compiler. - /// Literals created from negative numbers may not survive round-trips - /// through `TokenStream` or strings and may be broken into two tokens (`-` - /// and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for example - /// if it is infinity or NaN this function will panic. - pub fn f64_unsuffixed(f: f64) -> Literal { - assert!(f.is_finite()); - Literal::_new(imp::Literal::f64_unsuffixed(f)) - } - - /// Creates a new suffixed floating-point literal. - /// - /// This constructor will create a literal like `1.0f64` where the value - /// specified is the preceding part of the token and `f64` is the suffix of - /// the token. This token will always be inferred to be an `f64` in the - /// compiler. Literals created from negative numbers may not survive - /// round-trips through `TokenStream` or strings and may be broken into two - /// tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for example - /// if it is infinity or NaN this function will panic. - pub fn f64_suffixed(f: f64) -> Literal { - assert!(f.is_finite()); - Literal::_new(imp::Literal::f64_suffixed(f)) - } - - /// Creates a new unsuffixed floating-point literal. - /// - /// This constructor is similar to those like `Literal::i8_unsuffixed` where - /// the float's value is emitted directly into the token but no suffix is - /// used, so it may be inferred to be a `f64` later in the compiler. 
- /// Literals created from negative numbers may not survive round-trips - /// through `TokenStream` or strings and may be broken into two tokens (`-` - /// and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for example - /// if it is infinity or NaN this function will panic. - pub fn f32_unsuffixed(f: f32) -> Literal { - assert!(f.is_finite()); - Literal::_new(imp::Literal::f32_unsuffixed(f)) - } - - /// Creates a new suffixed floating-point literal. - /// - /// This constructor will create a literal like `1.0f32` where the value - /// specified is the preceding part of the token and `f32` is the suffix of - /// the token. This token will always be inferred to be an `f32` in the - /// compiler. Literals created from negative numbers may not survive - /// round-trips through `TokenStream` or strings and may be broken into two - /// tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for example - /// if it is infinity or NaN this function will panic. - pub fn f32_suffixed(f: f32) -> Literal { - assert!(f.is_finite()); - Literal::_new(imp::Literal::f32_suffixed(f)) - } - - /// String literal. - pub fn string(string: &str) -> Literal { - Literal::_new(imp::Literal::string(string)) - } - - /// Character literal. - pub fn character(ch: char) -> Literal { - Literal::_new(imp::Literal::character(ch)) - } - - /// Byte character literal. - pub fn byte_character(byte: u8) -> Literal { - Literal::_new(imp::Literal::byte_character(byte)) - } - - /// Byte string literal. - pub fn byte_string(bytes: &[u8]) -> Literal { - Literal::_new(imp::Literal::byte_string(bytes)) - } - - /// C string literal. - pub fn c_string(string: &CStr) -> Literal { - Literal::_new(imp::Literal::c_string(string)) - } - - /// Returns the span encompassing this literal. - pub fn span(&self) -> Span { - Span::_new(self.inner.span()) - } - - /// Configures the span associated for this literal. - pub fn set_span(&mut self, span: Span) { - self.inner.set_span(span.inner); - } - - /// Returns a `Span` that is a subset of `self.span()` containing only - /// the source bytes in range `range`. Returns `None` if the would-be - /// trimmed span is outside the bounds of `self`. - /// - /// Warning: the underlying [`proc_macro::Literal::subspan`] method is - /// nightly-only. When called from within a procedural macro not using a - /// nightly compiler, this method will always return `None`. - pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> { - self.inner.subspan(range).map(Span::_new) - } - - // Intended for the `quote!` macro to use when constructing a proc-macro2 - // token out of a macro_rules $:literal token, which is already known to be - // a valid literal. This avoids reparsing/validating the literal's string - // representation. This is not public API other than for quote. 
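To make the constructors above concrete, a short sketch creating identifiers and literals directly; the expected reprs follow the escaping and suffix rules shown earlier in this diff:

```rust
use proc_macro2::{Ident, Literal, Span};

fn main() {
    // Keywords are allowed by Ident::new; raw identifiers get an `r#` prefix.
    let kw = Ident::new("fn", Span::call_site());
    let raw = Ident::new_raw("type", Span::call_site());
    assert_eq!(kw.to_string(), "fn");
    assert_eq!(raw.to_string(), "r#type");

    // Suffixed vs unsuffixed integer literals.
    assert_eq!(Literal::u8_suffixed(7).to_string(), "7u8");
    assert_eq!(Literal::u8_unsuffixed(7).to_string(), "7");

    // Unsuffixed floats always keep a decimal point so they stay float tokens.
    assert_eq!(Literal::f64_unsuffixed(3.0).to_string(), "3.0");

    // String literals are escaped; quotes and newlines become escape sequences.
    assert_eq!(Literal::string("a\"b\n").to_string(), "\"a\\\"b\\n\"");

    // Byte strings render non-printable bytes as \xNN.
    assert_eq!(Literal::byte_string(b"\x01ok").to_string(), r#"b"\x01ok""#);
}
```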
- #[doc(hidden)] - pub unsafe fn from_str_unchecked(repr: &str) -> Self { - Literal::_new(unsafe { imp::Literal::from_str_unchecked(repr) }) - } -} - -impl FromStr for Literal { - type Err = LexError; - - fn from_str(repr: &str) -> Result<Self, LexError> { - match imp::Literal::from_str_checked(repr) { - Ok(lit) => Ok(Literal::_new(lit)), - Err(lex) => Err(LexError { - inner: lex, - _marker: MARKER, - }), - } - } -} - -impl Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&self.inner, f) - } -} - -impl Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Display::fmt(&self.inner, f) - } -} - -/// Public implementation details for the `TokenStream` type, such as iterators. -pub mod token_stream { - use crate::marker::{ProcMacroAutoTraits, MARKER}; - use crate::{imp, TokenTree}; - use core::fmt::{self, Debug}; - - pub use crate::TokenStream; - - /// An iterator over `TokenStream`'s `TokenTree`s. - /// - /// The iteration is "shallow", e.g. the iterator doesn't recurse into - /// delimited groups, and returns whole groups as token trees. - #[derive(Clone)] - pub struct IntoIter { - inner: imp::TokenTreeIter, - _marker: ProcMacroAutoTraits, - } - - impl Iterator for IntoIter { - type Item = TokenTree; - - fn next(&mut self) -> Option<TokenTree> { - self.inner.next() - } - - fn size_hint(&self) -> (usize, Option<usize>) { - self.inner.size_hint() - } - } - - impl Debug for IntoIter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("TokenStream ")?; - f.debug_list().entries(self.clone()).finish() - } - } - - impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = IntoIter; - - fn into_iter(self) -> IntoIter { - IntoIter { - inner: self.inner.into_iter(), - _marker: MARKER, - } - } - } -} diff --git a/vendor/proc-macro2/src/location.rs b/vendor/proc-macro2/src/location.rs deleted file mode 100644 index 7190e2d0..00000000 --- a/vendor/proc-macro2/src/location.rs +++ /dev/null @@ -1,29 +0,0 @@ -use core::cmp::Ordering; - -/// A line-column pair representing the start or end of a `Span`. -/// -/// This type is semver exempt and not exposed by default. -#[cfg_attr(docsrs, doc(cfg(feature = "span-locations")))] -#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] -pub struct LineColumn { - /// The 1-indexed line in the source file on which the span starts or ends - /// (inclusive). - pub line: usize, - /// The 0-indexed column (in UTF-8 characters) in the source file on which - /// the span starts or ends (inclusive). - pub column: usize, -} - -impl Ord for LineColumn { - fn cmp(&self, other: &Self) -> Ordering { - self.line - .cmp(&other.line) - .then(self.column.cmp(&other.column)) - } -} - -impl PartialOrd for LineColumn { - fn partial_cmp(&self, other: &Self) -> Option<Ordering> { - Some(self.cmp(other)) - } -} diff --git a/vendor/proc-macro2/src/marker.rs b/vendor/proc-macro2/src/marker.rs deleted file mode 100644 index 23b94ce6..00000000 --- a/vendor/proc-macro2/src/marker.rs +++ /dev/null @@ -1,17 +0,0 @@ -use alloc::rc::Rc; -use core::marker::PhantomData; -use core::panic::{RefUnwindSafe, UnwindSafe}; - -// Zero sized marker with the correct set of autotrait impls we want all proc -// macro types to have. 
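A short sketch of the "shallow" iteration described for `token_stream::IntoIter` above, assuming `proc-macro2` as a dependency: delimited groups come back as single `TokenTree::Group` items rather than being flattened.

use proc_macro2::{TokenStream, TokenTree};
use std::str::FromStr;

fn main() {
    let stream = TokenStream::from_str("a + (b * c)").unwrap();
    let kinds: Vec<&str> = stream
        .into_iter()
        .map(|tree| match tree {
            TokenTree::Ident(_) => "ident",
            TokenTree::Punct(_) => "punct",
            TokenTree::Group(_) => "group", // `(b * c)` stays a single tree
            TokenTree::Literal(_) => "literal",
        })
        .collect();
    // The iterator does not recurse into the parenthesized group.
    assert_eq!(kinds, ["ident", "punct", "group"]);
}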
-#[derive(Copy, Clone)] -#[cfg_attr( - all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)), - derive(PartialEq, Eq) -)] -pub(crate) struct ProcMacroAutoTraits(PhantomData<Rc<()>>); - -pub(crate) const MARKER: ProcMacroAutoTraits = ProcMacroAutoTraits(PhantomData); - -impl UnwindSafe for ProcMacroAutoTraits {} -impl RefUnwindSafe for ProcMacroAutoTraits {} diff --git a/vendor/proc-macro2/src/parse.rs b/vendor/proc-macro2/src/parse.rs deleted file mode 100644 index 0f9eb866..00000000 --- a/vendor/proc-macro2/src/parse.rs +++ /dev/null @@ -1,992 +0,0 @@ -use crate::fallback::{ - self, is_ident_continue, is_ident_start, Group, Ident, LexError, Literal, Span, TokenStream, - TokenStreamBuilder, -}; -use crate::{Delimiter, Punct, Spacing, TokenTree}; -use core::char; -use core::str::{Bytes, CharIndices, Chars}; - -#[derive(Copy, Clone, Eq, PartialEq)] -pub(crate) struct Cursor<'a> { - pub(crate) rest: &'a str, - #[cfg(span_locations)] - pub(crate) off: u32, -} - -impl<'a> Cursor<'a> { - pub(crate) fn advance(&self, bytes: usize) -> Cursor<'a> { - let (_front, rest) = self.rest.split_at(bytes); - Cursor { - rest, - #[cfg(span_locations)] - off: self.off + _front.chars().count() as u32, - } - } - - pub(crate) fn starts_with(&self, s: &str) -> bool { - self.rest.starts_with(s) - } - - pub(crate) fn starts_with_char(&self, ch: char) -> bool { - self.rest.starts_with(ch) - } - - pub(crate) fn starts_with_fn<Pattern>(&self, f: Pattern) -> bool - where - Pattern: FnMut(char) -> bool, - { - self.rest.starts_with(f) - } - - pub(crate) fn is_empty(&self) -> bool { - self.rest.is_empty() - } - - fn len(&self) -> usize { - self.rest.len() - } - - fn as_bytes(&self) -> &'a [u8] { - self.rest.as_bytes() - } - - fn bytes(&self) -> Bytes<'a> { - self.rest.bytes() - } - - fn chars(&self) -> Chars<'a> { - self.rest.chars() - } - - fn char_indices(&self) -> CharIndices<'a> { - self.rest.char_indices() - } - - fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> { - if self.starts_with(tag) { - Ok(self.advance(tag.len())) - } else { - Err(Reject) - } - } -} - -pub(crate) struct Reject; -type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>; - -fn skip_whitespace(input: Cursor) -> Cursor { - let mut s = input; - - while !s.is_empty() { - let byte = s.as_bytes()[0]; - if byte == b'/' { - if s.starts_with("//") - && (!s.starts_with("///") || s.starts_with("////")) - && !s.starts_with("//!") - { - let (cursor, _) = take_until_newline_or_eof(s); - s = cursor; - continue; - } else if s.starts_with("/**/") { - s = s.advance(4); - continue; - } else if s.starts_with("/*") - && (!s.starts_with("/**") || s.starts_with("/***")) - && !s.starts_with("/*!") - { - match block_comment(s) { - Ok((rest, _)) => { - s = rest; - continue; - } - Err(Reject) => return s, - } - } - } - match byte { - b' ' | 0x09..=0x0d => { - s = s.advance(1); - continue; - } - b if b.is_ascii() => {} - _ => { - let ch = s.chars().next().unwrap(); - if is_whitespace(ch) { - s = s.advance(ch.len_utf8()); - continue; - } - } - } - return s; - } - s -} - -fn block_comment(input: Cursor) -> PResult<&str> { - if !input.starts_with("/*") { - return Err(Reject); - } - - let mut depth = 0usize; - let bytes = input.as_bytes(); - let mut i = 0usize; - let upper = bytes.len() - 1; - - while i < upper { - if bytes[i] == b'/' && bytes[i + 1] == b'*' { - depth += 1; - i += 1; // eat '*' - } else if bytes[i] == b'*' && bytes[i + 1] == b'/' { - depth -= 1; - if depth == 0 { - return Ok((input.advance(i + 2), &input.rest[..i + 2])); - } - i += 1; 
// eat '/' - } - i += 1; - } - - Err(Reject) -} - -fn is_whitespace(ch: char) -> bool { - // Rust treats left-to-right mark and right-to-left mark as whitespace - ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}' -} - -fn word_break(input: Cursor) -> Result<Cursor, Reject> { - match input.chars().next() { - Some(ch) if is_ident_continue(ch) => Err(Reject), - Some(_) | None => Ok(input), - } -} - -// Rustc's representation of a macro expansion error in expression position or -// type position. -const ERROR: &str = "(/*ERROR*/)"; - -pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> { - let mut trees = TokenStreamBuilder::new(); - let mut stack = Vec::new(); - - loop { - input = skip_whitespace(input); - - if let Ok((rest, ())) = doc_comment(input, &mut trees) { - input = rest; - continue; - } - - #[cfg(span_locations)] - let lo = input.off; - - let first = match input.bytes().next() { - Some(first) => first, - None => match stack.last() { - None => return Ok(trees.build()), - #[cfg(span_locations)] - Some((lo, _frame)) => { - return Err(LexError { - span: Span { lo: *lo, hi: *lo }, - }) - } - #[cfg(not(span_locations))] - Some(_frame) => return Err(LexError { span: Span {} }), - }, - }; - - if let Some(open_delimiter) = match first { - b'(' if !input.starts_with(ERROR) => Some(Delimiter::Parenthesis), - b'[' => Some(Delimiter::Bracket), - b'{' => Some(Delimiter::Brace), - _ => None, - } { - input = input.advance(1); - let frame = (open_delimiter, trees); - #[cfg(span_locations)] - let frame = (lo, frame); - stack.push(frame); - trees = TokenStreamBuilder::new(); - } else if let Some(close_delimiter) = match first { - b')' => Some(Delimiter::Parenthesis), - b']' => Some(Delimiter::Bracket), - b'}' => Some(Delimiter::Brace), - _ => None, - } { - let frame = match stack.pop() { - Some(frame) => frame, - None => return Err(lex_error(input)), - }; - #[cfg(span_locations)] - let (lo, frame) = frame; - let (open_delimiter, outer) = frame; - if open_delimiter != close_delimiter { - return Err(lex_error(input)); - } - input = input.advance(1); - let mut g = Group::new(open_delimiter, trees.build()); - g.set_span(Span { - #[cfg(span_locations)] - lo, - #[cfg(span_locations)] - hi: input.off, - }); - trees = outer; - trees.push_token_from_parser(TokenTree::Group(crate::Group::_new_fallback(g))); - } else { - let (rest, mut tt) = match leaf_token(input) { - Ok((rest, tt)) => (rest, tt), - Err(Reject) => return Err(lex_error(input)), - }; - tt.set_span(crate::Span::_new_fallback(Span { - #[cfg(span_locations)] - lo, - #[cfg(span_locations)] - hi: rest.off, - })); - trees.push_token_from_parser(tt); - input = rest; - } - } -} - -fn lex_error(cursor: Cursor) -> LexError { - #[cfg(not(span_locations))] - let _ = cursor; - LexError { - span: Span { - #[cfg(span_locations)] - lo: cursor.off, - #[cfg(span_locations)] - hi: cursor.off, - }, - } -} - -fn leaf_token(input: Cursor) -> PResult<TokenTree> { - if let Ok((input, l)) = literal(input) { - // must be parsed before ident - Ok((input, TokenTree::Literal(crate::Literal::_new_fallback(l)))) - } else if let Ok((input, p)) = punct(input) { - Ok((input, TokenTree::Punct(p))) - } else if let Ok((input, i)) = ident(input) { - Ok((input, TokenTree::Ident(i))) - } else if input.starts_with(ERROR) { - let rest = input.advance(ERROR.len()); - let repr = crate::Literal::_new_fallback(Literal::_new(ERROR.to_owned())); - Ok((rest, TokenTree::Literal(repr))) - } else { - Err(Reject) - } -} - -fn ident(input: Cursor) -> 
PResult<crate::Ident> { - if [ - "r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#", "c\"", "cr\"", "cr#", - ] - .iter() - .any(|prefix| input.starts_with(prefix)) - { - Err(Reject) - } else { - ident_any(input) - } -} - -fn ident_any(input: Cursor) -> PResult<crate::Ident> { - let raw = input.starts_with("r#"); - let rest = input.advance((raw as usize) << 1); - - let (rest, sym) = ident_not_raw(rest)?; - - if !raw { - let ident = - crate::Ident::_new_fallback(Ident::new_unchecked(sym, fallback::Span::call_site())); - return Ok((rest, ident)); - } - - match sym { - "_" | "super" | "self" | "Self" | "crate" => return Err(Reject), - _ => {} - } - - let ident = - crate::Ident::_new_fallback(Ident::new_raw_unchecked(sym, fallback::Span::call_site())); - Ok((rest, ident)) -} - -fn ident_not_raw(input: Cursor) -> PResult<&str> { - let mut chars = input.char_indices(); - - match chars.next() { - Some((_, ch)) if is_ident_start(ch) => {} - _ => return Err(Reject), - } - - let mut end = input.len(); - for (i, ch) in chars { - if !is_ident_continue(ch) { - end = i; - break; - } - } - - Ok((input.advance(end), &input.rest[..end])) -} - -pub(crate) fn literal(input: Cursor) -> PResult<Literal> { - let rest = literal_nocapture(input)?; - let end = input.len() - rest.len(); - Ok((rest, Literal::_new(input.rest[..end].to_string()))) -} - -fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> { - if let Ok(ok) = string(input) { - Ok(ok) - } else if let Ok(ok) = byte_string(input) { - Ok(ok) - } else if let Ok(ok) = c_string(input) { - Ok(ok) - } else if let Ok(ok) = byte(input) { - Ok(ok) - } else if let Ok(ok) = character(input) { - Ok(ok) - } else if let Ok(ok) = float(input) { - Ok(ok) - } else if let Ok(ok) = int(input) { - Ok(ok) - } else { - Err(Reject) - } -} - -fn literal_suffix(input: Cursor) -> Cursor { - match ident_not_raw(input) { - Ok((input, _)) => input, - Err(Reject) => input, - } -} - -fn string(input: Cursor) -> Result<Cursor, Reject> { - if let Ok(input) = input.parse("\"") { - cooked_string(input) - } else if let Ok(input) = input.parse("r") { - raw_string(input) - } else { - Err(Reject) - } -} - -fn cooked_string(mut input: Cursor) -> Result<Cursor, Reject> { - let mut chars = input.char_indices(); - - while let Some((i, ch)) = chars.next() { - match ch { - '"' => { - let input = input.advance(i + 1); - return Ok(literal_suffix(input)); - } - '\r' => match chars.next() { - Some((_, '\n')) => {} - _ => break, - }, - '\\' => match chars.next() { - Some((_, 'x')) => { - backslash_x_char(&mut chars)?; - } - Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0')) => {} - Some((_, 'u')) => { - backslash_u(&mut chars)?; - } - Some((newline, ch @ ('\n' | '\r'))) => { - input = input.advance(newline + 1); - trailing_backslash(&mut input, ch as u8)?; - chars = input.char_indices(); - } - _ => break, - }, - _ch => {} - } - } - Err(Reject) -} - -fn raw_string(input: Cursor) -> Result<Cursor, Reject> { - let (input, delimiter) = delimiter_of_raw_string(input)?; - let mut bytes = input.bytes().enumerate(); - while let Some((i, byte)) = bytes.next() { - match byte { - b'"' if input.rest[i + 1..].starts_with(delimiter) => { - let rest = input.advance(i + 1 + delimiter.len()); - return Ok(literal_suffix(rest)); - } - b'\r' => match bytes.next() { - Some((_, b'\n')) => {} - _ => break, - }, - _ => {} - } - } - Err(Reject) -} - -fn byte_string(input: Cursor) -> Result<Cursor, Reject> { - if let Ok(input) = input.parse("b\"") { - cooked_byte_string(input) - } else if let Ok(input) = 
input.parse("br") { - raw_byte_string(input) - } else { - Err(Reject) - } -} - -fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> { - let mut bytes = input.bytes().enumerate(); - while let Some((offset, b)) = bytes.next() { - match b { - b'"' => { - let input = input.advance(offset + 1); - return Ok(literal_suffix(input)); - } - b'\r' => match bytes.next() { - Some((_, b'\n')) => {} - _ => break, - }, - b'\\' => match bytes.next() { - Some((_, b'x')) => { - backslash_x_byte(&mut bytes)?; - } - Some((_, b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"')) => {} - Some((newline, b @ (b'\n' | b'\r'))) => { - input = input.advance(newline + 1); - trailing_backslash(&mut input, b)?; - bytes = input.bytes().enumerate(); - } - _ => break, - }, - b if b.is_ascii() => {} - _ => break, - } - } - Err(Reject) -} - -fn delimiter_of_raw_string(input: Cursor) -> PResult<&str> { - for (i, byte) in input.bytes().enumerate() { - match byte { - b'"' => { - if i > 255 { - // https://github.com/rust-lang/rust/pull/95251 - return Err(Reject); - } - return Ok((input.advance(i + 1), &input.rest[..i])); - } - b'#' => {} - _ => break, - } - } - Err(Reject) -} - -fn raw_byte_string(input: Cursor) -> Result<Cursor, Reject> { - let (input, delimiter) = delimiter_of_raw_string(input)?; - let mut bytes = input.bytes().enumerate(); - while let Some((i, byte)) = bytes.next() { - match byte { - b'"' if input.rest[i + 1..].starts_with(delimiter) => { - let rest = input.advance(i + 1 + delimiter.len()); - return Ok(literal_suffix(rest)); - } - b'\r' => match bytes.next() { - Some((_, b'\n')) => {} - _ => break, - }, - other => { - if !other.is_ascii() { - break; - } - } - } - } - Err(Reject) -} - -fn c_string(input: Cursor) -> Result<Cursor, Reject> { - if let Ok(input) = input.parse("c\"") { - cooked_c_string(input) - } else if let Ok(input) = input.parse("cr") { - raw_c_string(input) - } else { - Err(Reject) - } -} - -fn raw_c_string(input: Cursor) -> Result<Cursor, Reject> { - let (input, delimiter) = delimiter_of_raw_string(input)?; - let mut bytes = input.bytes().enumerate(); - while let Some((i, byte)) = bytes.next() { - match byte { - b'"' if input.rest[i + 1..].starts_with(delimiter) => { - let rest = input.advance(i + 1 + delimiter.len()); - return Ok(literal_suffix(rest)); - } - b'\r' => match bytes.next() { - Some((_, b'\n')) => {} - _ => break, - }, - b'\0' => break, - _ => {} - } - } - Err(Reject) -} - -fn cooked_c_string(mut input: Cursor) -> Result<Cursor, Reject> { - let mut chars = input.char_indices(); - - while let Some((i, ch)) = chars.next() { - match ch { - '"' => { - let input = input.advance(i + 1); - return Ok(literal_suffix(input)); - } - '\r' => match chars.next() { - Some((_, '\n')) => {} - _ => break, - }, - '\\' => match chars.next() { - Some((_, 'x')) => { - backslash_x_nonzero(&mut chars)?; - } - Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"')) => {} - Some((_, 'u')) => { - if backslash_u(&mut chars)? 
== '\0' { - break; - } - } - Some((newline, ch @ ('\n' | '\r'))) => { - input = input.advance(newline + 1); - trailing_backslash(&mut input, ch as u8)?; - chars = input.char_indices(); - } - _ => break, - }, - '\0' => break, - _ch => {} - } - } - Err(Reject) -} - -fn byte(input: Cursor) -> Result<Cursor, Reject> { - let input = input.parse("b'")?; - let mut bytes = input.bytes().enumerate(); - let ok = match bytes.next().map(|(_, b)| b) { - Some(b'\\') => match bytes.next().map(|(_, b)| b) { - Some(b'x') => backslash_x_byte(&mut bytes).is_ok(), - Some(b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"') => true, - _ => false, - }, - b => b.is_some(), - }; - if !ok { - return Err(Reject); - } - let (offset, _) = bytes.next().ok_or(Reject)?; - if !input.chars().as_str().is_char_boundary(offset) { - return Err(Reject); - } - let input = input.advance(offset).parse("'")?; - Ok(literal_suffix(input)) -} - -fn character(input: Cursor) -> Result<Cursor, Reject> { - let input = input.parse("'")?; - let mut chars = input.char_indices(); - let ok = match chars.next().map(|(_, ch)| ch) { - Some('\\') => match chars.next().map(|(_, ch)| ch) { - Some('x') => backslash_x_char(&mut chars).is_ok(), - Some('u') => backslash_u(&mut chars).is_ok(), - Some('n' | 'r' | 't' | '\\' | '0' | '\'' | '"') => true, - _ => false, - }, - ch => ch.is_some(), - }; - if !ok { - return Err(Reject); - } - let (idx, _) = chars.next().ok_or(Reject)?; - let input = input.advance(idx).parse("'")?; - Ok(literal_suffix(input)) -} - -macro_rules! next_ch { - ($chars:ident @ $pat:pat) => { - match $chars.next() { - Some((_, ch)) => match ch { - $pat => ch, - _ => return Err(Reject), - }, - None => return Err(Reject), - } - }; -} - -fn backslash_x_char<I>(chars: &mut I) -> Result<(), Reject> -where - I: Iterator<Item = (usize, char)>, -{ - next_ch!(chars @ '0'..='7'); - next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); - Ok(()) -} - -fn backslash_x_byte<I>(chars: &mut I) -> Result<(), Reject> -where - I: Iterator<Item = (usize, u8)>, -{ - next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); - next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); - Ok(()) -} - -fn backslash_x_nonzero<I>(chars: &mut I) -> Result<(), Reject> -where - I: Iterator<Item = (usize, char)>, -{ - let first = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); - let second = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); - if first == '0' && second == '0' { - Err(Reject) - } else { - Ok(()) - } -} - -fn backslash_u<I>(chars: &mut I) -> Result<char, Reject> -where - I: Iterator<Item = (usize, char)>, -{ - next_ch!(chars @ '{'); - let mut value = 0; - let mut len = 0; - for (_, ch) in chars { - let digit = match ch { - '0'..='9' => ch as u8 - b'0', - 'a'..='f' => 10 + ch as u8 - b'a', - 'A'..='F' => 10 + ch as u8 - b'A', - '_' if len > 0 => continue, - '}' if len > 0 => return char::from_u32(value).ok_or(Reject), - _ => break, - }; - if len == 6 { - break; - } - value *= 0x10; - value += u32::from(digit); - len += 1; - } - Err(Reject) -} - -fn trailing_backslash(input: &mut Cursor, mut last: u8) -> Result<(), Reject> { - let mut whitespace = input.bytes().enumerate(); - loop { - if last == b'\r' && whitespace.next().map_or(true, |(_, b)| b != b'\n') { - return Err(Reject); - } - match whitespace.next() { - Some((_, b @ (b' ' | b'\t' | b'\n' | b'\r'))) => { - last = b; - } - Some((offset, _)) => { - *input = input.advance(offset); - return Ok(()); - } - None => return Err(Reject), - } - } -} - -fn float(input: Cursor) -> Result<Cursor, Reject> { 
- let mut rest = float_digits(input)?; - if let Some(ch) = rest.chars().next() { - if is_ident_start(ch) { - rest = ident_not_raw(rest)?.0; - } - } - word_break(rest) -} - -fn float_digits(input: Cursor) -> Result<Cursor, Reject> { - let mut chars = input.chars().peekable(); - match chars.next() { - Some(ch) if '0' <= ch && ch <= '9' => {} - _ => return Err(Reject), - } - - let mut len = 1; - let mut has_dot = false; - let mut has_exp = false; - while let Some(&ch) = chars.peek() { - match ch { - '0'..='9' | '_' => { - chars.next(); - len += 1; - } - '.' => { - if has_dot { - break; - } - chars.next(); - if chars - .peek() - .map_or(false, |&ch| ch == '.' || is_ident_start(ch)) - { - return Err(Reject); - } - len += 1; - has_dot = true; - } - 'e' | 'E' => { - chars.next(); - len += 1; - has_exp = true; - break; - } - _ => break, - } - } - - if !(has_dot || has_exp) { - return Err(Reject); - } - - if has_exp { - let token_before_exp = if has_dot { - Ok(input.advance(len - 1)) - } else { - Err(Reject) - }; - let mut has_sign = false; - let mut has_exp_value = false; - while let Some(&ch) = chars.peek() { - match ch { - '+' | '-' => { - if has_exp_value { - break; - } - if has_sign { - return token_before_exp; - } - chars.next(); - len += 1; - has_sign = true; - } - '0'..='9' => { - chars.next(); - len += 1; - has_exp_value = true; - } - '_' => { - chars.next(); - len += 1; - } - _ => break, - } - } - if !has_exp_value { - return token_before_exp; - } - } - - Ok(input.advance(len)) -} - -fn int(input: Cursor) -> Result<Cursor, Reject> { - let mut rest = digits(input)?; - if let Some(ch) = rest.chars().next() { - if is_ident_start(ch) { - rest = ident_not_raw(rest)?.0; - } - } - word_break(rest) -} - -fn digits(mut input: Cursor) -> Result<Cursor, Reject> { - let base = if input.starts_with("0x") { - input = input.advance(2); - 16 - } else if input.starts_with("0o") { - input = input.advance(2); - 8 - } else if input.starts_with("0b") { - input = input.advance(2); - 2 - } else { - 10 - }; - - let mut len = 0; - let mut empty = true; - for b in input.bytes() { - match b { - b'0'..=b'9' => { - let digit = (b - b'0') as u64; - if digit >= base { - return Err(Reject); - } - } - b'a'..=b'f' => { - let digit = 10 + (b - b'a') as u64; - if digit >= base { - break; - } - } - b'A'..=b'F' => { - let digit = 10 + (b - b'A') as u64; - if digit >= base { - break; - } - } - b'_' => { - if empty && base == 10 { - return Err(Reject); - } - len += 1; - continue; - } - _ => break, - } - len += 1; - empty = false; - } - if empty { - Err(Reject) - } else { - Ok(input.advance(len)) - } -} - -fn punct(input: Cursor) -> PResult<Punct> { - let (rest, ch) = punct_char(input)?; - if ch == '\'' { - if ident_any(rest)?.0.starts_with_char('\'') { - Err(Reject) - } else { - Ok((rest, Punct::new('\'', Spacing::Joint))) - } - } else { - let kind = match punct_char(rest) { - Ok(_) => Spacing::Joint, - Err(Reject) => Spacing::Alone, - }; - Ok((rest, Punct::new(ch, kind))) - } -} - -fn punct_char(input: Cursor) -> PResult<char> { - if input.starts_with("//") || input.starts_with("/*") { - // Do not accept `/` of a comment as a punct. 
- return Err(Reject); - } - - let mut chars = input.chars(); - let first = match chars.next() { - Some(ch) => ch, - None => { - return Err(Reject); - } - }; - let recognized = "~!@#$%^&*-=+|;:,<.>/?'"; - if recognized.contains(first) { - Ok((input.advance(first.len_utf8()), first)) - } else { - Err(Reject) - } -} - -fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult<'a, ()> { - #[cfg(span_locations)] - let lo = input.off; - let (rest, (comment, inner)) = doc_comment_contents(input)?; - let fallback_span = Span { - #[cfg(span_locations)] - lo, - #[cfg(span_locations)] - hi: rest.off, - }; - let span = crate::Span::_new_fallback(fallback_span); - - let mut scan_for_bare_cr = comment; - while let Some(cr) = scan_for_bare_cr.find('\r') { - let rest = &scan_for_bare_cr[cr + 1..]; - if !rest.starts_with('\n') { - return Err(Reject); - } - scan_for_bare_cr = rest; - } - - let mut pound = Punct::new('#', Spacing::Alone); - pound.set_span(span); - trees.push_token_from_parser(TokenTree::Punct(pound)); - - if inner { - let mut bang = Punct::new('!', Spacing::Alone); - bang.set_span(span); - trees.push_token_from_parser(TokenTree::Punct(bang)); - } - - let doc_ident = crate::Ident::_new_fallback(Ident::new_unchecked("doc", fallback_span)); - let mut equal = Punct::new('=', Spacing::Alone); - equal.set_span(span); - let mut literal = crate::Literal::_new_fallback(Literal::string(comment)); - literal.set_span(span); - let mut bracketed = TokenStreamBuilder::with_capacity(3); - bracketed.push_token_from_parser(TokenTree::Ident(doc_ident)); - bracketed.push_token_from_parser(TokenTree::Punct(equal)); - bracketed.push_token_from_parser(TokenTree::Literal(literal)); - let group = Group::new(Delimiter::Bracket, bracketed.build()); - let mut group = crate::Group::_new_fallback(group); - group.set_span(span); - trees.push_token_from_parser(TokenTree::Group(group)); - - Ok((rest, ())) -} - -fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> { - if input.starts_with("//!") { - let input = input.advance(3); - let (input, s) = take_until_newline_or_eof(input); - Ok((input, (s, true))) - } else if input.starts_with("/*!") { - let (input, s) = block_comment(input)?; - Ok((input, (&s[3..s.len() - 2], true))) - } else if input.starts_with("///") { - let input = input.advance(3); - if input.starts_with_char('/') { - return Err(Reject); - } - let (input, s) = take_until_newline_or_eof(input); - Ok((input, (s, false))) - } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') { - let (input, s) = block_comment(input)?; - Ok((input, (&s[3..s.len() - 2], false))) - } else { - Err(Reject) - } -} - -fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) { - let chars = input.char_indices(); - - for (i, ch) in chars { - if ch == '\n' { - return (input.advance(i), &input.rest[..i]); - } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') { - return (input.advance(i + 1), &input.rest[..i]); - } - } - - (input.advance(input.len()), input.rest) -} diff --git a/vendor/proc-macro2/src/rcvec.rs b/vendor/proc-macro2/src/rcvec.rs deleted file mode 100644 index 23edc77d..00000000 --- a/vendor/proc-macro2/src/rcvec.rs +++ /dev/null @@ -1,146 +0,0 @@ -use alloc::rc::Rc; -use alloc::vec; -use core::mem; -use core::panic::RefUnwindSafe; -use core::slice; - -pub(crate) struct RcVec<T> { - inner: Rc<Vec<T>>, -} - -pub(crate) struct RcVecBuilder<T> { - inner: Vec<T>, -} - -pub(crate) struct RcVecMut<'a, T> { - inner: &'a mut Vec<T>, -} - -#[derive(Clone)] -pub(crate) 
struct RcVecIntoIter<T> { - inner: vec::IntoIter<T>, -} - -impl<T> RcVec<T> { - pub(crate) fn is_empty(&self) -> bool { - self.inner.is_empty() - } - - pub(crate) fn len(&self) -> usize { - self.inner.len() - } - - pub(crate) fn iter(&self) -> slice::Iter<T> { - self.inner.iter() - } - - pub(crate) fn make_mut(&mut self) -> RcVecMut<T> - where - T: Clone, - { - RcVecMut { - inner: Rc::make_mut(&mut self.inner), - } - } - - pub(crate) fn get_mut(&mut self) -> Option<RcVecMut<T>> { - let inner = Rc::get_mut(&mut self.inner)?; - Some(RcVecMut { inner }) - } - - pub(crate) fn make_owned(mut self) -> RcVecBuilder<T> - where - T: Clone, - { - let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) { - mem::take(owned) - } else { - Vec::clone(&self.inner) - }; - RcVecBuilder { inner: vec } - } -} - -impl<T> RcVecBuilder<T> { - pub(crate) fn new() -> Self { - RcVecBuilder { inner: Vec::new() } - } - - pub(crate) fn with_capacity(cap: usize) -> Self { - RcVecBuilder { - inner: Vec::with_capacity(cap), - } - } - - pub(crate) fn push(&mut self, element: T) { - self.inner.push(element); - } - - pub(crate) fn extend(&mut self, iter: impl IntoIterator<Item = T>) { - self.inner.extend(iter); - } - - pub(crate) fn as_mut(&mut self) -> RcVecMut<T> { - RcVecMut { - inner: &mut self.inner, - } - } - - pub(crate) fn build(self) -> RcVec<T> { - RcVec { - inner: Rc::new(self.inner), - } - } -} - -impl<'a, T> RcVecMut<'a, T> { - pub(crate) fn push(&mut self, element: T) { - self.inner.push(element); - } - - pub(crate) fn extend(&mut self, iter: impl IntoIterator<Item = T>) { - self.inner.extend(iter); - } - - pub(crate) fn as_mut(&mut self) -> RcVecMut<T> { - RcVecMut { inner: self.inner } - } - - pub(crate) fn take(self) -> RcVecBuilder<T> { - let vec = mem::take(self.inner); - RcVecBuilder { inner: vec } - } -} - -impl<T> Clone for RcVec<T> { - fn clone(&self) -> Self { - RcVec { - inner: Rc::clone(&self.inner), - } - } -} - -impl<T> IntoIterator for RcVecBuilder<T> { - type Item = T; - type IntoIter = RcVecIntoIter<T>; - - fn into_iter(self) -> Self::IntoIter { - RcVecIntoIter { - inner: self.inner.into_iter(), - } - } -} - -impl<T> Iterator for RcVecIntoIter<T> { - type Item = T; - - fn next(&mut self) -> Option<Self::Item> { - self.inner.next() - } - - fn size_hint(&self) -> (usize, Option<usize>) { - self.inner.size_hint() - } -} - -impl<T> RefUnwindSafe for RcVec<T> where T: RefUnwindSafe {} diff --git a/vendor/proc-macro2/src/wrapper.rs b/vendor/proc-macro2/src/wrapper.rs deleted file mode 100644 index ee31fa6c..00000000 --- a/vendor/proc-macro2/src/wrapper.rs +++ /dev/null @@ -1,972 +0,0 @@ -use crate::detection::inside_proc_macro; -use crate::fallback::{self, FromStr2 as _}; -#[cfg(span_locations)] -use crate::location::LineColumn; -use crate::{Delimiter, Punct, Spacing, TokenTree}; -use core::fmt::{self, Debug, Display}; -#[cfg(span_locations)] -use core::ops::Range; -use core::ops::RangeBounds; -use std::ffi::CStr; -#[cfg(super_unstable)] -use std::path::PathBuf; - -#[derive(Clone)] -pub(crate) enum TokenStream { - Compiler(DeferredTokenStream), - Fallback(fallback::TokenStream), -} - -// Work around https://github.com/rust-lang/rust/issues/65080. -// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote, -// we hold on to the appended tokens and do proc_macro::TokenStream::extend as -// late as possible to batch together consecutive uses of the Extend impl. 
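A conceptual sketch, in plain Rust with hypothetical stand-in types (not the crate's private `DeferredTokenStream`), of the batching idea the comment above describes: buffer appended tokens locally and flush them across the expensive boundary in a single call instead of once per `extend`.

// `String` stands in for a token; `committed` stands in for the
// proc_macro::TokenStream on the far side of the bridge.
struct Deferred {
    committed: Vec<String>,
    extra: Vec<String>,
}

impl Deferred {
    fn extend<I: IntoIterator<Item = String>>(&mut self, iter: I) {
        // Cheap local buffering; nothing crosses the "bridge" here.
        self.extra.extend(iter);
    }

    fn evaluate_now(&mut self) {
        // One batched flush instead of one call per `extend`.
        if !self.extra.is_empty() {
            self.committed.append(&mut self.extra);
        }
    }
}

fn main() {
    let mut d = Deferred { committed: Vec::new(), extra: Vec::new() };
    d.extend(["a".to_string(), "b".to_string()]);
    d.extend(["c".to_string()]);
    d.evaluate_now();
    assert_eq!(d.committed, ["a", "b", "c"]);
}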
-#[derive(Clone)] -pub(crate) struct DeferredTokenStream { - stream: proc_macro::TokenStream, - extra: Vec<proc_macro::TokenTree>, -} - -pub(crate) enum LexError { - Compiler(proc_macro::LexError), - Fallback(fallback::LexError), - - // Rustc was supposed to return a LexError, but it panicked instead. - // https://github.com/rust-lang/rust/issues/58736 - CompilerPanic, -} - -#[cold] -fn mismatch(line: u32) -> ! { - #[cfg(procmacro2_backtrace)] - { - let backtrace = std::backtrace::Backtrace::force_capture(); - panic!("compiler/fallback mismatch L{}\n\n{}", line, backtrace) - } - #[cfg(not(procmacro2_backtrace))] - { - panic!("compiler/fallback mismatch L{}", line) - } -} - -impl DeferredTokenStream { - fn new(stream: proc_macro::TokenStream) -> Self { - DeferredTokenStream { - stream, - extra: Vec::new(), - } - } - - fn is_empty(&self) -> bool { - self.stream.is_empty() && self.extra.is_empty() - } - - fn evaluate_now(&mut self) { - // If-check provides a fast short circuit for the common case of `extra` - // being empty, which saves a round trip over the proc macro bridge. - // Improves macro expansion time in winrt by 6% in debug mode. - if !self.extra.is_empty() { - self.stream.extend(self.extra.drain(..)); - } - } - - fn into_token_stream(mut self) -> proc_macro::TokenStream { - self.evaluate_now(); - self.stream - } -} - -impl TokenStream { - pub(crate) fn new() -> Self { - if inside_proc_macro() { - TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new())) - } else { - TokenStream::Fallback(fallback::TokenStream::new()) - } - } - - pub(crate) fn from_str_checked(src: &str) -> Result<Self, LexError> { - if inside_proc_macro() { - Ok(TokenStream::Compiler(DeferredTokenStream::new( - proc_macro::TokenStream::from_str_checked(src)?, - ))) - } else { - Ok(TokenStream::Fallback( - fallback::TokenStream::from_str_checked(src)?, - )) - } - } - - pub(crate) fn is_empty(&self) -> bool { - match self { - TokenStream::Compiler(tts) => tts.is_empty(), - TokenStream::Fallback(tts) => tts.is_empty(), - } - } - - fn unwrap_nightly(self) -> proc_macro::TokenStream { - match self { - TokenStream::Compiler(s) => s.into_token_stream(), - TokenStream::Fallback(_) => mismatch(line!()), - } - } - - fn unwrap_stable(self) -> fallback::TokenStream { - match self { - TokenStream::Compiler(_) => mismatch(line!()), - TokenStream::Fallback(s) => s, - } - } -} - -impl Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f), - TokenStream::Fallback(tts) => Display::fmt(tts, f), - } - } -} - -impl From<proc_macro::TokenStream> for TokenStream { - fn from(inner: proc_macro::TokenStream) -> Self { - TokenStream::Compiler(DeferredTokenStream::new(inner)) - } -} - -impl From<TokenStream> for proc_macro::TokenStream { - fn from(inner: TokenStream) -> Self { - match inner { - TokenStream::Compiler(inner) => inner.into_token_stream(), - TokenStream::Fallback(inner) => { - proc_macro::TokenStream::from_str_unchecked(&inner.to_string()) - } - } - } -} - -impl From<fallback::TokenStream> for TokenStream { - fn from(inner: fallback::TokenStream) -> Self { - TokenStream::Fallback(inner) - } -} - -// Assumes inside_proc_macro(). 
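A small sketch of the public entry points these conversions serve, assuming `proc-macro2` as a dependency; outside a procedural macro the `From`/`FromIterator` impls take the fallback branch, inside one they defer to the compiler's token stream.

use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};

fn main() {
    let trees = vec![
        TokenTree::Ident(Ident::new("foo", Span::call_site())),
        TokenTree::Punct(Punct::new('!', Spacing::Alone)),
        TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
    ];
    // FromIterator<TokenTree> builds a stream from individual trees.
    let stream: TokenStream = trees.into_iter().collect();
    assert_eq!(stream.clone().into_iter().count(), 3);
    println!("{}", stream); // typically renders as `foo ! ()`
}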
-fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree { - match token { - TokenTree::Group(tt) => proc_macro::TokenTree::Group(tt.inner.unwrap_nightly()), - TokenTree::Punct(tt) => { - let spacing = match tt.spacing() { - Spacing::Joint => proc_macro::Spacing::Joint, - Spacing::Alone => proc_macro::Spacing::Alone, - }; - let mut punct = proc_macro::Punct::new(tt.as_char(), spacing); - punct.set_span(tt.span().inner.unwrap_nightly()); - proc_macro::TokenTree::Punct(punct) - } - TokenTree::Ident(tt) => proc_macro::TokenTree::Ident(tt.inner.unwrap_nightly()), - TokenTree::Literal(tt) => proc_macro::TokenTree::Literal(tt.inner.unwrap_nightly()), - } -} - -impl From<TokenTree> for TokenStream { - fn from(token: TokenTree) -> Self { - if inside_proc_macro() { - TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::from( - into_compiler_token(token), - ))) - } else { - TokenStream::Fallback(fallback::TokenStream::from(token)) - } - } -} - -impl FromIterator<TokenTree> for TokenStream { - fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self { - if inside_proc_macro() { - TokenStream::Compiler(DeferredTokenStream::new( - trees.into_iter().map(into_compiler_token).collect(), - )) - } else { - TokenStream::Fallback(trees.into_iter().collect()) - } - } -} - -impl FromIterator<TokenStream> for TokenStream { - fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self { - let mut streams = streams.into_iter(); - match streams.next() { - Some(TokenStream::Compiler(mut first)) => { - first.evaluate_now(); - first.stream.extend(streams.map(|s| match s { - TokenStream::Compiler(s) => s.into_token_stream(), - TokenStream::Fallback(_) => mismatch(line!()), - })); - TokenStream::Compiler(first) - } - Some(TokenStream::Fallback(mut first)) => { - first.extend(streams.map(|s| match s { - TokenStream::Fallback(s) => s, - TokenStream::Compiler(_) => mismatch(line!()), - })); - TokenStream::Fallback(first) - } - None => TokenStream::new(), - } - } -} - -impl Extend<TokenTree> for TokenStream { - fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) { - match self { - TokenStream::Compiler(tts) => { - // Here is the reason for DeferredTokenStream. 
- for token in stream { - tts.extra.push(into_compiler_token(token)); - } - } - TokenStream::Fallback(tts) => tts.extend(stream), - } - } -} - -impl Extend<TokenStream> for TokenStream { - fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) { - match self { - TokenStream::Compiler(tts) => { - tts.evaluate_now(); - tts.stream - .extend(streams.into_iter().map(TokenStream::unwrap_nightly)); - } - TokenStream::Fallback(tts) => { - tts.extend(streams.into_iter().map(TokenStream::unwrap_stable)); - } - } - } -} - -impl Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f), - TokenStream::Fallback(tts) => Debug::fmt(tts, f), - } - } -} - -impl LexError { - pub(crate) fn span(&self) -> Span { - match self { - LexError::Compiler(_) | LexError::CompilerPanic => Span::call_site(), - LexError::Fallback(e) => Span::Fallback(e.span()), - } - } -} - -impl From<proc_macro::LexError> for LexError { - fn from(e: proc_macro::LexError) -> Self { - LexError::Compiler(e) - } -} - -impl From<fallback::LexError> for LexError { - fn from(e: fallback::LexError) -> Self { - LexError::Fallback(e) - } -} - -impl Debug for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - LexError::Compiler(e) => Debug::fmt(e, f), - LexError::Fallback(e) => Debug::fmt(e, f), - LexError::CompilerPanic => { - let fallback = fallback::LexError::call_site(); - Debug::fmt(&fallback, f) - } - } - } -} - -impl Display for LexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - LexError::Compiler(e) => Display::fmt(e, f), - LexError::Fallback(e) => Display::fmt(e, f), - LexError::CompilerPanic => { - let fallback = fallback::LexError::call_site(); - Display::fmt(&fallback, f) - } - } - } -} - -#[derive(Clone)] -pub(crate) enum TokenTreeIter { - Compiler(proc_macro::token_stream::IntoIter), - Fallback(fallback::TokenTreeIter), -} - -impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = TokenTreeIter; - - fn into_iter(self) -> TokenTreeIter { - match self { - TokenStream::Compiler(tts) => { - TokenTreeIter::Compiler(tts.into_token_stream().into_iter()) - } - TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()), - } - } -} - -impl Iterator for TokenTreeIter { - type Item = TokenTree; - - fn next(&mut self) -> Option<TokenTree> { - let token = match self { - TokenTreeIter::Compiler(iter) => iter.next()?, - TokenTreeIter::Fallback(iter) => return iter.next(), - }; - Some(match token { - proc_macro::TokenTree::Group(tt) => { - TokenTree::Group(crate::Group::_new(Group::Compiler(tt))) - } - proc_macro::TokenTree::Punct(tt) => { - let spacing = match tt.spacing() { - proc_macro::Spacing::Joint => Spacing::Joint, - proc_macro::Spacing::Alone => Spacing::Alone, - }; - let mut o = Punct::new(tt.as_char(), spacing); - o.set_span(crate::Span::_new(Span::Compiler(tt.span()))); - TokenTree::Punct(o) - } - proc_macro::TokenTree::Ident(s) => { - TokenTree::Ident(crate::Ident::_new(Ident::Compiler(s))) - } - proc_macro::TokenTree::Literal(l) => { - TokenTree::Literal(crate::Literal::_new(Literal::Compiler(l))) - } - }) - } - - fn size_hint(&self) -> (usize, Option<usize>) { - match self { - TokenTreeIter::Compiler(tts) => tts.size_hint(), - TokenTreeIter::Fallback(tts) => tts.size_hint(), - } - } -} - -#[derive(Copy, Clone)] -pub(crate) enum Span { - Compiler(proc_macro::Span), - Fallback(fallback::Span), -} - -impl 
Span { - pub(crate) fn call_site() -> Self { - if inside_proc_macro() { - Span::Compiler(proc_macro::Span::call_site()) - } else { - Span::Fallback(fallback::Span::call_site()) - } - } - - pub(crate) fn mixed_site() -> Self { - if inside_proc_macro() { - Span::Compiler(proc_macro::Span::mixed_site()) - } else { - Span::Fallback(fallback::Span::mixed_site()) - } - } - - #[cfg(super_unstable)] - pub(crate) fn def_site() -> Self { - if inside_proc_macro() { - Span::Compiler(proc_macro::Span::def_site()) - } else { - Span::Fallback(fallback::Span::def_site()) - } - } - - pub(crate) fn resolved_at(&self, other: Span) -> Span { - match (self, other) { - (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)), - (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)), - (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()), - } - } - - pub(crate) fn located_at(&self, other: Span) -> Span { - match (self, other) { - (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)), - (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)), - (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()), - } - } - - pub(crate) fn unwrap(self) -> proc_macro::Span { - match self { - Span::Compiler(s) => s, - Span::Fallback(_) => panic!("proc_macro::Span is only available in procedural macros"), - } - } - - #[cfg(span_locations)] - pub(crate) fn byte_range(&self) -> Range<usize> { - match self { - #[cfg(proc_macro_span)] - Span::Compiler(s) => s.byte_range(), - #[cfg(not(proc_macro_span))] - Span::Compiler(_) => 0..0, - Span::Fallback(s) => s.byte_range(), - } - } - - #[cfg(span_locations)] - pub(crate) fn start(&self) -> LineColumn { - match self { - #[cfg(proc_macro_span)] - Span::Compiler(s) => LineColumn { - line: s.line(), - column: s.column().saturating_sub(1), - }, - #[cfg(not(proc_macro_span))] - Span::Compiler(_) => LineColumn { line: 0, column: 0 }, - Span::Fallback(s) => s.start(), - } - } - - #[cfg(span_locations)] - pub(crate) fn end(&self) -> LineColumn { - match self { - #[cfg(proc_macro_span)] - Span::Compiler(s) => { - let end = s.end(); - LineColumn { - line: end.line(), - column: end.column().saturating_sub(1), - } - } - #[cfg(not(proc_macro_span))] - Span::Compiler(_) => LineColumn { line: 0, column: 0 }, - Span::Fallback(s) => s.end(), - } - } - - #[cfg(super_unstable)] - pub(crate) fn file(&self) -> String { - match self { - Span::Compiler(s) => s.file(), - Span::Fallback(s) => s.file(), - } - } - - #[cfg(super_unstable)] - pub(crate) fn local_file(&self) -> Option<PathBuf> { - match self { - Span::Compiler(s) => s.local_file(), - Span::Fallback(s) => s.local_file(), - } - } - - pub(crate) fn join(&self, other: Span) -> Option<Span> { - let ret = match (self, other) { - #[cfg(proc_macro_span)] - (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.join(b)?), - (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.join(b)?), - _ => return None, - }; - Some(ret) - } - - #[cfg(super_unstable)] - pub(crate) fn eq(&self, other: &Span) -> bool { - match (self, other) { - (Span::Compiler(a), Span::Compiler(b)) => a.eq(b), - (Span::Fallback(a), Span::Fallback(b)) => a.eq(b), - _ => false, - } - } - - pub(crate) fn source_text(&self) -> Option<String> { - match self { - #[cfg(not(no_source_text))] - Span::Compiler(s) => s.source_text(), - #[cfg(no_source_text)] - Span::Compiler(_) => 
None, - Span::Fallback(s) => s.source_text(), - } - } - - fn unwrap_nightly(self) -> proc_macro::Span { - match self { - Span::Compiler(s) => s, - Span::Fallback(_) => mismatch(line!()), - } - } -} - -impl From<proc_macro::Span> for crate::Span { - fn from(proc_span: proc_macro::Span) -> Self { - crate::Span::_new(Span::Compiler(proc_span)) - } -} - -impl From<fallback::Span> for Span { - fn from(inner: fallback::Span) -> Self { - Span::Fallback(inner) - } -} - -impl Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Span::Compiler(s) => Debug::fmt(s, f), - Span::Fallback(s) => Debug::fmt(s, f), - } - } -} - -pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) { - match span { - Span::Compiler(s) => { - debug.field("span", &s); - } - Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s), - } -} - -#[derive(Clone)] -pub(crate) enum Group { - Compiler(proc_macro::Group), - Fallback(fallback::Group), -} - -impl Group { - pub(crate) fn new(delimiter: Delimiter, stream: TokenStream) -> Self { - match stream { - TokenStream::Compiler(tts) => { - let delimiter = match delimiter { - Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis, - Delimiter::Bracket => proc_macro::Delimiter::Bracket, - Delimiter::Brace => proc_macro::Delimiter::Brace, - Delimiter::None => proc_macro::Delimiter::None, - }; - Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream())) - } - TokenStream::Fallback(stream) => { - Group::Fallback(fallback::Group::new(delimiter, stream)) - } - } - } - - pub(crate) fn delimiter(&self) -> Delimiter { - match self { - Group::Compiler(g) => match g.delimiter() { - proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis, - proc_macro::Delimiter::Bracket => Delimiter::Bracket, - proc_macro::Delimiter::Brace => Delimiter::Brace, - proc_macro::Delimiter::None => Delimiter::None, - }, - Group::Fallback(g) => g.delimiter(), - } - } - - pub(crate) fn stream(&self) -> TokenStream { - match self { - Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())), - Group::Fallback(g) => TokenStream::Fallback(g.stream()), - } - } - - pub(crate) fn span(&self) -> Span { - match self { - Group::Compiler(g) => Span::Compiler(g.span()), - Group::Fallback(g) => Span::Fallback(g.span()), - } - } - - pub(crate) fn span_open(&self) -> Span { - match self { - Group::Compiler(g) => Span::Compiler(g.span_open()), - Group::Fallback(g) => Span::Fallback(g.span_open()), - } - } - - pub(crate) fn span_close(&self) -> Span { - match self { - Group::Compiler(g) => Span::Compiler(g.span_close()), - Group::Fallback(g) => Span::Fallback(g.span_close()), - } - } - - pub(crate) fn set_span(&mut self, span: Span) { - match (self, span) { - (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s), - (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s), - (Group::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Group::Fallback(_), Span::Compiler(_)) => mismatch(line!()), - } - } - - fn unwrap_nightly(self) -> proc_macro::Group { - match self { - Group::Compiler(g) => g, - Group::Fallback(_) => mismatch(line!()), - } - } -} - -impl From<fallback::Group> for Group { - fn from(g: fallback::Group) -> Self { - Group::Fallback(g) - } -} - -impl Display for Group { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - match self { - Group::Compiler(group) => Display::fmt(group, formatter), - Group::Fallback(group) => Display::fmt(group, formatter), - } - } -} - 
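A brief sketch of the `Group` accessors wrapped above, assuming `proc-macro2` as a dependency and running outside a real macro invocation, where newly built groups start with a `call_site()` span:

use proc_macro2::{Delimiter, Group, TokenStream};
use std::str::FromStr;

fn main() {
    let inner = TokenStream::from_str("1 + 2").unwrap();
    let group = Group::new(Delimiter::Bracket, inner);

    assert_eq!(group.delimiter(), Delimiter::Bracket);
    assert_eq!(group.stream().into_iter().count(), 3); // `1`, `+`, `2`
    let _span = group.span(); // default span of a freshly constructed group
}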
-impl Debug for Group { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - match self { - Group::Compiler(group) => Debug::fmt(group, formatter), - Group::Fallback(group) => Debug::fmt(group, formatter), - } - } -} - -#[derive(Clone)] -pub(crate) enum Ident { - Compiler(proc_macro::Ident), - Fallback(fallback::Ident), -} - -impl Ident { - #[track_caller] - pub(crate) fn new_checked(string: &str, span: Span) -> Self { - match span { - Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)), - Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_checked(string, s)), - } - } - - #[track_caller] - pub(crate) fn new_raw_checked(string: &str, span: Span) -> Self { - match span { - Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)), - Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw_checked(string, s)), - } - } - - pub(crate) fn span(&self) -> Span { - match self { - Ident::Compiler(t) => Span::Compiler(t.span()), - Ident::Fallback(t) => Span::Fallback(t.span()), - } - } - - pub(crate) fn set_span(&mut self, span: Span) { - match (self, span) { - (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s), - (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s), - (Ident::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Ident::Fallback(_), Span::Compiler(_)) => mismatch(line!()), - } - } - - fn unwrap_nightly(self) -> proc_macro::Ident { - match self { - Ident::Compiler(s) => s, - Ident::Fallback(_) => mismatch(line!()), - } - } -} - -impl From<fallback::Ident> for Ident { - fn from(inner: fallback::Ident) -> Self { - Ident::Fallback(inner) - } -} - -impl PartialEq for Ident { - fn eq(&self, other: &Ident) -> bool { - match (self, other) { - (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(), - (Ident::Fallback(t), Ident::Fallback(o)) => t == o, - (Ident::Compiler(_), Ident::Fallback(_)) => mismatch(line!()), - (Ident::Fallback(_), Ident::Compiler(_)) => mismatch(line!()), - } - } -} - -impl<T> PartialEq<T> for Ident -where - T: ?Sized + AsRef<str>, -{ - fn eq(&self, other: &T) -> bool { - let other = other.as_ref(); - match self { - Ident::Compiler(t) => t.to_string() == other, - Ident::Fallback(t) => t == other, - } - } -} - -impl Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Ident::Compiler(t) => Display::fmt(t, f), - Ident::Fallback(t) => Display::fmt(t, f), - } - } -} - -impl Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Ident::Compiler(t) => Debug::fmt(t, f), - Ident::Fallback(t) => Debug::fmt(t, f), - } - } -} - -#[derive(Clone)] -pub(crate) enum Literal { - Compiler(proc_macro::Literal), - Fallback(fallback::Literal), -} - -macro_rules! suffixed_numbers { - ($($name:ident => $kind:ident,)*) => ($( - pub(crate) fn $name(n: $kind) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::$name(n)) - } else { - Literal::Fallback(fallback::Literal::$name(n)) - } - } - )*) -} - -macro_rules! 
unsuffixed_integers { - ($($name:ident => $kind:ident,)*) => ($( - pub(crate) fn $name(n: $kind) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::$name(n)) - } else { - Literal::Fallback(fallback::Literal::$name(n)) - } - } - )*) -} - -impl Literal { - pub(crate) fn from_str_checked(repr: &str) -> Result<Self, LexError> { - if inside_proc_macro() { - let literal = proc_macro::Literal::from_str_checked(repr)?; - Ok(Literal::Compiler(literal)) - } else { - let literal = fallback::Literal::from_str_checked(repr)?; - Ok(Literal::Fallback(literal)) - } - } - - pub(crate) unsafe fn from_str_unchecked(repr: &str) -> Self { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::from_str_unchecked(repr)) - } else { - Literal::Fallback(unsafe { fallback::Literal::from_str_unchecked(repr) }) - } - } - - suffixed_numbers! { - u8_suffixed => u8, - u16_suffixed => u16, - u32_suffixed => u32, - u64_suffixed => u64, - u128_suffixed => u128, - usize_suffixed => usize, - i8_suffixed => i8, - i16_suffixed => i16, - i32_suffixed => i32, - i64_suffixed => i64, - i128_suffixed => i128, - isize_suffixed => isize, - - f32_suffixed => f32, - f64_suffixed => f64, - } - - unsuffixed_integers! { - u8_unsuffixed => u8, - u16_unsuffixed => u16, - u32_unsuffixed => u32, - u64_unsuffixed => u64, - u128_unsuffixed => u128, - usize_unsuffixed => usize, - i8_unsuffixed => i8, - i16_unsuffixed => i16, - i32_unsuffixed => i32, - i64_unsuffixed => i64, - i128_unsuffixed => i128, - isize_unsuffixed => isize, - } - - pub(crate) fn f32_unsuffixed(f: f32) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f)) - } else { - Literal::Fallback(fallback::Literal::f32_unsuffixed(f)) - } - } - - pub(crate) fn f64_unsuffixed(f: f64) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f)) - } else { - Literal::Fallback(fallback::Literal::f64_unsuffixed(f)) - } - } - - pub(crate) fn string(string: &str) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::string(string)) - } else { - Literal::Fallback(fallback::Literal::string(string)) - } - } - - pub(crate) fn character(ch: char) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::character(ch)) - } else { - Literal::Fallback(fallback::Literal::character(ch)) - } - } - - pub(crate) fn byte_character(byte: u8) -> Literal { - if inside_proc_macro() { - Literal::Compiler({ - #[cfg(not(no_literal_byte_character))] - { - proc_macro::Literal::byte_character(byte) - } - - #[cfg(no_literal_byte_character)] - { - let fallback = fallback::Literal::byte_character(byte); - proc_macro::Literal::from_str_unchecked(&fallback.repr) - } - }) - } else { - Literal::Fallback(fallback::Literal::byte_character(byte)) - } - } - - pub(crate) fn byte_string(bytes: &[u8]) -> Literal { - if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::byte_string(bytes)) - } else { - Literal::Fallback(fallback::Literal::byte_string(bytes)) - } - } - - pub(crate) fn c_string(string: &CStr) -> Literal { - if inside_proc_macro() { - Literal::Compiler({ - #[cfg(not(no_literal_c_string))] - { - proc_macro::Literal::c_string(string) - } - - #[cfg(no_literal_c_string)] - { - let fallback = fallback::Literal::c_string(string); - proc_macro::Literal::from_str_unchecked(&fallback.repr) - } - }) - } else { - Literal::Fallback(fallback::Literal::c_string(string)) - } - } - - pub(crate) fn span(&self) -> Span { - match self { - 
Literal::Compiler(lit) => Span::Compiler(lit.span()), - Literal::Fallback(lit) => Span::Fallback(lit.span()), - } - } - - pub(crate) fn set_span(&mut self, span: Span) { - match (self, span) { - (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s), - (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s), - (Literal::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Literal::Fallback(_), Span::Compiler(_)) => mismatch(line!()), - } - } - - pub(crate) fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> { - match self { - #[cfg(proc_macro_span)] - Literal::Compiler(lit) => lit.subspan(range).map(Span::Compiler), - #[cfg(not(proc_macro_span))] - Literal::Compiler(_lit) => None, - Literal::Fallback(lit) => lit.subspan(range).map(Span::Fallback), - } - } - - fn unwrap_nightly(self) -> proc_macro::Literal { - match self { - Literal::Compiler(s) => s, - Literal::Fallback(_) => mismatch(line!()), - } - } -} - -impl From<fallback::Literal> for Literal { - fn from(s: fallback::Literal) -> Self { - Literal::Fallback(s) - } -} - -impl Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Literal::Compiler(t) => Display::fmt(t, f), - Literal::Fallback(t) => Display::fmt(t, f), - } - } -} - -impl Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Literal::Compiler(t) => Debug::fmt(t, f), - Literal::Fallback(t) => Debug::fmt(t, f), - } - } -} - -#[cfg(span_locations)] -pub(crate) fn invalidate_current_thread_spans() { - if inside_proc_macro() { - panic!( - "proc_macro2::extra::invalidate_current_thread_spans is not available in procedural macros" - ); - } else { - crate::fallback::invalidate_current_thread_spans(); - } -} diff --git a/vendor/proc-macro2/tests/comments.rs b/vendor/proc-macro2/tests/comments.rs deleted file mode 100644 index 4f7236de..00000000 --- a/vendor/proc-macro2/tests/comments.rs +++ /dev/null @@ -1,105 +0,0 @@ -#![allow(clippy::assertions_on_result_states)] - -use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree}; - -// #[doc = "..."] -> "..." -fn lit_of_outer_doc_comment(tokens: &TokenStream) -> Literal { - lit_of_doc_comment(tokens, false) -} - -// #![doc = "..."] -> "..." 
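These test helpers unpack the `#[doc = "..."]` attribute form that doc comments lower to. A minimal sketch of that token shape using the public API, assuming `proc-macro2` as a dependency:

use proc_macro2::{Delimiter, TokenStream, TokenTree};
use std::str::FromStr;

fn main() {
    let tokens: Vec<TokenTree> = TokenStream::from_str("/// hello")
        .unwrap()
        .into_iter()
        .collect();
    assert_eq!(tokens.len(), 2); // a `#` punct followed by a bracketed group

    if let (TokenTree::Punct(pound), TokenTree::Group(group)) = (&tokens[0], &tokens[1]) {
        assert_eq!(pound.as_char(), '#');
        assert_eq!(group.delimiter(), Delimiter::Bracket);

        let inner: Vec<TokenTree> = group.stream().into_iter().collect();
        assert_eq!(inner.len(), 3); // `doc`, `=`, and the string literal
        assert_eq!(inner[0].to_string(), "doc");
        assert_eq!(inner[2].to_string(), "\" hello\"");
    } else {
        panic!("unexpected token shape");
    }
}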
-fn lit_of_inner_doc_comment(tokens: &TokenStream) -> Literal { - lit_of_doc_comment(tokens, true) -} - -fn lit_of_doc_comment(tokens: &TokenStream, inner: bool) -> Literal { - let mut iter = tokens.clone().into_iter(); - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '#'); - assert_eq!(punct.spacing(), Spacing::Alone); - } - _ => panic!("wrong token {:?}", tokens), - } - if inner { - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '!'); - assert_eq!(punct.spacing(), Spacing::Alone); - } - _ => panic!("wrong token {:?}", tokens), - } - } - iter = match iter.next().unwrap() { - TokenTree::Group(group) => { - assert_eq!(group.delimiter(), Delimiter::Bracket); - assert!(iter.next().is_none(), "unexpected token {:?}", tokens); - group.stream().into_iter() - } - _ => panic!("wrong token {:?}", tokens), - }; - match iter.next().unwrap() { - TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"), - _ => panic!("wrong token {:?}", tokens), - } - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '='); - assert_eq!(punct.spacing(), Spacing::Alone); - } - _ => panic!("wrong token {:?}", tokens), - } - match iter.next().unwrap() { - TokenTree::Literal(literal) => { - assert!(iter.next().is_none(), "unexpected token {:?}", tokens); - literal - } - _ => panic!("wrong token {:?}", tokens), - } -} - -#[test] -fn closed_immediately() { - let stream = "/**/".parse::<TokenStream>().unwrap(); - let tokens = stream.into_iter().collect::<Vec<_>>(); - assert!(tokens.is_empty(), "not empty -- {:?}", tokens); -} - -#[test] -fn incomplete() { - assert!("/*/".parse::<TokenStream>().is_err()); -} - -#[test] -fn lit() { - let stream = "/// doc".parse::<TokenStream>().unwrap(); - let lit = lit_of_outer_doc_comment(&stream); - assert_eq!(lit.to_string(), "\" doc\""); - - let stream = "//! doc".parse::<TokenStream>().unwrap(); - let lit = lit_of_inner_doc_comment(&stream); - assert_eq!(lit.to_string(), "\" doc\""); - - let stream = "/** doc */".parse::<TokenStream>().unwrap(); - let lit = lit_of_outer_doc_comment(&stream); - assert_eq!(lit.to_string(), "\" doc \""); - - let stream = "/*! doc */".parse::<TokenStream>().unwrap(); - let lit = lit_of_inner_doc_comment(&stream); - assert_eq!(lit.to_string(), "\" doc \""); -} - -#[test] -fn carriage_return() { - let stream = "///\r\n".parse::<TokenStream>().unwrap(); - let lit = lit_of_outer_doc_comment(&stream); - assert_eq!(lit.to_string(), "\"\""); - - let stream = "/**\r\n*/".parse::<TokenStream>().unwrap(); - let lit = lit_of_outer_doc_comment(&stream); - assert_eq!(lit.to_string(), "\"\\r\\n\""); - - "///\r".parse::<TokenStream>().unwrap_err(); - "///\r \n".parse::<TokenStream>().unwrap_err(); - "/**\r \n*/".parse::<TokenStream>().unwrap_err(); -} diff --git a/vendor/proc-macro2/tests/features.rs b/vendor/proc-macro2/tests/features.rs deleted file mode 100644 index 073f6e60..00000000 --- a/vendor/proc-macro2/tests/features.rs +++ /dev/null @@ -1,8 +0,0 @@ -#[test] -#[ignore] -fn make_sure_no_proc_macro() { - assert!( - !cfg!(feature = "proc-macro"), - "still compiled with proc_macro?" 
- ); -} diff --git a/vendor/proc-macro2/tests/marker.rs b/vendor/proc-macro2/tests/marker.rs deleted file mode 100644 index af8932a1..00000000 --- a/vendor/proc-macro2/tests/marker.rs +++ /dev/null @@ -1,97 +0,0 @@ -#![allow(clippy::extra_unused_type_parameters)] - -use proc_macro2::{ - Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, -}; - -macro_rules! assert_impl { - ($ty:ident is $($marker:ident) and +) => { - #[test] - #[allow(non_snake_case)] - fn $ty() { - fn assert_implemented<T: $($marker +)+>() {} - assert_implemented::<$ty>(); - } - }; - - ($ty:ident is not $($marker:ident) or +) => { - #[test] - #[allow(non_snake_case)] - fn $ty() { - $( - { - // Implemented for types that implement $marker. - #[allow(dead_code)] - trait IsNotImplemented { - fn assert_not_implemented() {} - } - impl<T: $marker> IsNotImplemented for T {} - - // Implemented for the type being tested. - trait IsImplemented { - fn assert_not_implemented() {} - } - impl IsImplemented for $ty {} - - // If $ty does not implement $marker, there is no ambiguity - // in the following trait method call. - <$ty>::assert_not_implemented(); - } - )+ - } - }; -} - -assert_impl!(Delimiter is Send and Sync); -assert_impl!(Spacing is Send and Sync); - -assert_impl!(Group is not Send or Sync); -assert_impl!(Ident is not Send or Sync); -assert_impl!(LexError is not Send or Sync); -assert_impl!(Literal is not Send or Sync); -assert_impl!(Punct is not Send or Sync); -assert_impl!(Span is not Send or Sync); -assert_impl!(TokenStream is not Send or Sync); -assert_impl!(TokenTree is not Send or Sync); - -#[cfg(procmacro2_semver_exempt)] -mod semver_exempt { - use proc_macro2::LineColumn; - - assert_impl!(LineColumn is Send and Sync); -} - -mod unwind_safe { - #[cfg(procmacro2_semver_exempt)] - use proc_macro2::LineColumn; - use proc_macro2::{ - Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, - }; - use std::panic::{RefUnwindSafe, UnwindSafe}; - - macro_rules! assert_unwind_safe { - ($($types:ident)*) => { - $( - assert_impl!($types is UnwindSafe and RefUnwindSafe); - )* - }; - } - - assert_unwind_safe! { - Delimiter - Group - Ident - LexError - Literal - Punct - Spacing - Span - TokenStream - TokenTree - } - - #[cfg(procmacro2_semver_exempt)] - assert_unwind_safe! 
{ - LineColumn - } -} diff --git a/vendor/proc-macro2/tests/test.rs b/vendor/proc-macro2/tests/test.rs deleted file mode 100644 index aa7397b6..00000000 --- a/vendor/proc-macro2/tests/test.rs +++ /dev/null @@ -1,898 +0,0 @@ -#![allow( - clippy::assertions_on_result_states, - clippy::items_after_statements, - clippy::needless_pass_by_value, - clippy::needless_raw_string_hashes, - clippy::non_ascii_literal, - clippy::octal_escapes -)] - -use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; -use std::ffi::CStr; -use std::iter; -use std::str::{self, FromStr}; - -#[test] -fn idents() { - assert_eq!( - Ident::new("String", Span::call_site()).to_string(), - "String" - ); - assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn"); - assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_"); -} - -#[test] -fn raw_idents() { - assert_eq!( - Ident::new_raw("String", Span::call_site()).to_string(), - "r#String" - ); - assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn"); -} - -#[test] -#[should_panic(expected = "`r#_` cannot be a raw identifier")] -fn ident_raw_underscore() { - Ident::new_raw("_", Span::call_site()); -} - -#[test] -#[should_panic(expected = "`r#super` cannot be a raw identifier")] -fn ident_raw_reserved() { - Ident::new_raw("super", Span::call_site()); -} - -#[test] -#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")] -fn ident_empty() { - Ident::new("", Span::call_site()); -} - -#[test] -#[should_panic(expected = "Ident cannot be a number; use Literal instead")] -fn ident_number() { - Ident::new("255", Span::call_site()); -} - -#[test] -#[should_panic(expected = "\"a#\" is not a valid Ident")] -fn ident_invalid() { - Ident::new("a#", Span::call_site()); -} - -#[test] -#[should_panic(expected = "not a valid Ident")] -fn raw_ident_empty() { - Ident::new("r#", Span::call_site()); -} - -#[test] -#[should_panic(expected = "not a valid Ident")] -fn raw_ident_number() { - Ident::new("r#255", Span::call_site()); -} - -#[test] -#[should_panic(expected = "\"r#a#\" is not a valid Ident")] -fn raw_ident_invalid() { - Ident::new("r#a#", Span::call_site()); -} - -#[test] -#[should_panic(expected = "not a valid Ident")] -fn lifetime_empty() { - Ident::new("'", Span::call_site()); -} - -#[test] -#[should_panic(expected = "not a valid Ident")] -fn lifetime_number() { - Ident::new("'255", Span::call_site()); -} - -#[test] -#[should_panic(expected = r#""'a#" is not a valid Ident"#)] -fn lifetime_invalid() { - Ident::new("'a#", Span::call_site()); -} - -#[test] -fn literal_string() { - #[track_caller] - fn assert(literal: Literal, expected: &str) { - assert_eq!(literal.to_string(), expected.trim()); - } - - assert(Literal::string(""), r#" "" "#); - assert(Literal::string("aA"), r#" "aA" "#); - assert(Literal::string("\t"), r#" "\t" "#); - assert(Literal::string("❤"), r#" "❤" "#); - assert(Literal::string("'"), r#" "'" "#); - assert(Literal::string("\""), r#" "\"" "#); - assert(Literal::string("\0"), r#" "\0" "#); - assert(Literal::string("\u{1}"), r#" "\u{1}" "#); - assert( - Literal::string("a\00b\07c\08d\0e\0"), - r#" "a\x000b\x007c\08d\0e\0" "#, - ); - - "\"\\\r\n x\"".parse::<TokenStream>().unwrap(); - "\"\\\r\n \rx\"".parse::<TokenStream>().unwrap_err(); -} - -#[test] -fn literal_raw_string() { - "r\"\r\n\"".parse::<TokenStream>().unwrap(); - - fn raw_string_literal_with_hashes(n: usize) -> String { - let mut literal = String::new(); - literal.push('r'); - literal.extend(iter::repeat('#').take(n)); - 
literal.push('"'); - literal.push('"'); - literal.extend(iter::repeat('#').take(n)); - literal - } - - raw_string_literal_with_hashes(255) - .parse::<TokenStream>() - .unwrap(); - - // https://github.com/rust-lang/rust/pull/95251 - raw_string_literal_with_hashes(256) - .parse::<TokenStream>() - .unwrap_err(); -} - -#[test] -fn literal_byte_character() { - #[track_caller] - fn assert(literal: Literal, expected: &str) { - assert_eq!(literal.to_string(), expected.trim()); - } - - assert(Literal::byte_character(b'a'), r#" b'a' "#); - assert(Literal::byte_character(b'\0'), r#" b'\0' "#); - assert(Literal::byte_character(b'\t'), r#" b'\t' "#); - assert(Literal::byte_character(b'\n'), r#" b'\n' "#); - assert(Literal::byte_character(b'\r'), r#" b'\r' "#); - assert(Literal::byte_character(b'\''), r#" b'\'' "#); - assert(Literal::byte_character(b'\\'), r#" b'\\' "#); - assert(Literal::byte_character(b'\x1f'), r#" b'\x1F' "#); - assert(Literal::byte_character(b'"'), r#" b'"' "#); -} - -#[test] -fn literal_byte_string() { - #[track_caller] - fn assert(literal: Literal, expected: &str) { - assert_eq!(literal.to_string(), expected.trim()); - } - - assert(Literal::byte_string(b""), r#" b"" "#); - assert(Literal::byte_string(b"\0"), r#" b"\0" "#); - assert(Literal::byte_string(b"\t"), r#" b"\t" "#); - assert(Literal::byte_string(b"\n"), r#" b"\n" "#); - assert(Literal::byte_string(b"\r"), r#" b"\r" "#); - assert(Literal::byte_string(b"\""), r#" b"\"" "#); - assert(Literal::byte_string(b"\\"), r#" b"\\" "#); - assert(Literal::byte_string(b"\x1f"), r#" b"\x1F" "#); - assert(Literal::byte_string(b"'"), r#" b"'" "#); - assert( - Literal::byte_string(b"a\00b\07c\08d\0e\0"), - r#" b"a\x000b\x007c\08d\0e\0" "#, - ); - - "b\"\\\r\n x\"".parse::<TokenStream>().unwrap(); - "b\"\\\r\n \rx\"".parse::<TokenStream>().unwrap_err(); - "b\"\\\r\n \u{a0}x\"".parse::<TokenStream>().unwrap_err(); - "br\"\u{a0}\"".parse::<TokenStream>().unwrap_err(); -} - -#[test] -fn literal_c_string() { - #[track_caller] - fn assert(literal: Literal, expected: &str) { - assert_eq!(literal.to_string(), expected.trim()); - } - - assert(Literal::c_string(<&CStr>::default()), r#" c"" "#); - assert( - Literal::c_string(CStr::from_bytes_with_nul(b"aA\0").unwrap()), - r#" c"aA" "#, - ); - assert( - Literal::c_string(CStr::from_bytes_with_nul(b"aA\0").unwrap()), - r#" c"aA" "#, - ); - assert( - Literal::c_string(CStr::from_bytes_with_nul(b"\t\0").unwrap()), - r#" c"\t" "#, - ); - assert( - Literal::c_string(CStr::from_bytes_with_nul(b"\xE2\x9D\xA4\0").unwrap()), - r#" c"❤" "#, - ); - assert( - Literal::c_string(CStr::from_bytes_with_nul(b"'\0").unwrap()), - r#" c"'" "#, - ); - assert( - Literal::c_string(CStr::from_bytes_with_nul(b"\"\0").unwrap()), - r#" c"\"" "#, - ); - assert( - Literal::c_string(CStr::from_bytes_with_nul(b"\x7F\xFF\xFE\xCC\xB3\0").unwrap()), - r#" c"\u{7f}\xFF\xFE\u{333}" "#, - ); - - let strings = r###" - c"hello\x80我叫\u{1F980}" // from the RFC - cr"\" - cr##"Hello "world"!"## - c"\t\n\r\"\\" - "###; - - let mut tokens = strings.parse::<TokenStream>().unwrap().into_iter(); - - for expected in &[ - r#"c"hello\x80我叫\u{1F980}""#, - r#"cr"\""#, - r###"cr##"Hello "world"!"##"###, - r#"c"\t\n\r\"\\""#, - ] { - match tokens.next().unwrap() { - TokenTree::Literal(literal) => { - assert_eq!(literal.to_string(), *expected); - } - unexpected => panic!("unexpected token: {:?}", unexpected), - } - } - - if let Some(unexpected) = tokens.next() { - panic!("unexpected token: {:?}", unexpected); - } - - for invalid in &[r#"c"\0""#, 
r#"c"\x00""#, r#"c"\u{0}""#, "c\"\0\""] { - if let Ok(unexpected) = invalid.parse::<TokenStream>() { - panic!("unexpected token: {:?}", unexpected); - } - } -} - -#[test] -fn literal_character() { - #[track_caller] - fn assert(literal: Literal, expected: &str) { - assert_eq!(literal.to_string(), expected.trim()); - } - - assert(Literal::character('a'), r#" 'a' "#); - assert(Literal::character('\t'), r#" '\t' "#); - assert(Literal::character('❤'), r#" '❤' "#); - assert(Literal::character('\''), r#" '\'' "#); - assert(Literal::character('"'), r#" '"' "#); - assert(Literal::character('\0'), r#" '\0' "#); - assert(Literal::character('\u{1}'), r#" '\u{1}' "#); -} - -#[test] -fn literal_integer() { - #[track_caller] - fn assert(literal: Literal, expected: &str) { - assert_eq!(literal.to_string(), expected); - } - - assert(Literal::u8_suffixed(10), "10u8"); - assert(Literal::u16_suffixed(10), "10u16"); - assert(Literal::u32_suffixed(10), "10u32"); - assert(Literal::u64_suffixed(10), "10u64"); - assert(Literal::u128_suffixed(10), "10u128"); - assert(Literal::usize_suffixed(10), "10usize"); - - assert(Literal::i8_suffixed(10), "10i8"); - assert(Literal::i16_suffixed(10), "10i16"); - assert(Literal::i32_suffixed(10), "10i32"); - assert(Literal::i64_suffixed(10), "10i64"); - assert(Literal::i128_suffixed(10), "10i128"); - assert(Literal::isize_suffixed(10), "10isize"); - - assert(Literal::u8_unsuffixed(10), "10"); - assert(Literal::u16_unsuffixed(10), "10"); - assert(Literal::u32_unsuffixed(10), "10"); - assert(Literal::u64_unsuffixed(10), "10"); - assert(Literal::u128_unsuffixed(10), "10"); - assert(Literal::usize_unsuffixed(10), "10"); - - assert(Literal::i8_unsuffixed(10), "10"); - assert(Literal::i16_unsuffixed(10), "10"); - assert(Literal::i32_unsuffixed(10), "10"); - assert(Literal::i64_unsuffixed(10), "10"); - assert(Literal::i128_unsuffixed(10), "10"); - assert(Literal::isize_unsuffixed(10), "10"); - - assert(Literal::i32_suffixed(-10), "-10i32"); - assert(Literal::i32_unsuffixed(-10), "-10"); -} - -#[test] -fn literal_float() { - #[track_caller] - fn assert(literal: Literal, expected: &str) { - assert_eq!(literal.to_string(), expected); - } - - assert(Literal::f32_suffixed(10.0), "10f32"); - assert(Literal::f32_suffixed(-10.0), "-10f32"); - assert(Literal::f64_suffixed(10.0), "10f64"); - assert(Literal::f64_suffixed(-10.0), "-10f64"); - - assert(Literal::f32_unsuffixed(10.0), "10.0"); - assert(Literal::f32_unsuffixed(-10.0), "-10.0"); - assert(Literal::f64_unsuffixed(10.0), "10.0"); - assert(Literal::f64_unsuffixed(-10.0), "-10.0"); - - assert( - Literal::f64_unsuffixed(1e100), - "10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.0", - ); -} - -#[test] -fn literal_suffix() { - fn token_count(p: &str) -> usize { - p.parse::<TokenStream>().unwrap().into_iter().count() - } - - assert_eq!(token_count("999u256"), 1); - assert_eq!(token_count("999r#u256"), 3); - assert_eq!(token_count("1."), 1); - assert_eq!(token_count("1.f32"), 3); - assert_eq!(token_count("1.0_0"), 1); - assert_eq!(token_count("1._0"), 3); - assert_eq!(token_count("1._m"), 3); - assert_eq!(token_count("\"\"s"), 1); - assert_eq!(token_count("r\"\"r"), 1); - assert_eq!(token_count("r#\"\"#r"), 1); - assert_eq!(token_count("b\"\"b"), 1); - assert_eq!(token_count("br\"\"br"), 1); - assert_eq!(token_count("br#\"\"#br"), 1); - assert_eq!(token_count("c\"\"c"), 1); - assert_eq!(token_count("cr\"\"cr"), 1); - assert_eq!(token_count("cr#\"\"#cr"), 1); - assert_eq!(token_count("'c'c"), 
1); - assert_eq!(token_count("b'b'b"), 1); - assert_eq!(token_count("0E"), 1); - assert_eq!(token_count("0o0A"), 1); - assert_eq!(token_count("0E--0"), 4); - assert_eq!(token_count("0.0ECMA"), 1); -} - -#[test] -fn literal_iter_negative() { - let negative_literal = Literal::i32_suffixed(-3); - let tokens = TokenStream::from(TokenTree::Literal(negative_literal)); - let mut iter = tokens.into_iter(); - match iter.next().unwrap() { - TokenTree::Punct(punct) => { - assert_eq!(punct.as_char(), '-'); - assert_eq!(punct.spacing(), Spacing::Alone); - } - unexpected => panic!("unexpected token {:?}", unexpected), - } - match iter.next().unwrap() { - TokenTree::Literal(literal) => { - assert_eq!(literal.to_string(), "3i32"); - } - unexpected => panic!("unexpected token {:?}", unexpected), - } - assert!(iter.next().is_none()); -} - -#[test] -fn literal_parse() { - assert!("1".parse::<Literal>().is_ok()); - assert!("-1".parse::<Literal>().is_ok()); - assert!("-1u12".parse::<Literal>().is_ok()); - assert!("1.0".parse::<Literal>().is_ok()); - assert!("-1.0".parse::<Literal>().is_ok()); - assert!("-1.0f12".parse::<Literal>().is_ok()); - assert!("'a'".parse::<Literal>().is_ok()); - assert!("\"\n\"".parse::<Literal>().is_ok()); - assert!("0 1".parse::<Literal>().is_err()); - assert!(" 0".parse::<Literal>().is_err()); - assert!("0 ".parse::<Literal>().is_err()); - assert!("/* comment */0".parse::<Literal>().is_err()); - assert!("0/* comment */".parse::<Literal>().is_err()); - assert!("0// comment".parse::<Literal>().is_err()); - assert!("- 1".parse::<Literal>().is_err()); - assert!("- 1.0".parse::<Literal>().is_err()); - assert!("-\"\"".parse::<Literal>().is_err()); -} - -#[test] -fn literal_span() { - let positive = "0.1".parse::<Literal>().unwrap(); - let negative = "-0.1".parse::<Literal>().unwrap(); - let subspan = positive.subspan(1..2); - - #[cfg(not(span_locations))] - { - let _ = negative; - assert!(subspan.is_none()); - } - - #[cfg(span_locations)] - { - assert_eq!(positive.span().start().column, 0); - assert_eq!(positive.span().end().column, 3); - assert_eq!(negative.span().start().column, 0); - assert_eq!(negative.span().end().column, 4); - assert_eq!(subspan.unwrap().source_text().unwrap(), "."); - } - - assert!(positive.subspan(1..4).is_none()); -} - -#[cfg(span_locations)] -#[test] -fn source_text() { - let input = " 𓀕 a z "; - let mut tokens = input - .parse::<proc_macro2::TokenStream>() - .unwrap() - .into_iter(); - - let first = tokens.next().unwrap(); - assert_eq!("𓀕", first.span().source_text().unwrap()); - - let second = tokens.next().unwrap(); - let third = tokens.next().unwrap(); - assert_eq!("z", third.span().source_text().unwrap()); - assert_eq!("a", second.span().source_text().unwrap()); -} - -#[test] -fn roundtrip() { - fn roundtrip(p: &str) { - println!("parse: {}", p); - let s = p.parse::<TokenStream>().unwrap().to_string(); - println!("first: {}", s); - let s2 = s.parse::<TokenStream>().unwrap().to_string(); - assert_eq!(s, s2); - } - roundtrip("a"); - roundtrip("<<"); - roundtrip("<<="); - roundtrip( - " - 1 - 1.0 - 1f32 - 2f64 - 1usize - 4isize - 4e10 - 1_000 - 1_0i32 - 8u8 - 9 - 0 - 0xffffffffffffffffffffffffffffffff - 1x - 1u80 - 1f320 - ", - ); - roundtrip("'a"); - roundtrip("'_"); - roundtrip("'static"); - roundtrip(r"'\u{10__FFFF}'"); - roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\""); -} - -#[test] -fn fail() { - fn fail(p: &str) { - if let Ok(s) = p.parse::<TokenStream>() { - panic!("should have failed to parse: {}\n{:#?}", p, s); - } - } - fail("' static"); - 
fail("r#1"); - fail("r#_"); - fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits) - fail("\"\\u{999999}\""); // outside of valid range of char - fail("\"\\u{_0}\""); // leading underscore - fail("\"\\u{}\""); // empty - fail("b\"\r\""); // bare carriage return in byte string - fail("r\"\r\""); // bare carriage return in raw string - fail("\"\\\r \""); // backslash carriage return - fail("'aa'aa"); - fail("br##\"\"#"); - fail("cr##\"\"#"); - fail("\"\\\n\u{85}\r\""); -} - -#[cfg(span_locations)] -#[test] -fn span_test() { - check_spans( - "\ -/// This is a document comment -testing 123 -{ - testing 234 -}", - &[ - (1, 0, 1, 30), // # - (1, 0, 1, 30), // [ ... ] - (1, 0, 1, 30), // doc - (1, 0, 1, 30), // = - (1, 0, 1, 30), // "This is..." - (2, 0, 2, 7), // testing - (2, 8, 2, 11), // 123 - (3, 0, 5, 1), // { ... } - (4, 2, 4, 9), // testing - (4, 10, 4, 13), // 234 - ], - ); -} - -#[cfg(procmacro2_semver_exempt)] -#[test] -fn default_span() { - let start = Span::call_site().start(); - assert_eq!(start.line, 1); - assert_eq!(start.column, 0); - let end = Span::call_site().end(); - assert_eq!(end.line, 1); - assert_eq!(end.column, 0); - assert_eq!(Span::call_site().file(), "<unspecified>"); - assert!(Span::call_site().local_file().is_none()); -} - -#[cfg(procmacro2_semver_exempt)] -#[test] -fn span_join() { - let source1 = "aaa\nbbb" - .parse::<TokenStream>() - .unwrap() - .into_iter() - .collect::<Vec<_>>(); - let source2 = "ccc\nddd" - .parse::<TokenStream>() - .unwrap() - .into_iter() - .collect::<Vec<_>>(); - - assert!(source1[0].span().file() != source2[0].span().file()); - assert_eq!(source1[0].span().file(), source1[1].span().file()); - - let joined1 = source1[0].span().join(source1[1].span()); - let joined2 = source1[0].span().join(source2[0].span()); - assert!(joined1.is_some()); - assert!(joined2.is_none()); - - let start = joined1.unwrap().start(); - let end = joined1.unwrap().end(); - assert_eq!(start.line, 1); - assert_eq!(start.column, 0); - assert_eq!(end.line, 2); - assert_eq!(end.column, 3); - - assert_eq!(joined1.unwrap().file(), source1[0].span().file()); -} - -#[test] -fn no_panic() { - let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap(); - assert!(s.parse::<TokenStream>().is_err()); -} - -#[test] -fn punct_before_comment() { - let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter(); - match tts.next().unwrap() { - TokenTree::Punct(tt) => { - assert_eq!(tt.as_char(), '~'); - assert_eq!(tt.spacing(), Spacing::Alone); - } - wrong => panic!("wrong token {:?}", wrong), - } -} - -#[test] -fn joint_last_token() { - // This test verifies that we match the behavior of libproc_macro *not* in - // the range nightly-2020-09-06 through nightly-2020-09-10, in which this - // behavior was temporarily broken. 
- // See https://github.com/rust-lang/rust/issues/76399 - - let joint_punct = Punct::new(':', Spacing::Joint); - let stream = TokenStream::from(TokenTree::Punct(joint_punct)); - let punct = match stream.into_iter().next().unwrap() { - TokenTree::Punct(punct) => punct, - _ => unreachable!(), - }; - assert_eq!(punct.spacing(), Spacing::Joint); -} - -#[test] -fn raw_identifier() { - let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter(); - match tts.next().unwrap() { - TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()), - wrong => panic!("wrong token {:?}", wrong), - } - assert!(tts.next().is_none()); -} - -#[test] -fn test_debug_ident() { - let ident = Ident::new("proc_macro", Span::call_site()); - - #[cfg(not(span_locations))] - let expected = "Ident(proc_macro)"; - - #[cfg(span_locations)] - let expected = "Ident { sym: proc_macro }"; - - assert_eq!(expected, format!("{:?}", ident)); -} - -#[test] -fn test_debug_tokenstream() { - let tts = TokenStream::from_str("[a + 1]").unwrap(); - - #[cfg(not(span_locations))] - let expected = "\ -TokenStream [ - Group { - delimiter: Bracket, - stream: TokenStream [ - Ident { - sym: a, - }, - Punct { - char: '+', - spacing: Alone, - }, - Literal { - lit: 1, - }, - ], - }, -]\ - "; - - #[cfg(not(span_locations))] - let expected_before_trailing_commas = "\ -TokenStream [ - Group { - delimiter: Bracket, - stream: TokenStream [ - Ident { - sym: a - }, - Punct { - char: '+', - spacing: Alone - }, - Literal { - lit: 1 - } - ] - } -]\ - "; - - #[cfg(span_locations)] - let expected = "\ -TokenStream [ - Group { - delimiter: Bracket, - stream: TokenStream [ - Ident { - sym: a, - span: bytes(2..3), - }, - Punct { - char: '+', - spacing: Alone, - span: bytes(4..5), - }, - Literal { - lit: 1, - span: bytes(6..7), - }, - ], - span: bytes(1..8), - }, -]\ - "; - - #[cfg(span_locations)] - let expected_before_trailing_commas = "\ -TokenStream [ - Group { - delimiter: Bracket, - stream: TokenStream [ - Ident { - sym: a, - span: bytes(2..3) - }, - Punct { - char: '+', - spacing: Alone, - span: bytes(4..5) - }, - Literal { - lit: 1, - span: bytes(6..7) - } - ], - span: bytes(1..8) - } -]\ - "; - - let actual = format!("{:#?}", tts); - if actual.ends_with(",\n]") { - assert_eq!(expected, actual); - } else { - assert_eq!(expected_before_trailing_commas, actual); - } -} - -#[test] -fn default_tokenstream_is_empty() { - let default_token_stream = <TokenStream as Default>::default(); - - assert!(default_token_stream.is_empty()); -} - -#[test] -fn tokenstream_size_hint() { - let tokens = "a b (c d) e".parse::<TokenStream>().unwrap(); - - assert_eq!(tokens.into_iter().size_hint(), (4, Some(4))); -} - -#[test] -fn tuple_indexing() { - // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322 - let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter(); - assert_eq!("tuple", tokens.next().unwrap().to_string()); - assert_eq!(".", tokens.next().unwrap().to_string()); - assert_eq!("0.0", tokens.next().unwrap().to_string()); - assert!(tokens.next().is_none()); -} - -#[cfg(span_locations)] -#[test] -fn non_ascii_tokens() { - check_spans("// abc", &[]); - check_spans("// ábc", &[]); - check_spans("// abc x", &[]); - check_spans("// ábc x", &[]); - check_spans("/* abc */ x", &[(1, 10, 1, 11)]); - check_spans("/* ábc */ x", &[(1, 10, 1, 11)]); - check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]); - check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]); - check_spans("/*** abc */ x", &[(1, 12, 1, 13)]); - check_spans("/*** ábc */ x", &[(1, 
12, 1, 13)]); - check_spans(r#""abc""#, &[(1, 0, 1, 5)]); - check_spans(r#""ábc""#, &[(1, 0, 1, 5)]); - check_spans(r##"r#"abc"#"##, &[(1, 0, 1, 8)]); - check_spans(r##"r#"ábc"#"##, &[(1, 0, 1, 8)]); - check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]); - check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]); - check_spans("'a'", &[(1, 0, 1, 3)]); - check_spans("'á'", &[(1, 0, 1, 3)]); - check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); - check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); - check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); - check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]); - check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]); - check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]); - check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]); - check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]); - check_spans("abc", &[(1, 0, 1, 3)]); - check_spans("ábc", &[(1, 0, 1, 3)]); - check_spans("ábć", &[(1, 0, 1, 3)]); - check_spans("abc// foo", &[(1, 0, 1, 3)]); - check_spans("ábc// foo", &[(1, 0, 1, 3)]); - check_spans("ábć// foo", &[(1, 0, 1, 3)]); - check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]); -} - -#[cfg(span_locations)] -fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) { - let ts = p.parse::<TokenStream>().unwrap(); - check_spans_internal(ts, &mut lines); - assert!(lines.is_empty(), "leftover ranges: {:?}", lines); -} - -#[cfg(span_locations)] -fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) { - for i in ts { - if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() { - *lines = rest; - - let start = i.span().start(); - assert_eq!(start.line, sline, "sline did not match for {}", i); - assert_eq!(start.column, scol, "scol did not match for {}", i); - - let end = i.span().end(); - assert_eq!(end.line, eline, "eline did not match for {}", i); - assert_eq!(end.column, ecol, "ecol did not match for {}", i); - - if let TokenTree::Group(g) = i { - check_spans_internal(g.stream().clone(), lines); - } - } - } -} - -#[test] -fn whitespace() { - // space, horizontal tab, vertical tab, form feed, carriage return, line - // feed, non-breaking space, left-to-right mark, right-to-left mark - let various_spaces = " \t\u{b}\u{c}\r\n\u{a0}\u{200e}\u{200f}"; - let tokens = various_spaces.parse::<TokenStream>().unwrap(); - assert_eq!(tokens.into_iter().count(), 0); - - let lone_carriage_returns = " \r \r\r\n "; - lone_carriage_returns.parse::<TokenStream>().unwrap(); -} - -#[test] -fn byte_order_mark() { - let string = "\u{feff}foo"; - let tokens = string.parse::<TokenStream>().unwrap(); - match tokens.into_iter().next().unwrap() { - TokenTree::Ident(ident) => assert_eq!(ident, "foo"), - _ => unreachable!(), - } - - let string = "foo\u{feff}"; - string.parse::<TokenStream>().unwrap_err(); -} - -#[cfg(span_locations)] -fn create_span() -> proc_macro2::Span { - let tts: TokenStream = "1".parse().unwrap(); - match tts.into_iter().next().unwrap() { - TokenTree::Literal(literal) => literal.span(), - _ => unreachable!(), - } -} - -#[cfg(span_locations)] -#[test] -fn test_invalidate_current_thread_spans() { - let actual = format!("{:#?}", create_span()); - assert_eq!(actual, "bytes(1..2)"); - let actual = format!("{:#?}", create_span()); - assert_eq!(actual, "bytes(3..4)"); - - proc_macro2::extra::invalidate_current_thread_spans(); - - let actual = format!("{:#?}", create_span()); - // Test 
that span offsets have been reset after the call - // to invalidate_current_thread_spans() - assert_eq!(actual, "bytes(1..2)"); -} - -#[cfg(span_locations)] -#[test] -#[should_panic(expected = "Invalid span with no related FileInfo!")] -fn test_use_span_after_invalidation() { - let span = create_span(); - - proc_macro2::extra::invalidate_current_thread_spans(); - - span.source_text(); -} diff --git a/vendor/proc-macro2/tests/test_fmt.rs b/vendor/proc-macro2/tests/test_fmt.rs deleted file mode 100644 index 86a4c387..00000000 --- a/vendor/proc-macro2/tests/test_fmt.rs +++ /dev/null @@ -1,28 +0,0 @@ -#![allow(clippy::from_iter_instead_of_collect)] - -use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; -use std::iter; - -#[test] -fn test_fmt_group() { - let ident = Ident::new("x", Span::call_site()); - let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident))); - let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new()); - let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone()); - let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new()); - let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone()); - let braces_empty = Group::new(Delimiter::Brace, TokenStream::new()); - let braces_nonempty = Group::new(Delimiter::Brace, inner.clone()); - let none_empty = Group::new(Delimiter::None, TokenStream::new()); - let none_nonempty = Group::new(Delimiter::None, inner); - - // Matches libproc_macro. - assert_eq!("()", parens_empty.to_string()); - assert_eq!("(x)", parens_nonempty.to_string()); - assert_eq!("[]", brackets_empty.to_string()); - assert_eq!("[x]", brackets_nonempty.to_string()); - assert_eq!("{ }", braces_empty.to_string()); - assert_eq!("{ x }", braces_nonempty.to_string()); - assert_eq!("", none_empty.to_string()); - assert_eq!("x", none_nonempty.to_string()); -} diff --git a/vendor/proc-macro2/tests/test_size.rs b/vendor/proc-macro2/tests/test_size.rs deleted file mode 100644 index 8b679151..00000000 --- a/vendor/proc-macro2/tests/test_size.rs +++ /dev/null @@ -1,81 +0,0 @@ -#![allow(unused_attributes)] - -extern crate proc_macro; - -use std::mem; - -#[rustversion::attr(before(1.64), ignore = "requires Rust 1.64+")] -#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] -#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] -#[test] -fn test_proc_macro_size() { - assert_eq!(mem::size_of::<proc_macro::Span>(), 4); - assert_eq!(mem::size_of::<Option<proc_macro::Span>>(), 4); - assert_eq!(mem::size_of::<proc_macro::Group>(), 20); - assert_eq!(mem::size_of::<proc_macro::Ident>(), 12); - assert_eq!(mem::size_of::<proc_macro::Punct>(), 8); - assert_eq!(mem::size_of::<proc_macro::Literal>(), 16); - assert_eq!(mem::size_of::<proc_macro::TokenStream>(), 4); -} - -#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")] -#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")] -#[cfg_attr(wrap_proc_macro, ignore = "wrapper mode")] -#[cfg_attr(span_locations, ignore = "span locations are on")] -#[test] -fn test_proc_macro2_fallback_size_without_locations() { - assert_eq!(mem::size_of::<proc_macro2::Span>(), 0); - assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 1); - assert_eq!(mem::size_of::<proc_macro2::Group>(), 16); - assert_eq!(mem::size_of::<proc_macro2::Ident>(), 24); - assert_eq!(mem::size_of::<proc_macro2::Punct>(), 8); - assert_eq!(mem::size_of::<proc_macro2::Literal>(), 24); - 
assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 8);
-}
-
-#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")]
-#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")]
-#[cfg_attr(wrap_proc_macro, ignore = "wrapper mode")]
-#[cfg_attr(not(span_locations), ignore = "span locations are off")]
-#[test]
-fn test_proc_macro2_fallback_size_with_locations() {
-    assert_eq!(mem::size_of::<proc_macro2::Span>(), 8);
-    assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
-    assert_eq!(mem::size_of::<proc_macro2::Group>(), 24);
-    assert_eq!(mem::size_of::<proc_macro2::Ident>(), 32);
-    assert_eq!(mem::size_of::<proc_macro2::Punct>(), 16);
-    assert_eq!(mem::size_of::<proc_macro2::Literal>(), 32);
-    assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 8);
-}
-
-#[rustversion::attr(before(1.71), ignore = "requires Rust 1.71+")]
-#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")]
-#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")]
-#[cfg_attr(not(wrap_proc_macro), ignore = "fallback mode")]
-#[cfg_attr(span_locations, ignore = "span locations are on")]
-#[test]
-fn test_proc_macro2_wrapper_size_without_locations() {
-    assert_eq!(mem::size_of::<proc_macro2::Span>(), 4);
-    assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 8);
-    assert_eq!(mem::size_of::<proc_macro2::Group>(), 24);
-    assert_eq!(mem::size_of::<proc_macro2::Ident>(), 24);
-    assert_eq!(mem::size_of::<proc_macro2::Punct>(), 12);
-    assert_eq!(mem::size_of::<proc_macro2::Literal>(), 24);
-    assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 32);
-}
-
-#[rustversion::attr(before(1.65), ignore = "requires Rust 1.65+")]
-#[cfg_attr(not(target_pointer_width = "64"), ignore = "only applicable to 64-bit")]
-#[cfg_attr(randomize_layout, ignore = "disabled due to randomized layout")]
-#[cfg_attr(not(wrap_proc_macro), ignore = "fallback mode")]
-#[cfg_attr(not(span_locations), ignore = "span locations are off")]
-#[test]
-fn test_proc_macro2_wrapper_size_with_locations() {
-    assert_eq!(mem::size_of::<proc_macro2::Span>(), 12);
-    assert_eq!(mem::size_of::<Option<proc_macro2::Span>>(), 12);
-    assert_eq!(mem::size_of::<proc_macro2::Group>(), 32);
-    assert_eq!(mem::size_of::<proc_macro2::Ident>(), 32);
-    assert_eq!(mem::size_of::<proc_macro2::Punct>(), 20);
-    assert_eq!(mem::size_of::<proc_macro2::Literal>(), 32);
-    assert_eq!(mem::size_of::<proc_macro2::TokenStream>(), 32);
-}