!5 回退 'Pull Request !4 : syn 1.0.107升级至2.0.48'

Merge pull request !5 from openharmony_ci/revert-merge-4-master
This commit is contained in:
openharmony_ci 2024-04-01 12:17:24 +00:00 committed by Gitee
commit b762ffaa9c
No known key found for this signature in database
GPG Key ID: 173E9B9CA92EEF8F
137 changed files with 14998 additions and 16166 deletions

1
.clippy.toml Normal file
View File

@ -0,0 +1 @@
msrv = "1.31.0"

View File

@ -3,7 +3,6 @@ name: CI
on:
push:
pull_request:
workflow_dispatch:
schedule: [cron: "40 1 * * *"]
permissions:
@ -13,31 +12,24 @@ env:
RUSTFLAGS: -Dwarnings
jobs:
pre_ci:
uses: dtolnay/.github/.github/workflows/pre_ci.yml@master
test:
name: Tests
needs: pre_ci
if: needs.pre_ci.outputs.continue
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
with:
components: llvm-tools, rustc-dev
components: rustc-dev
- run: cargo test --all-features --release --tests
build:
name: ${{matrix.name || format('Rust {0}', matrix.rust)}}
needs: pre_ci
if: needs.pre_ci.outputs.continue
runs-on: ${{matrix.os || 'ubuntu'}}-latest
strategy:
fail-fast: false
matrix:
rust: [stable, beta, 1.56.0]
rust: [stable, beta, 1.31.0]
include:
- rust: nightly
components: rustc-dev
@ -54,7 +46,7 @@ jobs:
target: ${{matrix.target && format('--target={0}', matrix.target)}}
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{matrix.rust}}
@ -80,80 +72,65 @@ jobs:
examples:
name: Examples
needs: pre_ci
if: needs.pre_ci.outputs.continue
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
- run: cargo check --manifest-path examples/dump-syntax/Cargo.toml
- run: cargo check --manifest-path examples/heapsize/example/Cargo.toml
- run: cargo check --manifest-path examples/lazy-static/example/Cargo.toml
- run: cargo check --manifest-path examples/trace-var/example/Cargo.toml
doc:
name: Documentation
needs: pre_ci
if: needs.pre_ci.outputs.continue
docs:
name: Docs
runs-on: ubuntu-latest
timeout-minutes: 45
env:
RUSTDOCFLAGS: -Dwarnings
RUSTDOCFLAGS: --cfg=doc_cfg -Dbroken_intra_doc_links
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
- uses: dtolnay/install@cargo-docs-rs
- run: cargo docs-rs
- run: cargo docs-rs --manifest-path json/Cargo.toml
- run: cargo test --all-features --doc
- run: cargo doc --all-features
codegen:
name: Codegen
needs: pre_ci
if: needs.pre_ci.outputs.continue
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@stable
- run: cargo run --manifest-path codegen/Cargo.toml
- run: cd codegen && cargo run
- run: git diff --exit-code
minimal:
msrv:
name: Minimal versions
needs: pre_ci
if: needs.pre_ci.outputs.continue
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
- run: cargo generate-lockfile -Z minimal-versions
- run: cargo check --all-features --locked
- run: cargo update -Z minimal-versions
- run: cargo check --all-features
fuzz:
name: Fuzz
needs: pre_ci
if: needs.pre_ci.outputs.continue
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
- uses: dtolnay/install@cargo-fuzz
- run: cargo fuzz check
- run: cargo fuzz build -O
miri:
name: Miri
needs: pre_ci
if: needs.pre_ci.outputs.continue
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@miri
- run: cargo miri setup
- run: cargo miri test --all-features
env:
MIRIFLAGS: -Zmiri-strict-provenance
@ -164,12 +141,11 @@ jobs:
if: github.event_name != 'pull_request'
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@nightly
with:
components: clippy,rustc-dev
- run: cargo clippy --all-features --tests --benches -- -Dclippy::all -Dclippy::pedantic
- run: cargo clippy --manifest-path codegen/Cargo.toml -- -Dclippy::all -Dclippy::pedantic
outdated:
name: Outdated
@ -177,7 +153,7 @@ jobs:
if: github.event_name != 'pull_request'
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: dtolnay/install@cargo-outdated
- run: cargo outdated --workspace --exit-code 1
- run: cargo outdated --manifest-path fuzz/Cargo.toml --exit-code 1

View File

@ -19,8 +19,8 @@ ohos_cargo_crate("lib") {
crate_root = "src/lib.rs"
sources = [ "src/lib.rs" ]
edition = "2021"
cargo_pkg_version = "2.0.48"
edition = "2018"
cargo_pkg_version = "1.0.107"
cargo_pkg_authors = "David Tolnay <dtolnay@gmail.com>"
cargo_pkg_name = "syn"
cargo_pkg_description = "Parser for Rust source code"

View File

@ -1,13 +1,14 @@
[package]
name = "syn"
version = "2.0.48" # don't forget to update html_root_url and syn.json
version = "1.0.107" # don't forget to update html_root_url and syn.json
authors = ["David Tolnay <dtolnay@gmail.com>"]
categories = ["development-tools::procedural-macro-helpers", "parser-implementations"]
description = "Parser for Rust source code"
documentation = "https://docs.rs/syn"
edition = "2021"
edition = "2018"
include = [
"/benches/**",
"/build.rs",
"/Cargo.toml",
"/LICENSE-APACHE",
"/LICENSE-MIT",
@ -18,7 +19,7 @@ include = [
keywords = ["macros", "syn"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/syn"
rust-version = "1.56"
rust-version = "1.31"
[features]
default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"]
@ -35,23 +36,23 @@ proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
test = ["syn-test-suite/all-features"]
[dependencies]
proc-macro2 = { version = "1.0.75", default-features = false }
quote = { version = "1.0.35", optional = true, default-features = false }
unicode-ident = "1"
proc-macro2 = { version = "1.0.46", default-features = false }
quote = { version = "1.0", optional = true, default-features = false }
unicode-ident = "1.0"
[dev-dependencies]
anyhow = "1"
automod = "1"
flate2 = "1"
insta = "1"
rayon = "1"
ref-cast = "1"
anyhow = "1.0"
automod = "1.0"
flate2 = "1.0"
insta = "1.0"
rayon = "1.0"
ref-cast = "1.0"
regex = "1.0"
reqwest = { version = "0.11", features = ["blocking"] }
rustversion = "1"
syn-test-suite = { version = "0", path = "tests/features" }
tar = "0.4.16"
termcolor = "1"
walkdir = "2.3.2"
termcolor = "1.0"
walkdir = "2.1"
[lib]
doc-scrape-examples = false
@ -68,7 +69,7 @@ required-features = ["full", "parsing"]
[package.metadata.docs.rs]
all-features = true
targets = ["x86_64-unknown-linux-gnu"]
rustdoc-args = ["--cfg", "doc_cfg", "--generate-link-to-definition"]
rustdoc-args = ["--cfg", "doc_cfg"]
[package.metadata.playground]
features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
@ -84,5 +85,7 @@ members = [
"examples/lazy-static/lazy-static",
"examples/trace-var/example",
"examples/trace-var/trace-var",
"json",
"tests/crates",
"tests/features",
]

View File

@ -174,3 +174,28 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -3,7 +3,7 @@
"Name": "syn",
"License": "Apache License V2.0, MIT",
"License File": "LICENSE-APACHE, LICENSE-MIT",
"Version Number": "2.0.48",
"Version Number": "1.0.107",
"Owner": "fangting12@huawei.com",
"Upstream URL": "https://github.com/dtolnay/syn",
"Description": "A Rust library that provides support for parsing Rust code."

View File

@ -39,14 +39,14 @@ contains some APIs that may be useful more generally.
procedural macros enable only what they need, and do not pay in compile time
for all the rest.
[`syn::File`]: https://docs.rs/syn/2.0/syn/struct.File.html
[`syn::Item`]: https://docs.rs/syn/2.0/syn/enum.Item.html
[`syn::Expr`]: https://docs.rs/syn/2.0/syn/enum.Expr.html
[`syn::Type`]: https://docs.rs/syn/2.0/syn/enum.Type.html
[`syn::DeriveInput`]: https://docs.rs/syn/2.0/syn/struct.DeriveInput.html
[parser functions]: https://docs.rs/syn/2.0/syn/parse/index.html
[`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html
[`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html
[`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html
[`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html
[`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
[parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
*Version requirement: Syn supports rustc 1.56 and up.*
*Version requirement: Syn supports rustc 1.31 and up.*
[*Release notes*](https://github.com/dtolnay/syn/releases)
@ -76,7 +76,7 @@ tokens back to the compiler to compile into the user's crate.
```toml
[dependencies]
syn = "2.0"
syn = "1.0"
quote = "1.0"
[lib]
@ -104,8 +104,9 @@ pub fn my_macro(input: TokenStream) -> TokenStream {
```
The [`heapsize`] example directory shows a complete working implementation of a
derive macro. The example derives a `HeapSize` trait which computes an estimate
of the amount of heap memory owned by a value.
derive macro. It works on any Rust compiler 1.31+. The example derives a
`HeapSize` trait which computes an estimate of the amount of heap memory owned
by a value.
[`heapsize`]: examples/heapsize

View File

@ -4,11 +4,8 @@
#![recursion_limit = "1024"]
#![allow(
clippy::items_after_statements,
clippy::manual_let_else,
clippy::match_like_matches_macro,
clippy::missing_panics_doc,
clippy::must_use_candidate,
clippy::uninlined_format_args
clippy::must_use_candidate
)]
extern crate test;
@ -17,9 +14,10 @@ extern crate test;
#[path = "../tests/macros/mod.rs"]
mod macros;
#[allow(dead_code)]
#[path = "../tests/common/mod.rs"]
mod common;
#[path = "../tests/repo/mod.rs"]
mod repo;
pub mod repo;
use proc_macro2::{Span, TokenStream};
use std::fs;

View File

@ -5,21 +5,14 @@
#![cfg_attr(not(syn_only), feature(rustc_private))]
#![recursion_limit = "1024"]
#![allow(
clippy::arc_with_non_send_sync,
clippy::cast_lossless,
clippy::let_underscore_untyped,
clippy::manual_let_else,
clippy::match_like_matches_macro,
clippy::uninlined_format_args,
clippy::unnecessary_wraps
)]
#![allow(clippy::cast_lossless, clippy::unnecessary_wraps)]
#[macro_use]
#[path = "../tests/macros/mod.rs"]
mod macros;
#[allow(dead_code)]
#[path = "../tests/common/mod.rs"]
mod common;
#[path = "../tests/repo/mod.rs"]
mod repo;
@ -45,7 +38,6 @@ mod syn_parse {
#[cfg(not(syn_only))]
mod librustc_parse {
extern crate rustc_data_structures;
extern crate rustc_driver;
extern crate rustc_error_messages;
extern crate rustc_errors;
extern crate rustc_parse;
@ -54,7 +46,7 @@ mod librustc_parse {
use rustc_data_structures::sync::Lrc;
use rustc_error_messages::FluentBundle;
use rustc_errors::{emitter::Emitter, translation::Translate, DiagCtxt, Diagnostic};
use rustc_errors::{emitter::Emitter, translation::Translate, Diagnostic, Handler};
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{FilePathMapping, SourceMap};
use rustc_span::{edition::Edition, FileName};
@ -79,10 +71,10 @@ mod librustc_parse {
}
rustc_span::create_session_if_not_set_then(Edition::Edition2018, |_| {
let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let emitter = Box::new(SilentEmitter);
let handler = DiagCtxt::with_emitter(emitter);
let sess = ParseSess::with_dcx(handler, source_map);
let handler = Handler::with_emitter(false, None, emitter);
let sess = ParseSess::with_span_handler(handler, cm);
if let Err(diagnostic) = rustc_parse::parse_crate_from_source_str(
FileName::Custom("bench".to_owned()),
content.to_owned(),
@ -99,7 +91,7 @@ mod librustc_parse {
#[cfg(not(syn_only))]
mod read_from_disk {
pub fn bench(content: &str) -> Result<(), ()> {
let _ = content;
_ = content;
Ok(())
}
}
@ -109,13 +101,9 @@ fn exec(mut codepath: impl FnMut(&str) -> Result<(), ()>) -> Duration {
let mut success = 0;
let mut total = 0;
["tests/rust/compiler", "tests/rust/library"]
.iter()
.flat_map(|dir| {
walkdir::WalkDir::new(dir)
.into_iter()
.filter_entry(repo::base_dir_filter)
})
walkdir::WalkDir::new("tests/rust/src")
.into_iter()
.filter_entry(repo::base_dir_filter)
.for_each(|entry| {
let entry = entry.unwrap();
let path = entry.path();

102
build.rs
View File

@ -1,81 +1,51 @@
use std::env;
use std::ffi::OsString;
use std::process::{self, Command, Stdio};
use std::process::Command;
use std::str;
// The rustc-cfg strings below are *not* public API. Please let us know by
// opening a GitHub issue if your build environment requires some way to enable
// these cfgs other than by executing our build script.
fn main() {
println!("cargo:rerun-if-changed=build.rs");
let compiler = match rustc_version() {
Some(compiler) => compiler,
None => return,
};
// Note: add "/build.rs" to package.include in Cargo.toml if adding any
// conditional compilation within the library.
if compiler.minor < 36 {
println!("cargo:rustc-cfg=syn_omit_await_from_token_macro");
}
if !unstable() {
if compiler.minor < 39 {
println!("cargo:rustc-cfg=syn_no_const_vec_new");
}
if compiler.minor < 40 {
println!("cargo:rustc-cfg=syn_no_non_exhaustive");
}
if compiler.minor < 56 {
println!("cargo:rustc-cfg=syn_no_negative_literal_parse");
}
if !compiler.nightly {
println!("cargo:rustc-cfg=syn_disable_nightly_tests");
}
}
fn unstable() -> bool {
let rustc = cargo_env_var("RUSTC");
// Pick up Cargo rustc configuration.
let mut cmd = if let Some(wrapper) = env::var_os("RUSTC_WRAPPER") {
let mut cmd = Command::new(wrapper);
// The wrapper's first argument is supposed to be the path to rustc.
cmd.arg(rustc);
cmd
} else {
Command::new(rustc)
};
cmd.stdin(Stdio::null());
cmd.stdout(Stdio::null());
cmd.stderr(Stdio::null());
cmd.arg("-");
// Find out whether this is a nightly or dev build.
cmd.env_remove("RUSTC_BOOTSTRAP");
cmd.arg("-Zcrate-attr=feature(rustc_private)");
// Pass `-Zunpretty` to terminate earlier without writing out any "emit"
// files. Use `expanded` to proceed far enough to actually apply crate
// attrs. With `unpretty=normal` or `--print`, not enough compilation
// happens to recognize that the feature attribute is unstable.
cmd.arg("-Zunpretty=expanded");
// Set #![no_std] to bypass loading libstd.rlib. This is a 7.5% speedup.
cmd.arg("-Zcrate-attr=no_std");
cmd.arg("--crate-type=lib");
cmd.arg("--edition=2021");
if let Some(target) = env::var_os("TARGET") {
cmd.arg("--target").arg(target);
}
// If Cargo wants to set RUSTFLAGS, use that.
if let Ok(rustflags) = env::var("CARGO_ENCODED_RUSTFLAGS") {
if !rustflags.is_empty() {
for arg in rustflags.split('\x1f') {
cmd.arg(arg);
}
}
}
// This rustc invocation should take around 0.03 seconds.
match cmd.status() {
Ok(status) => status.success(),
Err(_) => false,
}
struct Compiler {
minor: u32,
nightly: bool,
}
fn cargo_env_var(key: &str) -> OsString {
env::var_os(key).unwrap_or_else(|| {
eprintln!(
"Environment variable ${} is not set during execution of build script",
key,
);
process::exit(1);
})
fn rustc_version() -> Option<Compiler> {
let rustc = env::var_os("RUSTC")?;
let output = Command::new(rustc).arg("--version").output().ok()?;
let version = str::from_utf8(&output.stdout).ok()?;
let mut pieces = version.split('.');
if pieces.next() != Some("rustc 1") {
return None;
}
let minor = pieces.next()?.parse().ok()?;
let nightly = version.contains("nightly") || version.ends_with("-dev");
Some(Compiler { minor, nightly })
}

View File

@ -2,25 +2,24 @@
name = "syn-internal-codegen"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>", "Nika Layzell <nika@thelayzells.com>"]
edition = "2021"
edition = "2018"
publish = false # this is an internal crate which should never be published
[dependencies]
anyhow = "1"
anyhow = "1.0"
color-backtrace = "0.4"
indexmap = { version = "2", features = ["serde"] }
indexmap = { version = "1.0", features = ["serde-1"] }
inflections = "1.1"
prettyplease = "0.2.3"
prettyplease = "0.1"
proc-macro2 = { version = "1.0.20", features = ["span-locations"] }
quote = "1"
semver = { version = "1", features = ["serde"] }
serde = "1.0.88"
serde_derive = "1.0.88"
quote = "1.0"
semver = { version = "1.0", features = ["serde"] }
serde = { version = "1.0.88", features = ["derive"] }
serde_json = "1.0.38"
syn = { version = "2", features = ["derive", "full", "parsing", "printing"], default-features = false }
syn-codegen = { path = "../json" }
thiserror = "1"
syn = { version = "1.0", features = ["derive", "parsing", "printing", "full"], default-features = false }
thiserror = "1.0"
toml = "0.5"
[workspace]

View File

@ -2,28 +2,11 @@ use proc_macro2::TokenStream;
use quote::quote;
use syn_codegen::Features;
pub fn features<'a>(
features: &Features,
overriding_cfg: impl Into<Option<&'a str>>,
) -> TokenStream {
pub fn features(features: &Features) -> TokenStream {
let features = &features.any;
let cfg = match features.len() {
0 => None,
1 => Some(quote! { cfg(feature = #(#features)*) }),
_ => Some(quote! { cfg(any(#(feature = #features),*)) }),
};
match (cfg, overriding_cfg.into()) {
(Some(cfg), Some(overriding_cfg)) => quote! {
#[#cfg]
#[cfg_attr(doc_cfg, doc(cfg(feature = #overriding_cfg)))]
},
(Some(cfg), None) => quote! {
#[#cfg]
#[cfg_attr(doc_cfg, doc(#cfg))]
},
(None, Some(overriding_cfg)) => quote! {
#[cfg_attr(doc_cfg, doc(cfg(feature = #overriding_cfg)))]
},
(None, None) => TokenStream::new(),
match features.len() {
0 => quote!(),
1 => quote!(#[cfg(feature = #(#features)*)]),
_ => quote!(#[cfg(any(#(feature = #features),*))]),
}
}

View File

@ -4,14 +4,13 @@ use proc_macro2::{Ident, Span, TokenStream};
use quote::{format_ident, quote};
use syn_codegen::{Data, Definitions, Node, Type};
const CLONE_SRC: &str = "src/gen/clone.rs";
const DEBUG_SRC: &str = "../src/gen/clone.rs";
fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream {
let type_name = &node.ident;
let ident = Ident::new(type_name, Span::call_site());
match &node.data {
Data::Enum(variants) if variants.is_empty() => quote!(match *self {}),
Data::Enum(variants) => {
let arms = variants.iter().map(|(variant_name, fields)| {
let variant = Ident::new(variant_name, Span::call_site());
@ -41,13 +40,18 @@ fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream {
}
}
});
let nonexhaustive = if node.ident == "Expr" {
let nonexhaustive = if node.exhaustive {
None
} else if node.ident == "Expr" {
Some(quote! {
#[cfg(not(feature = "full"))]
#[cfg(any(syn_no_non_exhaustive, not(feature = "full")))]
_ => unreachable!(),
})
} else {
None
Some(quote! {
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
})
};
quote! {
match self {
@ -76,7 +80,7 @@ fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
}
let ident = Ident::new(&node.ident, Span::call_site());
let cfg_features = cfg::features(&node.features, "clone-impls");
let cfg_features = cfg::features(&node.features);
let copy = node.ident == "AttrStyle"
|| node.ident == "BinOp"
@ -86,8 +90,10 @@ fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
if copy {
return quote! {
#cfg_features
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Copy for #ident {}
#cfg_features
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for #ident {
fn clone(&self) -> Self {
*self
@ -100,6 +106,7 @@ fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
quote! {
#cfg_features
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for #ident {
fn clone(&self) -> Self {
#body
@ -115,7 +122,7 @@ pub fn generate(defs: &Definitions) -> Result<()> {
}
file::write(
CLONE_SRC,
DEBUG_SRC,
quote! {
#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]

View File

@ -2,57 +2,16 @@ use crate::{cfg, file, lookup};
use anyhow::Result;
use proc_macro2::{Ident, Span, TokenStream};
use quote::{format_ident, quote};
use std::collections::BTreeSet as Set;
use syn_codegen::{Data, Definitions, Node, Type};
const DEBUG_SRC: &str = "src/gen/debug.rs";
const DEBUG_SRC: &str = "../src/gen/debug.rs";
fn syntax_tree_enum<'a>(
enum_name: &str,
variant_name: &str,
fields: &'a [Type],
) -> Option<&'a str> {
if fields.len() != 1 {
return None;
}
const WHITELIST: &[(&str, &str)] = &[
("Meta", "Path"),
("Pat", "Const"),
("Pat", "Lit"),
("Pat", "Macro"),
("Pat", "Path"),
("Pat", "Range"),
("PathArguments", "AngleBracketed"),
("PathArguments", "Parenthesized"),
("Stmt", "Local"),
("TypeParamBound", "Lifetime"),
("Visibility", "Public"),
("Visibility", "Restricted"),
];
match &fields[0] {
Type::Syn(ty)
if WHITELIST.contains(&(enum_name, variant_name))
|| enum_name.to_owned() + variant_name == *ty =>
{
Some(ty)
}
_ => None,
}
}
fn expand_impl_body(
defs: &Definitions,
node: &Node,
syntax_tree_variants: &Set<&str>,
) -> TokenStream {
fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream {
let type_name = &node.ident;
let ident = Ident::new(type_name, Span::call_site());
let is_syntax_tree_variant = syntax_tree_variants.contains(type_name.as_str());
let body = match &node.data {
Data::Enum(variants) if variants.is_empty() => quote!(match *self {}),
match &node.data {
Data::Enum(variants) => {
assert!(!is_syntax_tree_variant);
let arms = variants.iter().map(|(variant_name, fields)| {
let variant = Ident::new(variant_name, Span::call_site());
if fields.is_empty() {
@ -60,6 +19,9 @@ fn expand_impl_body(
#ident::#variant => formatter.write_str(#variant_name),
}
} else {
let pats = (0..fields.len())
.map(|i| format_ident!("v{}", i))
.collect::<Vec<_>>();
let mut cfg = None;
if node.ident == "Expr" {
if let Type::Syn(ty) = &fields[0] {
@ -68,37 +30,30 @@ fn expand_impl_body(
}
}
}
if syntax_tree_enum(type_name, variant_name, fields).is_some() {
quote! {
#cfg
#ident::#variant(v0) => v0.debug(formatter, #variant_name),
}
} else {
let pats = (0..fields.len())
.map(|i| format_ident!("v{}", i))
.collect::<Vec<_>>();
quote! {
#cfg
#ident::#variant(#(#pats),*) => {
let mut formatter = formatter.debug_tuple(#variant_name);
#(formatter.field(#pats);)*
formatter.finish()
}
quote! {
#cfg
#ident::#variant(#(#pats),*) => {
let mut formatter = formatter.debug_tuple(#variant_name);
#(formatter.field(#pats);)*
formatter.finish()
}
}
}
});
let nonexhaustive = if node.ident == "Expr" {
let nonexhaustive = if node.exhaustive {
None
} else if node.ident == "Expr" {
Some(quote! {
#[cfg(not(feature = "full"))]
#[cfg(any(syn_no_non_exhaustive, not(feature = "full")))]
_ => unreachable!(),
})
} else {
None
Some(quote! {
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
})
};
let prefix = format!("{}::", type_name);
quote! {
formatter.write_str(#prefix)?;
match self {
#(#arms)*
#nonexhaustive
@ -106,11 +61,6 @@ fn expand_impl_body(
}
}
Data::Struct(fields) => {
let type_name = if is_syntax_tree_variant {
quote!(name)
} else {
quote!(#type_name)
};
let fields = fields.keys().map(|f| {
let ident = Ident::new(f, Span::call_site());
quote! {
@ -124,40 +74,24 @@ fn expand_impl_body(
}
}
Data::Private => unreachable!(),
};
if is_syntax_tree_variant {
quote! {
impl #ident {
fn debug(&self, formatter: &mut fmt::Formatter, name: &str) -> fmt::Result {
#body
}
}
self.debug(formatter, #type_name)
}
} else {
body
}
}
fn expand_impl(defs: &Definitions, node: &Node, syntax_tree_variants: &Set<&str>) -> TokenStream {
fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
let manual_debug = node.data == Data::Private || node.ident == "LitBool";
if manual_debug {
return TokenStream::new();
}
let ident = Ident::new(&node.ident, Span::call_site());
let cfg_features = cfg::features(&node.features, "extra-traits");
let body = expand_impl_body(defs, node, syntax_tree_variants);
let formatter = match &node.data {
Data::Enum(variants) if variants.is_empty() => quote!(_formatter),
_ => quote!(formatter),
};
let cfg_features = cfg::features(&node.features);
let body = expand_impl_body(defs, node);
quote! {
#cfg_features
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for #ident {
fn fmt(&self, #formatter: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
#body
}
}
@ -165,21 +99,9 @@ fn expand_impl(defs: &Definitions, node: &Node, syntax_tree_variants: &Set<&str>
}
pub fn generate(defs: &Definitions) -> Result<()> {
let mut syntax_tree_variants = Set::new();
for node in &defs.types {
if let Data::Enum(variants) = &node.data {
let enum_name = &node.ident;
for (variant_name, fields) in variants {
if let Some(inner) = syntax_tree_enum(enum_name, variant_name, fields) {
syntax_tree_variants.insert(inner);
}
}
}
}
let mut impls = TokenStream::new();
for node in &defs.types {
impls.extend(expand_impl(defs, node, &syntax_tree_variants));
impls.extend(expand_impl(defs, node));
}
file::write(

View File

@ -4,10 +4,11 @@ use proc_macro2::{Ident, Span, TokenStream};
use quote::{format_ident, quote};
use syn_codegen::{Data, Definitions, Node, Type};
const EQ_SRC: &str = "src/gen/eq.rs";
const DEBUG_SRC: &str = "../src/gen/eq.rs";
fn always_eq(field_type: &Type) -> bool {
match field_type {
Type::Syn(node) => node == "Reserved",
Type::Ext(ty) => ty == "Span",
Type::Token(_) | Type::Group(_) => true,
Type::Box(inner) => always_eq(inner),
@ -21,7 +22,6 @@ fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream {
let ident = Ident::new(type_name, Span::call_site());
match &node.data {
Data::Enum(variants) if variants.is_empty() => quote!(match *self {}),
Data::Enum(variants) => {
let arms = variants.iter().map(|(variant_name, fields)| {
let variant = Ident::new(variant_name, Span::call_site());
@ -72,15 +72,10 @@ fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream {
}
}
});
let fallthrough = if variants.len() == 1 {
None
} else {
Some(quote!(_ => false,))
};
quote! {
match (self, other) {
#(#arms)*
#fallthrough
_ => false,
}
}
}
@ -114,10 +109,11 @@ fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
}
let ident = Ident::new(&node.ident, Span::call_site());
let cfg_features = cfg::features(&node.features, "extra-traits");
let cfg_features = cfg::features(&node.features);
let eq = quote! {
#cfg_features
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for #ident {}
};
@ -127,16 +123,17 @@ fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
}
let body = expand_impl_body(defs, node);
let other = match &node.data {
Data::Enum(variants) if variants.is_empty() => quote!(_other),
Data::Struct(fields) if fields.values().all(always_eq) => quote!(_other),
_ => quote!(other),
let other = if body.to_string() == "true" {
quote!(_other)
} else {
quote!(other)
};
quote! {
#eq
#cfg_features
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for #ident {
fn eq(&self, #other: &Self) -> bool {
#body
@ -152,7 +149,7 @@ pub fn generate(defs: &Definitions) -> Result<()> {
}
file::write(
EQ_SRC,
DEBUG_SRC,
quote! {
#[cfg(any(feature = "derive", feature = "full"))]
use crate::tt::TokenStreamHelper;

View File

@ -1,11 +1,10 @@
use crate::workspace_path;
use anyhow::Result;
use proc_macro2::TokenStream;
use std::fs;
use std::io::Write;
use std::path::Path;
pub fn write(relative_to_workspace_root: impl AsRef<Path>, content: TokenStream) -> Result<()> {
pub fn write<P: AsRef<Path>>(path: P, content: TokenStream) -> Result<()> {
let mut formatted = Vec::new();
writeln!(
formatted,
@ -18,8 +17,7 @@ pub fn write(relative_to_workspace_root: impl AsRef<Path>, content: TokenStream)
let pretty = prettyplease::unparse(&syntax_tree);
write!(formatted, "{}", pretty)?;
let path = workspace_path::get(relative_to_workspace_root);
if path.is_file() && fs::read(&path)? == formatted {
if path.as_ref().is_file() && fs::read(&path)? == formatted {
return Ok(());
}

View File

@ -5,7 +5,7 @@ use quote::{format_ident, quote};
use syn::Index;
use syn_codegen::{Data, Definitions, Features, Node, Type};
const FOLD_SRC: &str = "src/gen/fold.rs";
const FOLD_SRC: &str = "../src/gen/fold.rs";
fn simple_visit(item: &str, name: &TokenStream) -> TokenStream {
let ident = gen::under_name(item);
@ -62,6 +62,29 @@ fn visit(
(#code)
})
}
Type::Token(t) => {
let repr = &defs.tokens[t];
let is_keyword = repr.chars().next().unwrap().is_alphabetic();
let spans = if is_keyword {
quote!(span)
} else {
quote!(spans)
};
let ty = if repr == "await" {
quote!(crate::token::Await)
} else {
syn::parse_str(&format!("Token![{}]", repr)).unwrap()
};
Some(quote! {
#ty(tokens_helper(f, &#name.#spans))
})
}
Type::Group(t) => {
let ty = Ident::new(t, Span::call_site());
Some(quote! {
#ty(tokens_helper(f, &#name.span))
})
}
Type::Syn(t) => {
fn requires_full(features: &Features) -> bool {
features.any.contains("full") && features.any.len() == 1
@ -74,7 +97,7 @@ fn visit(
Some(res)
}
Type::Ext(t) if gen::TERMINAL_TYPES.contains(&&t[..]) => Some(simple_visit(t, name)),
Type::Ext(_) | Type::Std(_) | Type::Token(_) | Type::Group(_) => None,
Type::Ext(_) | Type::Std(_) => None,
}
}
@ -128,9 +151,19 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
}
}
let nonexhaustive = if s.exhaustive {
None
} else {
Some(quote! {
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
})
};
fold_impl.extend(quote! {
match node {
#fold_variants
#nonexhaustive
}
});
}
@ -138,17 +171,32 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
let mut fold_fields = TokenStream::new();
for (field, ty) in fields {
let id = Ident::new(field, Span::call_site());
let id = Ident::new(&field, Span::call_site());
let ref_toks = quote!(node.#id);
let fold = visit(ty, &s.features, defs, &ref_toks).unwrap_or(ref_toks);
if let Type::Syn(ty) = ty {
if ty == "Reserved" {
fold_fields.extend(quote! {
#id: #ref_toks,
});
continue;
}
}
let fold = visit(&ty, &s.features, defs, &ref_toks).unwrap_or(ref_toks);
fold_fields.extend(quote! {
#id: #fold,
});
}
if fields.is_empty() {
if !fields.is_empty() {
fold_impl.extend(quote! {
#ty {
#fold_fields
}
})
} else {
if ty == "Ident" {
fold_impl.extend(quote! {
let mut node = node;
@ -159,12 +207,6 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
fold_impl.extend(quote! {
node
});
} else {
fold_impl.extend(quote! {
#ty {
#fold_fields
}
});
}
}
Data::Private => {
@ -216,14 +258,12 @@ pub fn generate(defs: &Definitions) -> Result<()> {
quote! {
// Unreachable code is generated sometimes without the full feature.
#![allow(unreachable_code, unused_variables)]
#![allow(
clippy::match_wildcard_for_single_variants,
clippy::needless_match,
clippy::needless_pass_by_ref_mut,
)]
#![allow(clippy::match_wildcard_for_single_variants)]
#[cfg(any(feature = "full", feature = "derive"))]
use crate::gen::helper::fold::*;
#[cfg(any(feature = "full", feature = "derive"))]
use crate::token::{Brace, Bracket, Group, Paren};
use crate::*;
use proc_macro2::Span;
@ -234,6 +274,8 @@ pub fn generate(defs: &Definitions) -> Result<()> {
/// See the [module documentation] for details.
///
/// [module documentation]: self
///
/// *This trait is available only if Syn is built with the `"fold"` feature.*
pub trait Fold {
#traits
}

View File

@ -14,9 +14,9 @@ pub fn traverse(
node: fn(&mut TokenStream, &mut TokenStream, &Node, &Definitions),
) -> (TokenStream, TokenStream) {
let mut types = defs.types.clone();
for &terminal in TERMINAL_TYPES {
for terminal in TERMINAL_TYPES {
types.push(Node {
ident: terminal.to_owned(),
ident: terminal.to_string(),
features: Features::default(),
data: Data::Private,
exhaustive: true,
@ -27,7 +27,10 @@ pub fn traverse(
let mut traits = TokenStream::new();
let mut impls = TokenStream::new();
for s in types {
let features = cfg::features(&s.features, None);
if s.ident == "Reserved" {
continue;
}
let features = cfg::features(&s.features);
traits.extend(features.clone());
impls.extend(features);
node(&mut traits, &mut impls, &s, defs);

View File

@ -4,10 +4,11 @@ use proc_macro2::{Ident, Span, TokenStream};
use quote::{format_ident, quote};
use syn_codegen::{Data, Definitions, Node, Type};
const HASH_SRC: &str = "src/gen/hash.rs";
const DEBUG_SRC: &str = "../src/gen/hash.rs";
fn skip(field_type: &Type) -> bool {
match field_type {
Type::Syn(node) => node == "Reserved",
Type::Ext(ty) => ty == "Span",
Type::Token(_) | Type::Group(_) => true,
Type::Box(inner) => skip(inner),
@ -21,13 +22,12 @@ fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream {
let ident = Ident::new(type_name, Span::call_site());
match &node.data {
Data::Enum(variants) if variants.is_empty() => quote!(match *self {}),
Data::Enum(variants) => {
let arms = variants
.iter()
.enumerate()
.map(|(i, (variant_name, fields))| {
let i = u8::try_from(i).unwrap();
let i = i as u8;
let variant = Ident::new(variant_name, Span::call_site());
if fields.is_empty() {
quote! {
@ -76,13 +76,18 @@ fn expand_impl_body(defs: &Definitions, node: &Node) -> TokenStream {
}
}
});
let nonexhaustive = if node.ident == "Expr" {
let nonexhaustive = if node.exhaustive {
None
} else if node.ident == "Expr" {
Some(quote! {
#[cfg(not(feature = "full"))]
#[cfg(any(syn_no_non_exhaustive, not(feature = "full")))]
_ => unreachable!(),
})
} else {
None
Some(quote! {
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
})
};
quote! {
match self {
@ -123,20 +128,20 @@ fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
}
let ident = Ident::new(&node.ident, Span::call_site());
let cfg_features = cfg::features(&node.features, "extra-traits");
let cfg_features = cfg::features(&node.features);
let body = expand_impl_body(defs, node);
let hasher = match &node.data {
Data::Struct(_) if body.is_empty() => quote!(_state),
Data::Enum(variants) if variants.is_empty() => quote!(_state),
_ => quote!(state),
let state = if body.is_empty() {
quote!(_state)
} else {
quote!(state)
};
quote! {
#cfg_features
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Hash for #ident {
fn hash<H>(&self, #hasher: &mut H)
fn hash<H>(&self, #state: &mut H)
where
H: Hasher,
{
@ -153,7 +158,7 @@ pub fn generate(defs: &Definitions) -> Result<()> {
}
file::write(
HASH_SRC,
DEBUG_SRC,
quote! {
#[cfg(any(feature = "derive", feature = "full"))]
use crate::tt::TokenStreamHelper;

View File

@ -1,6 +1,6 @@
use crate::workspace_path;
use anyhow::Result;
use std::fs;
use std::path::Path;
use syn_codegen::Definitions;
pub fn generate(defs: &Definitions) -> Result<()> {
@ -10,7 +10,8 @@ pub fn generate(defs: &Definitions) -> Result<()> {
let check: Definitions = serde_json::from_str(&j)?;
assert_eq!(*defs, check);
let json_path = workspace_path::get("syn.json");
let codegen_root = Path::new(env!("CARGO_MANIFEST_DIR"));
let json_path = codegen_root.join("../syn.json");
fs::write(json_path, j)?;
Ok(())

View File

@ -9,14 +9,7 @@
// Finally this crate generates the Visit, VisitMut, and Fold traits in Syn
// programmatically from the syntax tree description.
#![allow(
clippy::items_after_statements,
clippy::manual_let_else,
clippy::match_like_matches_macro,
clippy::similar_names,
clippy::too_many_lines,
clippy::uninlined_format_args
)]
#![allow(clippy::needless_pass_by_value)]
mod cfg;
mod clone;
@ -35,7 +28,6 @@ mod snapshot;
mod version;
mod visit;
mod visit_mut;
mod workspace_path;
fn main() -> anyhow::Result<()> {
color_backtrace::install();

View File

@ -1,4 +1,4 @@
use crate::{version, workspace_path};
use crate::version;
use anyhow::{bail, Result};
use indexmap::IndexMap;
use quote::quote;
@ -8,46 +8,35 @@ use std::path::{Path, PathBuf};
use syn::parse::{Error, Parser};
use syn::{
parse_quote, Attribute, Data, DataEnum, DataStruct, DeriveInput, Fields, GenericArgument,
Ident, Item, PathArguments, TypeMacro, TypePath, TypeTuple, UseTree, Visibility,
Ident, Item, PathArguments, TypeMacro, TypePath, TypeTuple, Visibility,
};
use syn_codegen as types;
use thiserror::Error;
const SYN_CRATE_ROOT: &str = "src/lib.rs";
const TOKEN_SRC: &str = "src/token.rs";
const SYN_CRATE_ROOT: &str = "../src/lib.rs";
const TOKEN_SRC: &str = "../src/token.rs";
const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"];
const EXTRA_TYPES: &[&str] = &["Lifetime"];
struct Lookup {
items: BTreeMap<Ident, AstItem>,
// "+" => "Add"
tokens: BTreeMap<String, String>,
// "PatLit" => "ExprLit"
aliases: BTreeMap<Ident, Ident>,
}
// NOTE: BTreeMap is used here instead of HashMap to have deterministic output.
type ItemLookup = BTreeMap<Ident, AstItem>;
type TokenLookup = BTreeMap<String, String>;
/// Parse the contents of `src` and return a list of AST types.
pub fn parse() -> Result<types::Definitions> {
let tokens = load_token_file(TOKEN_SRC)?;
let mut item_lookup = BTreeMap::new();
load_file(SYN_CRATE_ROOT, &[], &mut item_lookup)?;
let mut lookup = Lookup {
items: BTreeMap::new(),
tokens,
aliases: BTreeMap::new(),
};
load_file(SYN_CRATE_ROOT, &[], &mut lookup)?;
let token_lookup = load_token_file(TOKEN_SRC)?;
let version = version::get()?;
let types = lookup
.items
let types = item_lookup
.values()
.map(|item| introspect_item(item, &lookup))
.map(|item| introspect_item(item, &item_lookup, &token_lookup))
.collect();
let tokens = lookup
.tokens
let tokens = token_lookup
.into_iter()
.map(|(name, ty)| (ty, name))
.collect();
@ -65,23 +54,22 @@ pub struct AstItem {
features: Vec<Attribute>,
}
fn introspect_item(item: &AstItem, lookup: &Lookup) -> types::Node {
fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
let features = introspect_features(&item.features);
match &item.ast.data {
Data::Enum(data) => types::Node {
Data::Enum(ref data) => types::Node {
ident: item.ast.ident.to_string(),
features,
data: types::Data::Enum(introspect_enum(data, lookup)),
exhaustive: !(is_non_exhaustive(&item.ast.attrs)
|| data.variants.iter().any(|v| is_doc_hidden(&v.attrs))),
data: types::Data::Enum(introspect_enum(data, items, tokens)),
exhaustive: !data.variants.iter().any(|v| is_doc_hidden(&v.attrs)),
},
Data::Struct(data) => types::Node {
Data::Struct(ref data) => types::Node {
ident: item.ast.ident.to_string(),
features,
data: {
if data.fields.iter().all(|f| is_pub(&f.vis)) {
types::Data::Struct(introspect_struct(data, lookup))
types::Data::Struct(introspect_struct(data, items, tokens))
} else {
types::Data::Private
}
@ -92,7 +80,7 @@ fn introspect_item(item: &AstItem, lookup: &Lookup) -> types::Node {
}
}
fn introspect_enum(item: &DataEnum, lookup: &Lookup) -> types::Variants {
fn introspect_enum(item: &DataEnum, items: &ItemLookup, tokens: &TokenLookup) -> types::Variants {
item.variants
.iter()
.filter_map(|variant| {
@ -103,17 +91,17 @@ fn introspect_enum(item: &DataEnum, lookup: &Lookup) -> types::Variants {
Fields::Unnamed(fields) => fields
.unnamed
.iter()
.map(|field| introspect_type(&field.ty, lookup))
.map(|field| introspect_type(&field.ty, items, tokens))
.collect(),
Fields::Unit => vec![],
Fields::Named(_) => panic!("Enum representation not supported"),
_ => panic!("Enum representation not supported"),
};
Some((variant.ident.to_string(), fields))
})
.collect()
}
fn introspect_struct(item: &DataStruct, lookup: &Lookup) -> types::Fields {
fn introspect_struct(item: &DataStruct, items: &ItemLookup, tokens: &TokenLookup) -> types::Fields {
match &item.fields {
Fields::Named(fields) => fields
.named
@ -121,71 +109,74 @@ fn introspect_struct(item: &DataStruct, lookup: &Lookup) -> types::Fields {
.map(|field| {
(
field.ident.as_ref().unwrap().to_string(),
introspect_type(&field.ty, lookup),
introspect_type(&field.ty, items, tokens),
)
})
.collect(),
Fields::Unit => IndexMap::new(),
Fields::Unnamed(_) => panic!("Struct representation not supported"),
_ => panic!("Struct representation not supported"),
}
}
fn introspect_type(item: &syn::Type, lookup: &Lookup) -> types::Type {
fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type {
match item {
syn::Type::Path(TypePath { qself: None, path }) => {
syn::Type::Path(TypePath {
qself: None,
ref path,
}) => {
let last = path.segments.last().unwrap();
let string = last.ident.to_string();
match string.as_str() {
"Option" => {
let nested = introspect_type(first_arg(&last.arguments), lookup);
let nested = introspect_type(first_arg(&last.arguments), items, tokens);
types::Type::Option(Box::new(nested))
}
"Punctuated" => {
let nested = introspect_type(first_arg(&last.arguments), lookup);
let types::Type::Token(punct) =
introspect_type(last_arg(&last.arguments), lookup)
else {
panic!()
let nested = introspect_type(first_arg(&last.arguments), items, tokens);
let punct = match introspect_type(last_arg(&last.arguments), items, tokens) {
types::Type::Token(s) => s,
_ => panic!(),
};
types::Type::Punctuated(types::Punctuated {
element: Box::new(nested),
punct,
})
}
"Vec" => {
let nested = introspect_type(first_arg(&last.arguments), lookup);
let nested = introspect_type(first_arg(&last.arguments), items, tokens);
types::Type::Vec(Box::new(nested))
}
"Box" => {
let nested = introspect_type(first_arg(&last.arguments), lookup);
let nested = introspect_type(first_arg(&last.arguments), items, tokens);
types::Type::Box(Box::new(nested))
}
"Brace" | "Bracket" | "Paren" | "Group" => types::Type::Group(string),
"TokenStream" | "Literal" | "Ident" | "Span" => types::Type::Ext(string),
"String" | "u32" | "usize" | "bool" => types::Type::Std(string),
"Await" => types::Type::Token("Await".to_string()),
_ => {
let mut resolved = &last.ident;
while let Some(alias) = lookup.aliases.get(resolved) {
resolved = alias;
}
if lookup.items.get(resolved).is_some() {
types::Type::Syn(resolved.to_string())
if items.get(&last.ident).is_some() || last.ident == "Reserved" {
types::Type::Syn(string)
} else {
unimplemented!("{}", resolved);
unimplemented!("{}", string);
}
}
}
}
syn::Type::Tuple(TypeTuple { elems, .. }) => {
let tys = elems.iter().map(|ty| introspect_type(ty, lookup)).collect();
syn::Type::Tuple(TypeTuple { ref elems, .. }) => {
let tys = elems
.iter()
.map(|ty| introspect_type(&ty, items, tokens))
.collect();
types::Type::Tuple(tys)
}
syn::Type::Macro(TypeMacro { mac })
syn::Type::Macro(TypeMacro { ref mac })
if mac.path.segments.last().unwrap().ident == "Token" =>
{
let content = mac.tokens.to_string();
let ty = lookup.tokens.get(&content).unwrap().to_string();
let ty = tokens.get(&content).unwrap().to_string();
types::Type::Token(ty)
}
@ -197,11 +188,11 @@ fn introspect_features(attrs: &[Attribute]) -> types::Features {
let mut ret = types::Features::default();
for attr in attrs {
if !attr.path().is_ident("cfg") {
if !attr.path.is_ident("cfg") {
continue;
}
let features = attr.parse_args_with(parsing::parse_features).unwrap();
let features = parsing::parse_features.parse2(attr.tokens.clone()).unwrap();
if ret.any.is_empty() {
ret = features;
@ -223,65 +214,60 @@ fn is_pub(vis: &Visibility) -> bool {
}
}
fn is_non_exhaustive(attrs: &[Attribute]) -> bool {
for attr in attrs {
if attr.path().is_ident("non_exhaustive") {
return true;
}
}
false
}
fn is_doc_hidden(attrs: &[Attribute]) -> bool {
for attr in attrs {
if attr.path().is_ident("doc") && attr.parse_args::<parsing::kw::hidden>().is_ok() {
return true;
if attr.path.is_ident("doc") {
if parsing::parse_doc_hidden_attr
.parse2(attr.tokens.clone())
.is_ok()
{
return true;
}
}
}
false
}
fn first_arg(params: &PathArguments) -> &syn::Type {
let data = match params {
PathArguments::AngleBracketed(data) => data,
let data = match *params {
PathArguments::AngleBracketed(ref data) => data,
_ => panic!("Expected at least 1 type argument here"),
};
match data
match *data
.args
.first()
.expect("Expected at least 1 type argument here")
{
GenericArgument::Type(ty) => ty,
GenericArgument::Type(ref ty) => ty,
_ => panic!("Expected at least 1 type argument here"),
}
}
fn last_arg(params: &PathArguments) -> &syn::Type {
let data = match params {
PathArguments::AngleBracketed(data) => data,
let data = match *params {
PathArguments::AngleBracketed(ref data) => data,
_ => panic!("Expected at least 1 type argument here"),
};
match data
match *data
.args
.last()
.expect("Expected at least 1 type argument here")
{
GenericArgument::Type(ty) => ty,
GenericArgument::Type(ref ty) => ty,
_ => panic!("Expected at least 1 type argument here"),
}
}
mod parsing {
use super::AstItem;
use proc_macro2::TokenStream;
use super::{AstItem, TokenLookup};
use proc_macro2::{TokenStream, TokenTree};
use quote::quote;
use std::collections::{BTreeMap, BTreeSet};
use syn::parse::{ParseStream, Result};
use syn::parse::{ParseStream, Parser, Result};
use syn::{
braced, bracketed, parenthesized, parse_quote, token, Attribute, Expr, Ident, Lit, LitStr,
Path, Token,
braced, bracketed, parenthesized, parse_quote, token, Attribute, Ident, LitStr, Path, Token,
};
use syn_codegen as types;
@ -327,18 +313,32 @@ mod parsing {
Ok(res)
}
pub fn ast_enum(input: ParseStream) -> Result<AstItem> {
let attrs = input.call(Attribute::parse_outer)?;
fn no_visit(input: ParseStream) -> bool {
if peek_tag(input, "no_visit") {
input.parse::<Token![#]>().unwrap();
input.parse::<Ident>().unwrap();
true
} else {
false
}
}
pub fn ast_enum(input: ParseStream) -> Result<Option<AstItem>> {
input.call(Attribute::parse_outer)?;
input.parse::<Token![pub]>()?;
input.parse::<Token![enum]>()?;
let ident: Ident = input.parse()?;
let no_visit = no_visit(input);
let rest: TokenStream = input.parse()?;
Ok(AstItem {
ast: syn::parse2(quote! {
#(#attrs)*
pub enum #ident #rest
})?,
features: vec![],
Ok(if no_visit {
None
} else {
Some(AstItem {
ast: syn::parse2(quote! {
pub enum #ident #rest
})?,
features: vec![],
})
})
}
@ -368,7 +368,7 @@ mod parsing {
}
pub fn ast_enum_of_structs(input: ParseStream) -> Result<AstItem> {
let attrs = input.call(Attribute::parse_outer)?;
input.call(Attribute::parse_outer)?;
input.parse::<Token![pub]>()?;
input.parse::<Token![enum]>()?;
let ident: Ident = input.parse()?;
@ -380,18 +380,20 @@ mod parsing {
variants.push(content.call(eos_variant)?);
}
if let Some(ident) = input.parse::<Option<Ident>>()? {
assert_eq!(ident, "do_not_generate_to_tokens");
}
let enum_item = {
let variants = variants.iter().map(|v| {
let attrs = &v.attrs;
let name = &v.name;
if let Some(member) = &v.member {
quote!(#(#attrs)* #name(#member))
} else {
quote!(#(#attrs)* #name)
match v.member {
Some(ref member) => quote!(#(#attrs)* #name(#member)),
None => quote!(#(#attrs)* #name),
}
});
parse_quote! {
#(#attrs)*
pub enum #ident {
#(#variants),*
}
@ -403,26 +405,47 @@ mod parsing {
})
}
pub mod kw {
mod kw {
syn::custom_keyword!(hidden);
syn::custom_keyword!(macro_rules);
syn::custom_keyword!(Token);
}
pub fn parse_token_macro(input: ParseStream) -> Result<BTreeMap<String, String>> {
pub fn parse_token_macro(input: ParseStream) -> Result<TokenLookup> {
input.parse::<TokenTree>()?;
input.parse::<Token![=>]>()?;
let definition;
braced!(definition in input);
definition.call(Attribute::parse_outer)?;
definition.parse::<kw::macro_rules>()?;
definition.parse::<Token![!]>()?;
definition.parse::<kw::Token>()?;
let rules;
braced!(rules in definition);
input.parse::<Token![;]>()?;
let mut tokens = BTreeMap::new();
while !input.is_empty() {
let pattern;
bracketed!(pattern in input);
let token = pattern.parse::<TokenStream>()?.to_string();
input.parse::<Token![=>]>()?;
let expansion;
braced!(expansion in input);
input.parse::<Token![;]>()?;
expansion.parse::<Token![$]>()?;
let path: Path = expansion.parse()?;
let ty = path.segments.last().unwrap().ident.to_string();
tokens.insert(token, ty.to_string());
while !rules.is_empty() {
if rules.peek(Token![$]) {
rules.parse::<Token![$]>()?;
rules.parse::<TokenTree>()?;
rules.parse::<Token![*]>()?;
tokens.insert("await".to_owned(), "Await".to_owned());
} else {
let pattern;
bracketed!(pattern in rules);
let token = pattern.parse::<TokenStream>()?.to_string();
rules.parse::<Token![=>]>()?;
let expansion;
braced!(expansion in rules);
rules.parse::<Token![;]>()?;
expansion.parse::<Token![$]>()?;
let path: Path = expansion.parse()?;
let ty = path.segments.last().unwrap().ident.to_string();
tokens.insert(token, ty.to_string());
}
}
Ok(tokens)
}
@ -440,43 +463,56 @@ mod parsing {
pub fn parse_features(input: ParseStream) -> Result<types::Features> {
let mut features = BTreeSet::new();
let i: Ident = input.fork().parse()?;
let level_1;
parenthesized!(level_1 in input);
let i: Ident = level_1.fork().parse()?;
if i == "any" {
input.parse::<Ident>()?;
level_1.parse::<Ident>()?;
let nested;
parenthesized!(nested in input);
let level_2;
parenthesized!(level_2 in level_1);
while !nested.is_empty() {
features.insert(parse_feature(&nested)?);
while !level_2.is_empty() {
features.insert(parse_feature(&level_2)?);
if !nested.is_empty() {
nested.parse::<Token![,]>()?;
if !level_2.is_empty() {
level_2.parse::<Token![,]>()?;
}
}
} else if i == "feature" {
features.insert(parse_feature(input)?);
assert!(input.is_empty());
features.insert(parse_feature(&level_1)?);
assert!(level_1.is_empty());
} else {
panic!("{:?}", i);
}
assert!(input.is_empty());
Ok(types::Features { any: features })
}
pub fn path_attr(attrs: &[Attribute]) -> Result<Option<&LitStr>> {
pub fn path_attr(attrs: &[Attribute]) -> Result<Option<LitStr>> {
for attr in attrs {
if attr.path().is_ident("path") {
if let Expr::Lit(expr) = &attr.meta.require_name_value()?.value {
if let Lit::Str(lit) = &expr.lit {
return Ok(Some(lit));
}
if attr.path.is_ident("path") {
fn parser(input: ParseStream) -> Result<LitStr> {
input.parse::<Token![=]>()?;
input.parse()
}
let filename = parser.parse2(attr.tokens.clone())?;
return Ok(Some(filename));
}
}
Ok(None)
}
pub fn parse_doc_hidden_attr(input: ParseStream) -> Result<()> {
let content;
parenthesized!(content in input);
content.parse::<kw::hidden>()?;
Ok(())
}
}
fn clone_features(features: &[Attribute]) -> Vec<Attribute> {
@ -487,7 +523,7 @@ fn get_features(attrs: &[Attribute], base: &[Attribute]) -> Vec<Attribute> {
let mut ret = clone_features(base);
for attr in attrs {
if attr.path().is_ident("cfg") {
if attr.path.is_ident("cfg") {
ret.push(parse_quote!(#attr));
}
}
@ -504,12 +540,12 @@ struct LoadFileError {
error: Error,
}
fn load_file(
relative_to_workspace_root: impl AsRef<Path>,
fn load_file<P: AsRef<Path>>(
name: P,
features: &[Attribute],
lookup: &mut Lookup,
lookup: &mut ItemLookup,
) -> Result<()> {
let error = match do_load_file(&relative_to_workspace_root, features, lookup).err() {
let error = match do_load_file(&name, features, lookup).err() {
None => return Ok(()),
Some(error) => error,
};
@ -518,23 +554,23 @@ fn load_file(
let span = error.span().start();
bail!(LoadFileError {
path: relative_to_workspace_root.as_ref().to_owned(),
path: name.as_ref().to_owned(),
line: span.line,
column: span.column + 1,
error,
})
}
fn do_load_file(
relative_to_workspace_root: impl AsRef<Path>,
fn do_load_file<P: AsRef<Path>>(
name: P,
features: &[Attribute],
lookup: &mut Lookup,
lookup: &mut ItemLookup,
) -> Result<()> {
let relative_to_workspace_root = relative_to_workspace_root.as_ref();
let parent = relative_to_workspace_root.parent().expect("no parent path");
let name = name.as_ref();
let parent = name.parent().expect("no parent path");
// Parse the file
let src = fs::read_to_string(workspace_path::get(relative_to_workspace_root))?;
let src = fs::read_to_string(name)?;
let file = syn::parse_file(&src)?;
// Collect all of the interesting AstItems declared in this file or submodules.
@ -567,10 +603,9 @@ fn do_load_file(
// Look up the submodule file, and recursively parse it.
// Only handles same-directory .rs file submodules for now.
let filename = if let Some(filename) = parsing::path_attr(&item.attrs)? {
filename.value()
} else {
format!("{}.rs", item.ident)
let filename = match parsing::path_attr(&item.attrs)? {
Some(filename) => filename.value(),
None => format!("{}.rs", item.ident),
};
let path = parent.join(filename);
load_file(path, &features, lookup)?;
@ -582,24 +617,28 @@ fn do_load_file(
// Try to parse the AstItem declaration out of the item.
let tts = item.mac.tokens.clone();
let mut found = if item.mac.path.is_ident("ast_struct") {
parsing::ast_struct.parse2(tts)
let found = if item.mac.path.is_ident("ast_struct") {
Some(parsing::ast_struct.parse2(tts)?)
} else if item.mac.path.is_ident("ast_enum") {
parsing::ast_enum.parse2(tts)
parsing::ast_enum.parse2(tts)?
} else if item.mac.path.is_ident("ast_enum_of_structs") {
parsing::ast_enum_of_structs.parse2(tts)
Some(parsing::ast_enum_of_structs.parse2(tts)?)
} else {
continue;
}?;
};
// Record our features on the parsed AstItems.
found.features.extend(clone_features(&features));
lookup.items.insert(found.ast.ident.clone(), found);
if let Some(mut item) = found {
if item.ast.ident != "Reserved" {
item.features.extend(clone_features(&features));
lookup.insert(item.ast.ident.clone(), item);
}
}
}
Item::Struct(item) => {
let ident = item.ident;
if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
lookup.items.insert(
lookup.insert(
ident.clone(),
AstItem {
ast: DeriveInput {
@ -618,43 +657,20 @@ fn do_load_file(
);
}
}
Item::Use(item)
if relative_to_workspace_root == Path::new(SYN_CRATE_ROOT)
&& matches!(item.vis, Visibility::Public(_)) =>
{
load_aliases(item.tree, lookup);
}
_ => {}
}
}
Ok(())
}
fn load_aliases(use_tree: UseTree, lookup: &mut Lookup) {
match use_tree {
UseTree::Path(use_tree) => load_aliases(*use_tree.tree, lookup),
UseTree::Rename(use_tree) => {
lookup.aliases.insert(use_tree.rename, use_tree.ident);
}
UseTree::Group(use_tree) => {
for use_tree in use_tree.items {
load_aliases(use_tree, lookup);
}
}
UseTree::Name(_) | UseTree::Glob(_) => {}
}
}
fn load_token_file(
relative_to_workspace_root: impl AsRef<Path>,
) -> Result<BTreeMap<String, String>> {
let path = workspace_path::get(relative_to_workspace_root);
let src = fs::read_to_string(path)?;
fn load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup> {
let name = name.as_ref();
let src = fs::read_to_string(name)?;
let file = syn::parse_file(&src)?;
for item in file.items {
if let Item::Macro(item) = item {
match item.ident {
Some(i) if i == "Token" => {}
Some(ref i) if i == "export_token_macro" => {}
_ => continue,
}
let tokens = item.mac.parse_body_with(parsing::parse_token_macro)?;

View File

@ -1,4 +1,3 @@
use crate::operand::{Borrowed, Operand, Owned};
use crate::{file, lookup};
use anyhow::Result;
use proc_macro2::{Ident, Span, TokenStream};
@ -6,7 +5,7 @@ use quote::{format_ident, quote};
use syn::Index;
use syn_codegen::{Data, Definitions, Node, Type};
const TESTS_DEBUG_SRC: &str = "tests/debug/gen.rs";
const DEBUG_SRC: &str = "../tests/debug/gen.rs";
fn rust_type(ty: &Type) -> TokenStream {
match ty {
@ -56,44 +55,44 @@ fn is_printable(ty: &Type) -> bool {
Type::Box(ty) => is_printable(ty),
Type::Tuple(ty) => ty.iter().any(is_printable),
Type::Token(_) | Type::Group(_) => false,
Type::Syn(_) | Type::Std(_) | Type::Punctuated(_) | Type::Option(_) | Type::Vec(_) => true,
Type::Syn(name) => name != "Reserved",
Type::Std(_) | Type::Punctuated(_) | Type::Option(_) | Type::Vec(_) => true,
}
}
fn format_field(val: &Operand, ty: &Type) -> Option<TokenStream> {
fn format_field(val: &TokenStream, ty: &Type) -> Option<TokenStream> {
if !is_printable(ty) {
return None;
}
let format = match ty {
Type::Option(ty) => {
if let Some(format) = format_field(&Borrowed(quote!(_val)), ty) {
let ty = rust_type(ty);
let val = val.ref_tokens();
quote!({
#[derive(RefCast)]
#[repr(transparent)]
struct Print(Option<#ty>);
impl Debug for Print {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match &self.0 {
Some(_val) => {
formatter.write_str("Some(")?;
Debug::fmt(#format, formatter)?;
formatter.write_str(")")?;
Ok(())
}
None => formatter.write_str("None"),
let inner = quote!(_val);
let format = format_field(&inner, ty).map(|format| {
quote! {
formatter.write_str("(")?;
Debug::fmt(#format, formatter)?;
formatter.write_str(")")?;
}
});
let ty = rust_type(ty);
quote!({
#[derive(RefCast)]
#[repr(transparent)]
struct Print(Option<#ty>);
impl Debug for Print {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
match &self.0 {
Some(#inner) => {
formatter.write_str("Some")?;
#format
Ok(())
}
None => formatter.write_str("None"),
}
}
Print::ref_cast(#val)
})
} else {
let val = val.tokens();
quote! {
&super::Option { present: #val.is_some() }
}
}
Print::ref_cast(#val)
})
}
Type::Tuple(ty) => {
let printable: Vec<TokenStream> = ty
@ -101,9 +100,8 @@ fn format_field(val: &Operand, ty: &Type) -> Option<TokenStream> {
.enumerate()
.filter_map(|(i, ty)| {
let index = Index::from(i);
let val = val.tokens();
let inner = Owned(quote!(#val.#index));
format_field(&inner, ty)
let val = quote!(&#val.#index);
format_field(&val, ty)
})
.collect();
if printable.len() == 1 {
@ -114,10 +112,7 @@ fn format_field(val: &Operand, ty: &Type) -> Option<TokenStream> {
}
}
}
_ => {
let val = val.ref_tokens();
quote! { Lite(#val) }
}
_ => quote! { Lite(#val) },
};
Some(format)
}
@ -126,43 +121,27 @@ fn syntax_tree_enum<'a>(outer: &str, inner: &str, fields: &'a [Type]) -> Option<
if fields.len() != 1 {
return None;
}
const WHITELIST: &[(&str, &str)] = &[
("Meta", "Path"),
("PathArguments", "AngleBracketed"),
("PathArguments", "Parenthesized"),
("Stmt", "Local"),
("TypeParamBound", "Lifetime"),
("Visibility", "Public"),
("Visibility", "Restricted"),
];
const WHITELIST: &[&str] = &["PathArguments", "Visibility"];
match &fields[0] {
Type::Syn(ty) if WHITELIST.contains(&(outer, inner)) || outer.to_owned() + inner == *ty => {
Some(ty)
}
Type::Syn(ty) if WHITELIST.contains(&outer) || outer.to_owned() + inner == *ty => Some(ty),
_ => None,
}
}
fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand) -> TokenStream {
fn expand_impl_body(defs: &Definitions, node: &Node, name: &str) -> TokenStream {
let ident = Ident::new(&node.ident, Span::call_site());
match &node.data {
Data::Enum(variants) if variants.is_empty() => quote!(unreachable!()),
Data::Enum(variants) => {
let arms = variants.iter().map(|(v, fields)| {
let path = format!("{}::{}", name, v);
let variant = Ident::new(v, Span::call_site());
if fields.is_empty() {
quote! {
syn::#ident::#variant => formatter.write_str(#path),
syn::#ident::#variant => formatter.write_str(#v),
}
} else if let Some(inner) = syntax_tree_enum(name, v, fields) {
let format = expand_impl_body(
defs,
lookup::node(defs, inner),
&path,
&Borrowed(quote!(_val)),
);
let path = format!("{}::{}", name, v);
let format = expand_impl_body(defs, lookup::node(defs, inner), &path);
quote! {
syn::#ident::#variant(_val) => {
#format
@ -178,7 +157,7 @@ fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand)
})
} else {
let ty = &fields[0];
format_field(&Borrowed(val), ty).map(|format| {
format_field(&val, ty).map(|format| {
quote! {
formatter.write_str("(")?;
Debug::fmt(#format, formatter)?;
@ -188,7 +167,7 @@ fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand)
};
quote! {
syn::#ident::#variant(_val) => {
formatter.write_str(#path)?;
formatter.write_str(#v)?;
#format
Ok(())
}
@ -198,14 +177,14 @@ fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand)
let fields = fields.iter().enumerate().filter_map(|(i, ty)| {
let index = format_ident!("_v{}", i);
let val = quote!(#index);
let format = format_field(&Borrowed(val), ty)?;
let format = format_field(&val, ty)?;
Some(quote! {
formatter.field(#format);
})
});
quote! {
syn::#ident::#variant(#(#pats),*) => {
let mut formatter = formatter.debug_tuple(#path);
let mut formatter = formatter.debug_tuple(#v);
#(#fields)*
formatter.finish()
}
@ -217,9 +196,8 @@ fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand)
} else {
Some(quote!(_ => unreachable!()))
};
let val = val.ref_tokens();
quote! {
match #val {
match _val {
#(#arms)*
#nonexhaustive
}
@ -229,65 +207,43 @@ fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand)
let fields = fields.iter().filter_map(|(f, ty)| {
let ident = Ident::new(f, Span::call_site());
if let Type::Option(ty) = ty {
Some(if let Some(format) = format_field(&Owned(quote!(self.0)), ty) {
let val = val.tokens();
let ty = rust_type(ty);
let inner = quote!(_val);
let format = format_field(&inner, ty).map(|format| {
quote! {
if let Some(val) = &#val.#ident {
#[derive(RefCast)]
#[repr(transparent)]
struct Print(#ty);
impl Debug for Print {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Some(")?;
Debug::fmt(#format, formatter)?;
formatter.write_str(")")?;
Ok(())
}
}
formatter.field(#f, Print::ref_cast(val));
}
let #inner = &self.0;
formatter.write_str("(")?;
Debug::fmt(#format, formatter)?;
formatter.write_str(")")?;
}
} else {
let val = val.tokens();
quote! {
if #val.#ident.is_some() {
formatter.field(#f, &Present);
});
let ty = rust_type(ty);
Some(quote! {
if let Some(val) = &_val.#ident {
#[derive(RefCast)]
#[repr(transparent)]
struct Print(#ty);
impl Debug for Print {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Some")?;
#format
Ok(())
}
}
formatter.field(#f, Print::ref_cast(val));
}
})
} else {
let val = val.tokens();
let inner = Owned(quote!(#val.#ident));
let format = format_field(&inner, ty)?;
let val = quote!(&_val.#ident);
let format = format_field(&val, ty)?;
let mut call = quote! {
formatter.field(#f, #format);
};
if node.ident == "Block" && f == "stmts" {
// Format regardless of whether is_empty().
} else if let Type::Vec(_) | Type::Punctuated(_) = ty {
if let Type::Vec(_) | Type::Punctuated(_) = ty {
call = quote! {
if !#val.#ident.is_empty() {
if !_val.#ident.is_empty() {
#call
}
};
} else if let Type::Syn(inner) = ty {
for node in &defs.types {
if node.ident == *inner {
if let Data::Enum(variants) = &node.data {
if variants.get("None").map_or(false, Vec::is_empty) {
let ty = rust_type(ty);
call = quote! {
match #val.#ident {
#ty::None => {}
_ => { #call }
}
};
}
}
break;
}
}
}
Some(call)
}
@ -300,14 +256,12 @@ fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand)
}
Data::Private => {
if node.ident == "LitInt" || node.ident == "LitFloat" {
let val = val.ref_tokens();
quote! {
write!(formatter, "{}", #val)
write!(formatter, "{}", _val)
}
} else {
let val = val.tokens();
quote! {
write!(formatter, "{:?}", #val.value())
write!(formatter, "{:?}", _val.value())
}
}
}
@ -315,30 +269,18 @@ fn expand_impl_body(defs: &Definitions, node: &Node, name: &str, val: &Operand)
}
fn expand_impl(defs: &Definitions, node: &Node) -> TokenStream {
if node.ident == "Reserved" {
return TokenStream::new();
}
let ident = Ident::new(&node.ident, Span::call_site());
let body = expand_impl_body(defs, node, &node.ident, &Owned(quote!(self.value)));
let formatter = match &node.data {
Data::Enum(variants) if variants.is_empty() => quote!(_formatter),
_ => quote!(formatter),
};
let body = expand_impl_body(defs, node, &node.ident);
quote! {
impl Debug for Lite<syn::#ident> {
fn fmt(&self, #formatter: &mut fmt::Formatter) -> fmt::Result {
#body
}
}
}
}
fn expand_token_impl(name: &str, symbol: &str) -> TokenStream {
let ident = Ident::new(name, Span::call_site());
let repr = format!("Token![{}]", symbol);
quote! {
impl Debug for Lite<syn::token::#ident> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(#repr)
let _val = &self.value;
#body
}
}
}
@ -347,22 +289,13 @@ fn expand_token_impl(name: &str, symbol: &str) -> TokenStream {
pub fn generate(defs: &Definitions) -> Result<()> {
let mut impls = TokenStream::new();
for node in &defs.types {
impls.extend(expand_impl(defs, node));
}
for (name, symbol) in &defs.tokens {
impls.extend(expand_token_impl(name, symbol));
impls.extend(expand_impl(&defs, node));
}
file::write(
TESTS_DEBUG_SRC,
DEBUG_SRC,
quote! {
// False positive: https://github.com/rust-lang/rust/issues/78586#issuecomment-1722680482
#![allow(repr_transparent_external_private_fields)]
#![allow(clippy::match_wildcard_for_single_variants)]
use super::{Lite, Present};
use ref_cast::RefCast;
use super::{Lite, RefCast};
use std::fmt::{self, Debug, Display};
#impls

View File

@ -1,11 +1,12 @@
use crate::workspace_path;
use anyhow::Result;
use semver::Version;
use serde_derive::Deserialize;
use serde::Deserialize;
use std::fs;
use std::path::Path;
pub fn get() -> Result<Version> {
let syn_cargo_toml = workspace_path::get("Cargo.toml");
let codegen_root = Path::new(env!("CARGO_MANIFEST_DIR"));
let syn_cargo_toml = codegen_root.join("../Cargo.toml");
let manifest = fs::read_to_string(syn_cargo_toml)?;
let parsed: Manifest = toml::from_str(&manifest)?;
Ok(parsed.package.version)

View File

@ -6,7 +6,7 @@ use quote::{format_ident, quote};
use syn::Index;
use syn_codegen::{Data, Definitions, Features, Node, Type};
const VISIT_SRC: &str = "src/gen/visit.rs";
const VISIT_SRC: &str = "../src/gen/visit.rs";
fn simple_visit(item: &str, name: &Operand) -> TokenStream {
let ident = gen::under_name(item);
@ -51,17 +51,20 @@ fn visit(
let name = name.ref_tokens();
Some(quote! {
for el in Punctuated::pairs(#name) {
let it = el.value();
let (it, p) = el.into_tuple();
#val;
if let Some(p) = p {
tokens_helper(v, &p.spans);
}
}
})
}
Type::Option(t) => {
let it = Borrowed(quote!(it));
let val = visit(t, features, defs, &it)?;
let name = name.ref_tokens();
let name = name.owned_tokens();
Some(quote! {
if let Some(it) = #name {
if let Some(it) = &#name {
#val;
}
})
@ -78,6 +81,25 @@ fn visit(
}
Some(code)
}
Type::Token(t) => {
let name = name.tokens();
let repr = &defs.tokens[t];
let is_keyword = repr.chars().next().unwrap().is_alphabetic();
let spans = if is_keyword {
quote!(span)
} else {
quote!(spans)
};
Some(quote! {
tokens_helper(v, &#name.#spans);
})
}
Type::Group(_) => {
let name = name.tokens();
Some(quote! {
tokens_helper(v, &#name.span);
})
}
Type::Syn(t) => {
fn requires_full(features: &Features) -> bool {
features.any.contains("full") && features.any.len() == 1
@ -90,7 +112,7 @@ fn visit(
Some(res)
}
Type::Ext(t) if gen::TERMINAL_TYPES.contains(&&t[..]) => Some(simple_visit(t, name)),
Type::Ext(_) | Type::Std(_) | Type::Token(_) | Type::Group(_) => None,
Type::Ext(_) | Type::Std(_) => None,
}
}
@ -102,11 +124,6 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
let mut visit_impl = TokenStream::new();
match &s.data {
Data::Enum(variants) if variants.is_empty() => {
visit_impl.extend(quote! {
match *node {}
});
}
Data::Enum(variants) => {
let mut visit_variants = TokenStream::new();
@ -146,17 +163,33 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
}
}
let nonexhaustive = if s.exhaustive {
None
} else {
Some(quote! {
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
})
};
visit_impl.extend(quote! {
match node {
#visit_variants
#nonexhaustive
}
});
}
Data::Struct(fields) => {
for (field, ty) in fields {
let id = Ident::new(field, Span::call_site());
if let Type::Syn(ty) = ty {
if ty == "Reserved" {
continue;
}
}
let id = Ident::new(&field, Span::call_site());
let ref_toks = Owned(quote!(node.#id));
let visit_field = visit(ty, &s.features, defs, &ref_toks)
let visit_field = visit(&ty, &s.features, defs, &ref_toks)
.unwrap_or_else(|| noop_visit(&ref_toks));
visit_impl.extend(quote! {
#visit_field;
@ -201,8 +234,9 @@ pub fn generate(defs: &Definitions) -> Result<()> {
VISIT_SRC,
quote! {
#![allow(unused_variables)]
#![allow(clippy::needless_pass_by_ref_mut)]
#[cfg(any(feature = "full", feature = "derive"))]
use crate::gen::helper::visit::*;
#[cfg(any(feature = "full", feature = "derive"))]
use crate::punctuated::Punctuated;
use crate::*;
@ -219,6 +253,8 @@ pub fn generate(defs: &Definitions) -> Result<()> {
/// See the [module documentation] for details.
///
/// [module documentation]: self
///
/// *This trait is available only if Syn is built with the `"visit"` feature.*
pub trait Visit<'ast> {
#traits
}

View File

@ -6,7 +6,7 @@ use quote::{format_ident, quote};
use syn::Index;
use syn_codegen::{Data, Definitions, Features, Node, Type};
const VISIT_MUT_SRC: &str = "src/gen/visit_mut.rs";
const VISIT_MUT_SRC: &str = "../src/gen/visit_mut.rs";
fn simple_visit(item: &str, name: &Operand) -> TokenStream {
let ident = gen::under_name(item);
@ -50,18 +50,21 @@ fn visit(
let val = visit(&p.element, features, defs, &operand)?;
let name = name.ref_mut_tokens();
Some(quote! {
for mut el in Punctuated::pairs_mut(#name) {
let it = el.value_mut();
for el in Punctuated::pairs_mut(#name) {
let (it, p) = el.into_tuple();
#val;
if let Some(p) = p {
tokens_helper(v, &mut p.spans);
}
}
})
}
Type::Option(t) => {
let it = Borrowed(quote!(it));
let val = visit(t, features, defs, &it)?;
let name = name.ref_mut_tokens();
let name = name.owned_tokens();
Some(quote! {
if let Some(it) = #name {
if let Some(it) = &mut #name {
#val;
}
})
@ -78,6 +81,25 @@ fn visit(
}
Some(code)
}
Type::Token(t) => {
let name = name.tokens();
let repr = &defs.tokens[t];
let is_keyword = repr.chars().next().unwrap().is_alphabetic();
let spans = if is_keyword {
quote!(span)
} else {
quote!(spans)
};
Some(quote! {
tokens_helper(v, &mut #name.#spans);
})
}
Type::Group(_) => {
let name = name.tokens();
Some(quote! {
tokens_helper(v, &mut #name.span);
})
}
Type::Syn(t) => {
fn requires_full(features: &Features) -> bool {
features.any.contains("full") && features.any.len() == 1
@ -90,7 +112,7 @@ fn visit(
Some(res)
}
Type::Ext(t) if gen::TERMINAL_TYPES.contains(&&t[..]) => Some(simple_visit(t, name)),
Type::Ext(_) | Type::Std(_) | Type::Token(_) | Type::Group(_) => None,
Type::Ext(_) | Type::Std(_) => None,
}
}
@ -102,11 +124,6 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
let mut visit_mut_impl = TokenStream::new();
match &s.data {
Data::Enum(variants) if variants.is_empty() => {
visit_mut_impl.extend(quote! {
match *node {}
});
}
Data::Enum(variants) => {
let mut visit_mut_variants = TokenStream::new();
@ -146,17 +163,33 @@ fn node(traits: &mut TokenStream, impls: &mut TokenStream, s: &Node, defs: &Defi
}
}
let nonexhaustive = if s.exhaustive {
None
} else {
Some(quote! {
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
})
};
visit_mut_impl.extend(quote! {
match node {
#visit_mut_variants
#nonexhaustive
}
});
}
Data::Struct(fields) => {
for (field, ty) in fields {
let id = Ident::new(field, Span::call_site());
if let Type::Syn(ty) = ty {
if ty == "Reserved" {
continue;
}
}
let id = Ident::new(&field, Span::call_site());
let ref_toks = Owned(quote!(node.#id));
let visit_mut_field = visit(ty, &s.features, defs, &ref_toks)
let visit_mut_field = visit(&ty, &s.features, defs, &ref_toks)
.unwrap_or_else(|| noop_visit(&ref_toks));
visit_mut_impl.extend(quote! {
#visit_mut_field;
@ -197,8 +230,9 @@ pub fn generate(defs: &Definitions) -> Result<()> {
VISIT_MUT_SRC,
quote! {
#![allow(unused_variables)]
#![allow(clippy::needless_pass_by_ref_mut)]
#[cfg(any(feature = "full", feature = "derive"))]
use crate::gen::helper::visit_mut::*;
#[cfg(any(feature = "full", feature = "derive"))]
use crate::punctuated::Punctuated;
use crate::*;
@ -216,6 +250,8 @@ pub fn generate(defs: &Definitions) -> Result<()> {
/// See the [module documentation] for details.
///
/// [module documentation]: self
///
/// *This trait is available only if Syn is built with the `"visit-mut"` feature.*
pub trait VisitMut {
#traits
}

View File

@ -1,8 +0,0 @@
use std::path::{Path, PathBuf};
pub fn get(relative_to_workspace_root: impl AsRef<Path>) -> PathBuf {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
assert!(path.pop());
path.push(relative_to_workspace_root);
path
}

View File

@ -2,7 +2,7 @@
name = "syn-dev"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
edition = "2021"
edition = "2018"
publish = false
[lib]
@ -14,9 +14,9 @@ path = "main.rs"
name = "syn-dev"
[dependencies]
quote = "1"
quote = "1.0"
[dependencies.syn]
default-features = false
features = ["extra-traits", "full", "parsing", "proc-macro"]
path = ".."
default-features = false
features = ["parsing", "full", "extra-traits", "proc-macro"]

View File

@ -2,14 +2,14 @@
name = "dump-syntax"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
edition = "2021"
edition = "2018"
publish = false
[dependencies]
colored = "2"
proc-macro2 = { version = "1", features = ["span-locations"] }
proc-macro2 = { version = "1.0", features = ["span-locations"] }
[dependencies.syn]
default-features = false
features = ["extra-traits", "full", "parsing"]
path = "../.."
default-features = false
features = ["parsing", "full", "extra-traits"]

View File

@ -13,7 +13,7 @@
//! attrs: [
//! Attribute {
//! pound_token: Pound,
//! style: AttrStyle::Inner(
//! style: Inner(
//! ...
//! }
@ -55,14 +55,14 @@ impl Display for Error {
fn main() {
if let Err(error) = try_main() {
let _ = writeln!(io::stderr(), "{}", error);
_ = writeln!(io::stderr(), "{}", error);
process::exit(1);
}
}
fn try_main() -> Result<(), Error> {
let mut args = env::args_os();
let _ = args.next(); // executable name
_ = args.next(); // executable name
let filepath = match (args.next(), args.next()) {
(Some(arg), None) => PathBuf::from(arg),
@ -99,7 +99,11 @@ fn render_location(
let start = err.span().start();
let mut end = err.span().end();
let code_line = match start.line.checked_sub(1).and_then(|n| code.lines().nth(n)) {
if start.line == end.line && start.column == end.column {
return render_fallback(formatter, err);
}
let code_line = match code.lines().nth(start.line - 1) {
Some(line) => line,
None => return render_fallback(formatter, err),
};
@ -134,10 +138,7 @@ fn render_location(
label = start.line.to_string().blue().bold(),
code = code_line.trim_end(),
offset = " ".repeat(start.column),
underline = "^"
.repeat(end.column.saturating_sub(start.column).max(1))
.red()
.bold(),
underline = "^".repeat(end.column - start.column).red().bold(),
message = err.to_string().red(),
)
}

View File

@ -2,7 +2,7 @@
name = "heapsize_example"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
edition = "2021"
edition = "2018"
publish = false
[dependencies]

View File

@ -2,7 +2,7 @@
name = "heapsize"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
edition = "2021"
edition = "2018"
publish = false
[dependencies]

View File

@ -2,13 +2,13 @@
name = "heapsize_derive"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
edition = "2021"
edition = "2018"
publish = false
[lib]
proc-macro = true
[dependencies]
proc-macro2 = "1"
quote = "1"
proc-macro2 = "1.0"
quote = "1.0"
syn = { path = "../../.." }

View File

@ -2,7 +2,7 @@
name = "lazy-static-example"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
edition = "2021"
edition = "2018"
publish = false
[dependencies]

View File

@ -2,13 +2,13 @@
name = "lazy_static"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
edition = "2021"
edition = "2018"
publish = false
[lib]
proc-macro = true
[dependencies]
proc-macro2 = { version = "1", features = ["nightly"] }
quote = "1"
proc-macro2 = { version = "1.0", features = ["nightly"] }
quote = "1.0"
syn = { path = "../../../", features = ["full"] }

View File

@ -42,7 +42,7 @@ n = 1
The procedural macro uses a syntax tree [`Fold`] to rewrite every `let`
statement and assignment expression in the following way:
[`Fold`]: https://docs.rs/syn/2.0/syn/fold/trait.Fold.html
[`Fold`]: https://docs.rs/syn/1.0/syn/fold/trait.Fold.html
```rust
// Before

View File

@ -2,7 +2,7 @@
name = "trace-var-example"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
edition = "2021"
edition = "2018"
publish = false
[dependencies]

View File

@ -2,13 +2,13 @@
name = "trace-var"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
edition = "2021"
edition = "2018"
publish = false
[lib]
proc-macro = true
[dependencies]
proc-macro2 = { version = "1", features = ["nightly"] }
quote = "1"
syn = { path = "../../../", features = ["fold", "full"] }
proc-macro2 = { version = "1.0", features = ["nightly"] }
quote = "1.0"
syn = { path = "../../../", features = ["full", "fold"] }

View File

@ -4,7 +4,7 @@ use std::collections::HashSet as Set;
use syn::fold::{self, Fold};
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::{parse_macro_input, parse_quote, BinOp, Expr, Ident, ItemFn, Local, Pat, Stmt, Token};
use syn::{parse_macro_input, parse_quote, Expr, Ident, ItemFn, Local, Pat, Stmt, Token};
/// Parses a list of variable names separated by commas.
///
@ -84,7 +84,7 @@ impl Args {
/// let VAR = { let VAR = INIT; println!("VAR = {:?}", VAR); VAR };
fn let_and_print(&mut self, local: Local) -> Stmt {
let Local { pat, init, .. } = local;
let init = self.fold_expr(*init.unwrap().expr);
let init = self.fold_expr(*init.unwrap().1);
let ident = match pat {
Pat::Ident(ref p) => &p.ident,
_ => unreachable!(),
@ -122,11 +122,11 @@ impl Fold for Args {
Expr::Assign(fold::fold_expr_assign(self, e))
}
}
Expr::Binary(e) if is_assign_op(e.op) => {
Expr::AssignOp(e) => {
if self.should_print_expr(&e.left) {
self.assign_and_print(*e.left, &e.op, *e.right)
} else {
Expr::Binary(fold::fold_expr_binary(self, e))
Expr::AssignOp(fold::fold_expr_assign_op(self, e))
}
}
_ => fold::fold_expr(self, e),
@ -147,22 +147,6 @@ impl Fold for Args {
}
}
fn is_assign_op(op: BinOp) -> bool {
match op {
BinOp::AddAssign(_)
| BinOp::SubAssign(_)
| BinOp::MulAssign(_)
| BinOp::DivAssign(_)
| BinOp::RemAssign(_)
| BinOp::BitXorAssign(_)
| BinOp::BitAndAssign(_)
| BinOp::BitOrAssign(_)
| BinOp::ShlAssign(_)
| BinOp::ShrAssign(_) => true,
_ => false,
}
}
/// Attribute to print the value of the given variables each time they are
/// reassigned.
///

1
fuzz/.gitignore vendored
View File

@ -1,4 +1,3 @@
artifacts/
corpus/
coverage/
target/

View File

@ -10,12 +10,9 @@ cargo-fuzz = true
[dependencies]
libfuzzer-sys = "0.4"
proc-macro2 = "1.0.52"
proc-macro2 = "1"
syn = { path = "..", default-features = false, features = ["full", "parsing"] }
[features]
span-locations = ["proc-macro2/span-locations"]
[[bin]]
name = "create_token_buffer"
path = "fuzz_targets/create_token_buffer.rs"

View File

@ -12,7 +12,7 @@ fn immediate_fail(_input: ParseStream) -> syn::Result<()> {
fuzz_target!(|data: &[u8]| {
if data.len() < 300 {
if let Ok(string) = str::from_utf8(data) {
let _ = immediate_fail.parse_str(string);
_ = immediate_fail.parse_str(string);
}
}
});

View File

@ -6,7 +6,7 @@ use std::str;
fuzz_target!(|data: &[u8]| {
if data.len() < 300 {
if let Ok(string) = str::from_utf8(data) {
let _ = syn::parse_file(string);
_ = syn::parse_file(string);
}
}
});

View File

@ -1,29 +1,22 @@
[package]
name = "syn-codegen"
version = "0.4.1" # also update html_root_url
version = "0.3.0" # also update html_root_url
authors = ["David Tolnay <dtolnay@gmail.com>"]
categories = ["development-tools::procedural-macro-helpers"]
description = "Syntax tree describing Syn's syntax tree"
documentation = "https://docs.rs/syn-codegen"
edition = "2021"
edition = "2018"
keywords = ["syn"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/syn"
[dependencies]
indexmap = { version = "2", features = ["serde"] }
semver = { version = "1", features = ["serde"] }
serde = "1.0.88"
serde_derive = "1.0.88"
indexmap = { version = "1.0", features = ["serde-1"] }
semver = { version = "1.0", features = ["serde"] }
serde = { version = "1.0.88", features = ["derive"] }
[dev-dependencies]
serde_json = "1"
[lib]
doc-scrape-examples = false
serde_json = "1.0"
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
rustdoc-args = ["--generate-link-to-definition"]
[workspace]

View File

@ -13,9 +13,9 @@
//! of the [`visit`], [`visit_mut`], and [`fold`] modules can be generated
//! programmatically from a description of the syntax tree.
//!
//! [`visit`]: https://docs.rs/syn/2.0/syn/visit/index.html
//! [`visit_mut`]: https://docs.rs/syn/2.0/syn/visit_mut/index.html
//! [`fold`]: https://docs.rs/syn/2.0/syn/fold/index.html
//! [`visit`]: https://docs.rs/syn/1.0/syn/visit/index.html
//! [`visit_mut`]: https://docs.rs/syn/1.0/syn/visit_mut/index.html
//! [`fold`]: https://docs.rs/syn/1.0/syn/fold/index.html
//!
//! To make this type of code as easy as possible to implement in any language,
//! every Syn release comes with a machine-readable description of that version
@ -44,12 +44,11 @@
//! }
//! ```
#![doc(html_root_url = "https://docs.rs/syn-codegen/0.4.1")]
#![doc(html_root_url = "https://docs.rs/syn-codegen/0.2.0")]
use indexmap::IndexMap;
use semver::Version;
use serde::de::{Deserialize, Deserializer};
use serde_derive::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize};
use std::collections::{BTreeMap, BTreeSet};
/// Top-level content of the syntax tree description.
@ -97,7 +96,7 @@ pub struct Node {
/// Content of a syntax tree data structure.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Data {
/// This is an opaque type with no publicly accessible structure.
/// This is an opaque type with no publicy accessible structure.
Private,
/// This type is a braced struct with named fields.

View File

@ -1,15 +1,19 @@
use super::*;
use crate::punctuated::Punctuated;
use proc_macro2::TokenStream;
use std::iter;
use std::slice;
#[cfg(feature = "parsing")]
use crate::meta::{self, ParseNestedMeta};
use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
#[cfg(feature = "parsing")]
use crate::parse::{Parse, ParseStream, Parser, Result};
use crate::punctuated::Pair;
ast_struct! {
/// An attribute, like `#[repr(transparent)]`.
/// An attribute like `#[repr(transparent)]`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// <br>
///
@ -19,52 +23,27 @@ ast_struct! {
///
/// - Outer attributes like `#[repr(transparent)]`. These appear outside or
/// in front of the item they describe.
///
/// - Inner attributes like `#![feature(proc_macro)]`. These appear inside
/// of the item they describe, usually a module.
///
/// - Outer one-line doc comments like `/// Example`.
///
/// - Inner one-line doc comments like `//! Please file an issue`.
///
/// - Outer documentation blocks `/** Example */`.
///
/// - Inner documentation blocks `/*! Please file an issue */`.
/// - Outer doc comments like `/// # Example`.
/// - Inner doc comments like `//! Please file an issue`.
/// - Outer block comments `/** # Example */`.
/// - Inner block comments `/*! Please file an issue */`.
///
/// The `style` field of type `AttrStyle` distinguishes whether an attribute
/// is outer or inner.
///
/// Every attribute has a `path` that indicates the intended interpretation
/// of the rest of the attribute's contents. The path and the optional
/// additional contents are represented together in the `meta` field of the
/// attribute in three possible varieties:
///
/// - Meta::Path &mdash; attributes whose information content conveys just a
/// path, for example the `#[test]` attribute.
///
/// - Meta::List &mdash; attributes that carry arbitrary tokens after the
/// path, surrounded by a delimiter (parenthesis, bracket, or brace). For
/// example `#[derive(Copy)]` or `#[precondition(x < 5)]`.
///
/// - Meta::NameValue &mdash; attributes with an `=` sign after the path,
/// followed by a Rust expression. For example `#[path =
/// "sys/windows.rs"]`.
///
/// All doc comments are represented in the NameValue style with a path of
/// "doc", as this is how they are processed by the compiler and by
/// is outer or inner. Doc comments and block comments are promoted to
/// attributes, as this is how they are processed by the compiler and by
/// `macro_rules!` macros.
///
/// The `path` field gives the possibly colon-delimited path against which
/// the attribute is resolved. It is equal to `"doc"` for desugared doc
/// comments. The `tokens` field contains the rest of the attribute body as
/// tokens.
///
/// ```text
/// #[derive(Copy, Clone)]
/// ~~~~~~Path
/// ^^^^^^^^^^^^^^^^^^^Meta::List
///
/// #[path = "sys/windows.rs"]
/// ~~~~Path
/// ^^^^^^^^^^^^^^^^^^^^^^^Meta::NameValue
///
/// #[test]
/// ^^^^Meta::Path
/// #[derive(Copy)] #[crate::precondition x < 5]
/// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~
/// path tokens path tokens
/// ```
///
/// <br>
@ -114,13 +93,18 @@ ast_struct! {
///
/// The grammar of attributes in Rust is very flexible, which makes the
/// syntax tree not that useful on its own. In particular, arguments of the
/// `Meta::List` variety of attribute are held in an arbitrary `tokens:
/// TokenStream`. Macros are expected to check the `path` of the attribute,
/// decide whether they recognize it, and then parse the remaining tokens
/// according to whatever grammar they wish to require for that kind of
/// attribute. Use [`parse_args()`] to parse those tokens into the expected
/// data structure.
/// attribute are held in an arbitrary `tokens: TokenStream`. Macros are
/// expected to check the `path` of the attribute, decide whether they
/// recognize it, and then parse the remaining tokens according to whatever
/// grammar they wish to require for that kind of attribute.
///
/// If the attribute you are parsing is expected to conform to the
/// conventional structured form of attribute, use [`parse_meta()`] to
/// obtain that structured representation. If the attribute follows some
/// other grammar of its own, use [`parse_args()`] to parse that into the
/// expected data structure.
///
/// [`parse_meta()`]: Attribute::parse_meta
/// [`parse_args()`]: Attribute::parse_args
///
/// <p><br></p>
@ -166,49 +150,65 @@ ast_struct! {
pub pound_token: Token![#],
pub style: AttrStyle,
pub bracket_token: token::Bracket,
pub meta: Meta,
pub path: Path,
pub tokens: TokenStream,
}
}
impl Attribute {
/// Returns the path that identifies the interpretation of this attribute.
/// Parses the content of the attribute, consisting of the path and tokens,
/// as a [`Meta`] if possible.
///
/// For example this would return the `test` in `#[test]`, the `derive` in
/// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
pub fn path(&self) -> &Path {
self.meta.path()
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_meta(&self) -> Result<Meta> {
fn clone_ident_segment(segment: &PathSegment) -> PathSegment {
PathSegment {
ident: segment.ident.clone(),
arguments: PathArguments::None,
}
}
let path = Path {
leading_colon: self
.path
.leading_colon
.as_ref()
.map(|colon| Token![::](colon.spans)),
segments: self
.path
.segments
.pairs()
.map(|pair| match pair {
Pair::Punctuated(seg, punct) => {
Pair::Punctuated(clone_ident_segment(seg), Token![::](punct.spans))
}
Pair::End(seg) => Pair::End(clone_ident_segment(seg)),
})
.collect(),
};
let parser = |input: ParseStream| parsing::parse_meta_after_path(path, input);
parse::Parser::parse2(parser, self.tokens.clone())
}
/// Parse the arguments to the attribute as a syntax tree.
///
/// This is similar to pulling out the `TokenStream` from `Meta::List` and
/// doing `syn::parse2::<T>(meta_list.tokens)`, except that using
/// `parse_args` the error message has a more useful span when `tokens` is
/// empty.
/// This is similar to `syn::parse2::<T>(attr.tokens)` except that:
///
/// The surrounding delimiters are *not* included in the input to the
/// parser.
/// - the surrounding delimiters are *not* included in the input to the
/// parser; and
/// - the error message has a more useful span when `tokens` is empty.
///
/// ```text
/// #[my_attr(value < 5)]
/// ^^^^^^^^^ what gets parsed
/// ```
///
/// # Example
///
/// ```
/// use syn::{parse_quote, Attribute, Expr};
///
/// let attr: Attribute = parse_quote! {
/// #[precondition(value < 5)]
/// };
///
/// if attr.path().is_ident("precondition") {
/// let precondition: Expr = attr.parse_args()?;
/// // ...
/// }
/// # anyhow::Ok(())
/// ```
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_args<T: Parse>(&self) -> Result<T> {
@ -217,182 +217,22 @@ impl Attribute {
/// Parse the arguments to the attribute using the given parser.
///
/// # Example
///
/// ```
/// use syn::{parse_quote, Attribute};
///
/// let attr: Attribute = parse_quote! {
/// #[inception { #[brrrrrrraaaaawwwwrwrrrmrmrmmrmrmmmmm] }]
/// };
///
/// let bwom = attr.parse_args_with(Attribute::parse_outer)?;
///
/// // Attribute does not have a Parse impl, so we couldn't directly do:
/// // let bwom: Attribute = attr.parse_args()?;
/// # anyhow::Ok(())
/// ```
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
match &self.meta {
Meta::Path(path) => Err(crate::error::new2(
path.segments.first().unwrap().ident.span(),
path.segments.last().unwrap().ident.span(),
format!(
"expected attribute arguments in parentheses: {}[{}(...)]",
parsing::DisplayAttrStyle(&self.style),
parsing::DisplayPath(path),
),
)),
Meta::NameValue(meta) => Err(Error::new(
meta.eq_token.span,
format_args!(
"expected parentheses: {}[{}(...)]",
parsing::DisplayAttrStyle(&self.style),
parsing::DisplayPath(&meta.path),
),
)),
Meta::List(meta) => meta.parse_args_with(parser),
}
}
/// Parse the arguments to the attribute, expecting it to follow the
/// conventional structure used by most of Rust's built-in attributes.
///
/// The [*Meta Item Attribute Syntax*][syntax] section in the Rust reference
/// explains the convention in more detail. Not all attributes follow this
/// convention, so [`parse_args()`][Self::parse_args] is available if you
/// need to parse arbitrarily goofy attribute syntax.
///
/// [syntax]: https://doc.rust-lang.org/reference/attributes.html#meta-item-attribute-syntax
///
/// # Example
///
/// We'll parse a struct, and then parse some of Rust's `#[repr]` attribute
/// syntax.
///
/// ```
/// use syn::{parenthesized, parse_quote, token, ItemStruct, LitInt};
///
/// let input: ItemStruct = parse_quote! {
/// #[repr(C, align(4))]
/// pub struct MyStruct(u16, u32);
/// };
///
/// let mut repr_c = false;
/// let mut repr_transparent = false;
/// let mut repr_align = None::<usize>;
/// let mut repr_packed = None::<usize>;
/// for attr in &input.attrs {
/// if attr.path().is_ident("repr") {
/// attr.parse_nested_meta(|meta| {
/// // #[repr(C)]
/// if meta.path.is_ident("C") {
/// repr_c = true;
/// return Ok(());
/// }
///
/// // #[repr(transparent)]
/// if meta.path.is_ident("transparent") {
/// repr_transparent = true;
/// return Ok(());
/// }
///
/// // #[repr(align(N))]
/// if meta.path.is_ident("align") {
/// let content;
/// parenthesized!(content in meta.input);
/// let lit: LitInt = content.parse()?;
/// let n: usize = lit.base10_parse()?;
/// repr_align = Some(n);
/// return Ok(());
/// }
///
/// // #[repr(packed)] or #[repr(packed(N))], omitted N means 1
/// if meta.path.is_ident("packed") {
/// if meta.input.peek(token::Paren) {
/// let content;
/// parenthesized!(content in meta.input);
/// let lit: LitInt = content.parse()?;
/// let n: usize = lit.base10_parse()?;
/// repr_packed = Some(n);
/// } else {
/// repr_packed = Some(1);
/// }
/// return Ok(());
/// }
///
/// Err(meta.error("unrecognized repr"))
/// })?;
/// }
/// }
/// # anyhow::Ok(())
/// ```
///
/// # Alternatives
///
/// In some cases, for attributes which have nested layers of structured
/// content, the following less flexible approach might be more convenient:
///
/// ```
/// # use syn::{parse_quote, ItemStruct};
/// #
/// # let input: ItemStruct = parse_quote! {
/// # #[repr(C, align(4))]
/// # pub struct MyStruct(u16, u32);
/// # };
/// #
/// use syn::punctuated::Punctuated;
/// use syn::{parenthesized, token, Error, LitInt, Meta, Token};
///
/// let mut repr_c = false;
/// let mut repr_transparent = false;
/// let mut repr_align = None::<usize>;
/// let mut repr_packed = None::<usize>;
/// for attr in &input.attrs {
/// if attr.path().is_ident("repr") {
/// let nested = attr.parse_args_with(Punctuated::<Meta, Token![,]>::parse_terminated)?;
/// for meta in nested {
/// match meta {
/// // #[repr(C)]
/// Meta::Path(path) if path.is_ident("C") => {
/// repr_c = true;
/// }
///
/// // #[repr(align(N))]
/// Meta::List(meta) if meta.path.is_ident("align") => {
/// let lit: LitInt = meta.parse_args()?;
/// let n: usize = lit.base10_parse()?;
/// repr_align = Some(n);
/// }
///
/// /* ... */
///
/// _ => {
/// return Err(Error::new_spanned(meta, "unrecognized repr"));
/// }
/// }
/// }
/// }
/// }
/// # Ok(())
/// ```
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_nested_meta(
&self,
logic: impl FnMut(ParseNestedMeta) -> Result<()>,
) -> Result<()> {
self.parse_args_with(meta::parser(logic))
let parser = |input: ParseStream| {
let args = enter_args(self, input)?;
parse::parse_stream(parser, &args)
};
parser.parse2(self.tokens.clone())
}
/// Parses zero or more outer attributes from the stream.
///
/// # Example
///
/// See
/// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute).
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
@ -405,10 +245,8 @@ impl Attribute {
/// Parses zero or more inner attributes from the stream.
///
/// # Example
///
/// See
/// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute).
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
@ -418,10 +256,65 @@ impl Attribute {
}
}
#[cfg(feature = "parsing")]
fn expected_parentheses(attr: &Attribute) -> String {
let style = match attr.style {
AttrStyle::Outer => "#",
AttrStyle::Inner(_) => "#!",
};
let mut path = String::new();
for segment in &attr.path.segments {
if !path.is_empty() || attr.path.leading_colon.is_some() {
path += "::";
}
path += &segment.ident.to_string();
}
format!("{}[{}(...)]", style, path)
}
#[cfg(feature = "parsing")]
fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
if input.is_empty() {
let expected = expected_parentheses(attr);
let msg = format!("expected attribute arguments in parentheses: {}", expected);
return Err(crate::error::new2(
attr.pound_token.span,
attr.bracket_token.span,
msg,
));
} else if input.peek(Token![=]) {
let expected = expected_parentheses(attr);
let msg = format!("expected parentheses: {}", expected);
return Err(input.error(msg));
};
let content;
if input.peek(token::Paren) {
parenthesized!(content in input);
} else if input.peek(token::Bracket) {
bracketed!(content in input);
} else if input.peek(token::Brace) {
braced!(content in input);
} else {
return Err(input.error("unexpected token in attribute arguments"));
}
if input.is_empty() {
Ok(content)
} else {
Err(input.error("unexpected token in attribute arguments"))
}
}
ast_enum! {
/// Distinguishes between attributes that decorate an item and attributes
/// that are contained within an item.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Outer attributes
///
/// - `#[repr(transparent)]`
@ -443,6 +336,9 @@ ast_enum! {
ast_enum_of_structs! {
/// Content of a compile-time structured attribute.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// ## Path
///
/// A meta path is like the `test` in `#[test]`.
@ -475,26 +371,32 @@ ast_enum_of_structs! {
ast_struct! {
/// A structured list within an attribute, like `derive(Copy, Clone)`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct MetaList {
pub path: Path,
pub delimiter: MacroDelimiter,
pub tokens: TokenStream,
pub paren_token: token::Paren,
pub nested: Punctuated<NestedMeta, Token![,]>,
}
}
ast_struct! {
/// A name-value pair within an attribute, like `feature = "nightly"`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct MetaNameValue {
pub path: Path,
pub eq_token: Token![=],
pub value: Expr,
pub lit: Lit,
}
}
impl Meta {
/// Returns the path that begins this structured meta item.
/// Returns the identifier that begins this structured meta item.
///
/// For example this would return the `test` in `#[test]`, the `derive` in
/// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
@ -505,84 +407,63 @@ impl Meta {
Meta::NameValue(meta) => &meta.path,
}
}
}
/// Error if this is a `Meta::List` or `Meta::NameValue`.
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn require_path_only(&self) -> Result<&Path> {
let error_span = match self {
Meta::Path(path) => return Ok(path),
Meta::List(meta) => meta.delimiter.span().open(),
Meta::NameValue(meta) => meta.eq_token.span,
};
Err(Error::new(error_span, "unexpected token in attribute"))
}
ast_enum_of_structs! {
/// Element of a compile-time attribute list.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum NestedMeta {
/// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
/// would be a nested `Meta::Path`.
Meta(Meta),
/// Error if this is a `Meta::Path` or `Meta::NameValue`.
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn require_list(&self) -> Result<&MetaList> {
match self {
Meta::List(meta) => Ok(meta),
Meta::Path(path) => Err(crate::error::new2(
path.segments.first().unwrap().ident.span(),
path.segments.last().unwrap().ident.span(),
format!(
"expected attribute arguments in parentheses: `{}(...)`",
parsing::DisplayPath(path),
),
)),
Meta::NameValue(meta) => Err(Error::new(meta.eq_token.span, "expected `(`")),
}
}
/// Error if this is a `Meta::Path` or `Meta::List`.
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn require_name_value(&self) -> Result<&MetaNameValue> {
match self {
Meta::NameValue(meta) => Ok(meta),
Meta::Path(path) => Err(crate::error::new2(
path.segments.first().unwrap().ident.span(),
path.segments.last().unwrap().ident.span(),
format!(
"expected a value for this attribute: `{} = ...`",
parsing::DisplayPath(path),
),
)),
Meta::List(meta) => Err(Error::new(meta.delimiter.span().open(), "expected `=`")),
}
/// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
Lit(Lit),
}
}
impl MetaList {
/// See [`Attribute::parse_args`].
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_args<T: Parse>(&self) -> Result<T> {
self.parse_args_with(T::parse)
}
/// Conventional argument type associated with an invocation of an attribute
/// macro.
///
/// For example if we are developing an attribute macro that is intended to be
/// invoked on function items as follows:
///
/// ```
/// # const IGNORE: &str = stringify! {
/// #[my_attribute(path = "/v1/refresh")]
/// # };
/// pub fn refresh() {
/// /* ... */
/// }
/// ```
///
/// The implementation of this macro would want to parse its attribute arguments
/// as type `AttributeArgs`.
///
/// ```
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
///
/// # const IGNORE: &str = stringify! {
/// #[proc_macro_attribute]
/// # };
/// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream {
/// let args = parse_macro_input!(args as AttributeArgs);
/// let input = parse_macro_input!(input as ItemFn);
///
/// /* ... */
/// # "".parse().unwrap()
/// }
/// ```
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub type AttributeArgs = Vec<NestedMeta>;
/// See [`Attribute::parse_args_with`].
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
let scope = self.delimiter.span().close();
crate::parse::parse_scoped(parser, scope, self.tokens.clone())
}
/// See [`Attribute::parse_nested_meta`].
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_nested_meta(
&self,
logic: impl FnMut(ParseNestedMeta) -> Result<()>,
) -> Result<()> {
self.parse_args_with(meta::parser(logic))
}
}
pub(crate) trait FilterAttrs<'a> {
pub trait FilterAttrs<'a> {
type Ret: Iterator<Item = &'a Attribute>;
fn outer(self) -> Self::Ret;
@ -614,43 +495,69 @@ impl<'a> FilterAttrs<'a> for &'a [Attribute] {
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::parse::discouraged::Speculative as _;
use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
use std::fmt::{self, Display};
pub(crate) fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> {
pub fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> {
while input.peek(Token![#]) && input.peek2(Token![!]) {
attrs.push(input.call(parsing::single_parse_inner)?);
}
Ok(())
}
pub(crate) fn single_parse_inner(input: ParseStream) -> Result<Attribute> {
pub fn single_parse_inner(input: ParseStream) -> Result<Attribute> {
let content;
Ok(Attribute {
pound_token: input.parse()?,
style: AttrStyle::Inner(input.parse()?),
bracket_token: bracketed!(content in input),
meta: content.parse()?,
path: content.call(Path::parse_mod_style)?,
tokens: content.parse()?,
})
}
pub(crate) fn single_parse_outer(input: ParseStream) -> Result<Attribute> {
pub fn single_parse_outer(input: ParseStream) -> Result<Attribute> {
let content;
Ok(Attribute {
pound_token: input.parse()?,
style: AttrStyle::Outer,
bracket_token: bracketed!(content in input),
meta: content.parse()?,
path: content.call(Path::parse_mod_style)?,
tokens: content.parse()?,
})
}
// Like Path::parse_mod_style but accepts keywords in the path.
fn parse_meta_path(input: ParseStream) -> Result<Path> {
Ok(Path {
leading_colon: input.parse()?,
segments: {
let mut segments = Punctuated::new();
while input.peek(Ident::peek_any) {
let ident = Ident::parse_any(input)?;
segments.push_value(PathSegment::from(ident));
if !input.peek(Token![::]) {
break;
}
let punct = input.parse()?;
segments.push_punct(punct);
}
if segments.is_empty() {
return Err(input.error("expected path"));
} else if segments.trailing_punct() {
return Err(input.error("expected path segment"));
}
segments
},
})
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Meta {
fn parse(input: ParseStream) -> Result<Self> {
let path = input.call(Path::parse_mod_style)?;
let path = input.call(parse_meta_path)?;
parse_meta_after_path(path, input)
}
}
@ -658,7 +565,7 @@ pub(crate) mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for MetaList {
fn parse(input: ParseStream) -> Result<Self> {
let path = input.call(Path::parse_mod_style)?;
let path = input.call(parse_meta_path)?;
parse_meta_list_after_path(path, input)
}
}
@ -666,13 +573,28 @@ pub(crate) mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for MetaNameValue {
fn parse(input: ParseStream) -> Result<Self> {
let path = input.call(Path::parse_mod_style)?;
let path = input.call(parse_meta_path)?;
parse_meta_name_value_after_path(path, input)
}
}
pub(crate) fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> {
if input.peek(token::Paren) || input.peek(token::Bracket) || input.peek(token::Brace) {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for NestedMeta {
fn parse(input: ParseStream) -> Result<Self> {
if input.peek(Lit) && !(input.peek(LitBool) && input.peek2(Token![=])) {
input.parse().map(NestedMeta::Lit)
} else if input.peek(Ident::peek_any)
|| input.peek(Token![::]) && input.peek3(Ident::peek_any)
{
input.parse().map(NestedMeta::Meta)
} else {
Err(input.error("expected identifier or literal"))
}
}
}
pub fn parse_meta_after_path(path: Path, input: ParseStream) -> Result<Meta> {
if input.peek(token::Paren) {
parse_meta_list_after_path(path, input).map(Meta::List)
} else if input.peek(Token![=]) {
parse_meta_name_value_after_path(path, input).map(Meta::NameValue)
@ -682,60 +604,21 @@ pub(crate) mod parsing {
}
fn parse_meta_list_after_path(path: Path, input: ParseStream) -> Result<MetaList> {
let (delimiter, tokens) = mac::parse_delimiter(input)?;
let content;
Ok(MetaList {
path,
delimiter,
tokens,
paren_token: parenthesized!(content in input),
nested: content.parse_terminated(NestedMeta::parse)?,
})
}
fn parse_meta_name_value_after_path(path: Path, input: ParseStream) -> Result<MetaNameValue> {
let eq_token: Token![=] = input.parse()?;
let ahead = input.fork();
let lit: Option<Lit> = ahead.parse()?;
let value = if let (Some(lit), true) = (lit, ahead.is_empty()) {
input.advance_to(&ahead);
Expr::Lit(ExprLit {
attrs: Vec::new(),
lit,
})
} else if input.peek(Token![#]) && input.peek2(token::Bracket) {
return Err(input.error("unexpected attribute inside of attribute"));
} else {
input.parse()?
};
Ok(MetaNameValue {
path,
eq_token,
value,
eq_token: input.parse()?,
lit: input.parse()?,
})
}
pub(super) struct DisplayAttrStyle<'a>(pub &'a AttrStyle);
impl<'a> Display for DisplayAttrStyle<'a> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str(match self.0 {
AttrStyle::Outer => "#",
AttrStyle::Inner(_) => "#!",
})
}
}
pub(super) struct DisplayPath<'a>(pub &'a Path);
impl<'a> Display for DisplayPath<'a> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
for (i, segment) in self.0.segments.iter().enumerate() {
if i > 0 || self.0.leading_colon.is_some() {
formatter.write_str("::")?;
}
write!(formatter, "{}", segment.ident)?;
}
Ok(())
}
}
}
#[cfg(feature = "printing")]
@ -752,7 +635,8 @@ mod printing {
b.to_tokens(tokens);
}
self.bracket_token.surround(tokens, |tokens| {
self.meta.to_tokens(tokens);
self.path.to_tokens(tokens);
self.tokens.to_tokens(tokens);
});
}
}
@ -761,7 +645,9 @@ mod printing {
impl ToTokens for MetaList {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.path.to_tokens(tokens);
self.delimiter.surround(tokens, self.tokens.clone());
self.paren_token.surround(tokens, |tokens| {
self.nested.to_tokens(tokens);
});
}
}
@ -770,7 +656,7 @@ mod printing {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.path.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.value.to_tokens(tokens);
self.lit.to_tokens(tokens);
}
}
}

2
src/await.rs Normal file
View File

@ -0,0 +1,2 @@
// See include!("await.rs") in token.rs.
export_token_macro! {[await]}

View File

@ -1,16 +1,16 @@
use std::ops::{AddAssign, MulAssign};
// For implementing base10_digits() accessor on LitInt.
pub(crate) struct BigInt {
pub struct BigInt {
digits: Vec<u8>,
}
impl BigInt {
pub(crate) fn new() -> Self {
pub fn new() -> Self {
BigInt { digits: Vec::new() }
}
pub(crate) fn to_string(&self) -> String {
pub fn to_string(&self) -> String {
let mut repr = String::with_capacity(self.digits.len());
let mut has_nonzero = false;

View File

@ -1,12 +1,18 @@
//! A stably addressed token buffer supporting efficient traversal based on a
//! cheaply copyable cursor.
//!
//! *This module is available only if Syn is built with the `"parsing"` feature.*
// This module is heavily commented as it contains most of the unsafe code in
// Syn, and caution should be used when editing it. The public-facing interface
// is 100% safe but the implementation is fragile internally.
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "proc-macro"
))]
use crate::proc_macro as pm;
use crate::Lifetime;
use proc_macro2::extra::DelimSpan;
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::cmp::Ordering;
use std::marker::PhantomData;
@ -27,6 +33,8 @@ enum Entry {
/// A buffer that can be efficiently traversed multiple times, unlike
/// `TokenStream` which requires a deep copy in order to traverse more than
/// once.
///
/// *This type is available only if Syn is built with the `"parsing"` feature.*
pub struct TokenBuffer {
// NOTE: Do not implement clone on this - while the current design could be
// cloned, other designs which could be desirable may not be cloneable.
@ -55,9 +63,14 @@ impl TokenBuffer {
/// Creates a `TokenBuffer` containing all the tokens from the input
/// `proc_macro::TokenStream`.
#[cfg(feature = "proc-macro")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))]
pub fn new(stream: proc_macro::TokenStream) -> Self {
///
/// *This method is available only if Syn is built with both the `"parsing"` and
/// `"proc-macro"` features.*
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "proc-macro"
))]
pub fn new(stream: pm::TokenStream) -> Self {
Self::new2(stream.into())
}
@ -88,6 +101,11 @@ impl TokenBuffer {
///
/// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
/// object and get a cursor to its first token with `begin()`.
///
/// Two cursors are equal if they have the same location in the same input
/// stream, and have the same scope.
///
/// *This type is available only if Syn is built with the `"parsing"` feature.*
pub struct Cursor<'a> {
// The current entry which the `Cursor` is pointing at.
ptr: *const Entry,
@ -128,11 +146,11 @@ impl<'a> Cursor<'a> {
// past it, unless `ptr == scope`, which means that we're at the edge of
// our cursor's scope. We should only have `ptr != scope` at the exit
// from None-delimited groups entered with `ignore_none`.
while let Entry::End(_) = unsafe { &*ptr } {
while let Entry::End(_) = *ptr {
if ptr == scope {
break;
}
ptr = unsafe { ptr.add(1) };
ptr = ptr.add(1);
}
Cursor {
@ -154,7 +172,7 @@ impl<'a> Cursor<'a> {
/// If the cursor is looking at an `Entry::Group`, the bumped cursor will
/// point at the first token in the group (with the same scope end).
unsafe fn bump_ignore_group(self) -> Cursor<'a> {
unsafe { Cursor::create(self.ptr.offset(1), self.scope) }
Cursor::create(self.ptr.offset(1), self.scope)
}
/// While the cursor is looking at a `None`-delimited group, move it to look
@ -181,7 +199,7 @@ impl<'a> Cursor<'a> {
/// If the cursor is pointing at a `Group` with the given delimiter, returns
/// a cursor into that group and one pointing to the next `TokenTree`.
pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> {
// If we're not trying to enter a none-delimited group, we want to
// ignore them. We have to make sure to _not_ ignore them when we want
// to enter them, of course. For obvious reasons.
@ -191,40 +209,16 @@ impl<'a> Cursor<'a> {
if let Entry::Group(group, end_offset) = self.entry() {
if group.delimiter() == delim {
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, span, after_group));
return Some((inside_of_group, group.span(), after_group));
}
}
None
}
pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let delimiter = group.delimiter();
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, delimiter, span, after_group));
}
None
}
pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((group.clone(), after_group));
}
None
}
/// If the cursor is pointing at a `Ident`, returns it along with a cursor
/// pointing at the next `TokenTree`.
pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
@ -319,33 +313,6 @@ impl<'a> Cursor<'a> {
}
}
/// Returns the `Span` of the token immediately prior to the position of
/// this cursor, or of the current token if there is no previous one.
#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn prev_span(mut self) -> Span {
if start_of_buffer(self) < self.ptr {
self.ptr = unsafe { self.ptr.offset(-1) };
if let Entry::End(_) = self.entry() {
// Locate the matching Group begin token.
let mut depth = 1;
loop {
self.ptr = unsafe { self.ptr.offset(-1) };
match self.entry() {
Entry::Group(group, _) => {
depth -= 1;
if depth == 0 {
return group.span();
}
}
Entry::End(_) => depth += 1,
Entry::Literal(_) | Entry::Ident(_) | Entry::Punct(_) => {}
}
}
}
}
self.span()
}
/// Skip over the next token without cloning it. Returns `None` if this
/// cursor points to eof.
///
@ -389,7 +356,7 @@ impl<'a> PartialEq for Cursor<'a> {
impl<'a> PartialOrd for Cursor<'a> {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
if same_buffer(*self, *other) {
Some(cmp_assuming_same_buffer(*self, *other))
Some(self.ptr.cmp(&other.ptr))
} else {
None
}
@ -401,18 +368,17 @@ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
}
pub(crate) fn same_buffer(a: Cursor, b: Cursor) -> bool {
start_of_buffer(a) == start_of_buffer(b)
}
fn start_of_buffer(cursor: Cursor) -> *const Entry {
unsafe {
match &*cursor.scope {
Entry::End(offset) => cursor.scope.offset(*offset),
match (&*a.scope, &*b.scope) {
(Entry::End(a_offset), Entry::End(b_offset)) => {
a.scope.offset(*a_offset) == b.scope.offset(*b_offset)
}
_ => unreachable!(),
}
}
}
#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn cmp_assuming_same_buffer(a: Cursor, b: Cursor) -> Ordering {
a.ptr.cmp(&b.ptr)
}

View File

@ -96,28 +96,26 @@ macro_rules! custom_keyword {
#[doc(hidden)]
#[allow(dead_code, non_snake_case)]
pub fn $ident<__S: $crate::__private::IntoSpans<$crate::__private::Span>>(
pub fn $ident<__S: $crate::__private::IntoSpans<[$crate::__private::Span; 1]>>(
span: __S,
) -> $ident {
$ident {
span: $crate::__private::IntoSpans::into_spans(span),
span: $crate::__private::IntoSpans::into_spans(span)[0],
}
}
const _: () = {
impl $crate::__private::Default for $ident {
fn default() -> Self {
$ident {
span: $crate::__private::Span::call_site(),
}
impl $crate::__private::Default for $ident {
fn default() -> Self {
$ident {
span: $crate::__private::Span::call_site(),
}
}
}
$crate::impl_parse_for_custom_keyword!($ident);
$crate::impl_to_tokens_for_custom_keyword!($ident);
$crate::impl_clone_for_custom_keyword!($ident);
$crate::impl_extra_traits_for_custom_keyword!($ident);
};
$crate::impl_parse_for_custom_keyword!($ident);
$crate::impl_to_tokens_for_custom_keyword!($ident);
$crate::impl_clone_for_custom_keyword!($ident);
$crate::impl_extra_traits_for_custom_keyword!($ident);
};
}
@ -128,17 +126,17 @@ macro_rules! custom_keyword {
macro_rules! impl_parse_for_custom_keyword {
($ident:ident) => {
// For peek.
impl $crate::__private::CustomToken for $ident {
impl $crate::token::CustomToken for $ident {
fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
if let $crate::__private::Some((ident, _rest)) = cursor.ident() {
ident == $crate::__private::stringify!($ident)
ident == stringify!($ident)
} else {
false
}
}
fn display() -> &'static $crate::__private::str {
$crate::__private::concat!("`", $crate::__private::stringify!($ident), "`")
concat!("`", stringify!($ident), "`")
}
}
@ -146,14 +144,14 @@ macro_rules! impl_parse_for_custom_keyword {
fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
input.step(|cursor| {
if let $crate::__private::Some((ident, rest)) = cursor.ident() {
if ident == $crate::__private::stringify!($ident) {
if ident == stringify!($ident) {
return $crate::__private::Ok(($ident { span: ident.span() }, rest));
}
}
$crate::__private::Err(cursor.error($crate::__private::concat!(
$crate::__private::Err(cursor.error(concat!(
"expected `",
$crate::__private::stringify!($ident),
"`",
stringify!($ident),
"`"
)))
})
}
@ -177,7 +175,7 @@ macro_rules! impl_to_tokens_for_custom_keyword {
($ident:ident) => {
impl $crate::__private::ToTokens for $ident {
fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
let ident = $crate::Ident::new($crate::__private::stringify!($ident), self.span);
let ident = $crate::Ident::new(stringify!($ident), self.span);
$crate::__private::TokenStreamExt::append(tokens, ident);
}
}
@ -224,14 +222,10 @@ macro_rules! impl_clone_for_custom_keyword {
macro_rules! impl_extra_traits_for_custom_keyword {
($ident:ident) => {
impl $crate::__private::Debug for $ident {
fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result {
$crate::__private::Formatter::write_str(
f,
$crate::__private::concat!(
"Keyword [",
$crate::__private::stringify!($ident),
"]",
),
concat!("Keyword [", stringify!($ident), "]"),
)
}
}

View File

@ -92,18 +92,16 @@ macro_rules! custom_punctuation {
}
}
const _: () = {
impl $crate::__private::Default for $ident {
fn default() -> Self {
$ident($crate::__private::Span::call_site())
}
impl $crate::__private::Default for $ident {
fn default() -> Self {
$ident($crate::__private::Span::call_site())
}
}
$crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
};
$crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
$crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
};
}
@ -113,20 +111,20 @@ macro_rules! custom_punctuation {
#[macro_export]
macro_rules! impl_parse_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::__private::CustomToken for $ident {
fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
$crate::__private::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
impl $crate::token::CustomToken for $ident {
fn peek(cursor: $crate::buffer::Cursor) -> bool {
$crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
}
fn display() -> &'static $crate::__private::str {
$crate::__private::concat!("`", $crate::stringify_punct!($($tt)+), "`")
concat!("`", $crate::stringify_punct!($($tt)+), "`")
}
}
impl $crate::parse::Parse for $ident {
fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
let spans: $crate::custom_punctuation_repr!($($tt)+) =
$crate::__private::parse_punct(input, $crate::stringify_punct!($($tt)+))?;
$crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
Ok($ident(spans))
}
}
@ -149,7 +147,7 @@ macro_rules! impl_to_tokens_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::__private::ToTokens for $ident {
fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
$crate::__private::print_punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
$crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
}
}
};
@ -195,8 +193,8 @@ macro_rules! impl_clone_for_custom_punctuation {
macro_rules! impl_extra_traits_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::__private::Debug for $ident {
fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::FmtResult {
$crate::__private::Formatter::write_str(f, $crate::__private::stringify!($ident))
fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result {
$crate::__private::Formatter::write_str(f, stringify!($ident))
}
}
@ -297,6 +295,6 @@ macro_rules! custom_punctuation_unexpected {
#[macro_export]
macro_rules! stringify_punct {
($($tt:tt)+) => {
$crate::__private::concat!($($crate::__private::stringify!($tt)),+)
concat!($(stringify!($tt)),+)
};
}

View File

@ -3,8 +3,12 @@ use crate::punctuated::Punctuated;
ast_struct! {
/// An enum variant.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Variant {
/// Attributes tagged on the variant.
pub attrs: Vec<Attribute>,
/// Name of the variant.
@ -21,6 +25,9 @@ ast_struct! {
ast_enum_of_structs! {
/// Data stored within an enum variant or struct.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
@ -43,6 +50,9 @@ ast_enum_of_structs! {
ast_struct! {
/// Named fields of a struct or struct variant such as `Point { x: f64,
/// y: f64 }`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct FieldsNamed {
pub brace_token: token::Brace,
@ -52,6 +62,9 @@ ast_struct! {
ast_struct! {
/// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct FieldsUnnamed {
pub paren_token: token::Paren,
@ -134,14 +147,17 @@ impl<'a> IntoIterator for &'a mut Fields {
ast_struct! {
/// A field of a struct or enum variant.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Field {
/// Attributes tagged on the field.
pub attrs: Vec<Attribute>,
/// Visibility of the field.
pub vis: Visibility,
pub mutability: FieldMutability,
/// Name of the field, if any.
///
/// Fields of tuple structs have no names.
@ -149,16 +165,82 @@ ast_struct! {
pub colon_token: Option<Token![:]>,
/// Type of the field.
pub ty: Type,
}
}
ast_enum_of_structs! {
/// The visibility level of an item: inherited or `pub` or
/// `pub(restricted)`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum Visibility {
/// A public visibility level: `pub`.
Public(VisPublic),
/// A crate-level visibility: `crate`.
Crate(VisCrate),
/// A visibility level restricted to some path: `pub(self)` or
/// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
Restricted(VisRestricted),
/// An inherited visibility, which usually means private.
Inherited,
}
}
ast_struct! {
/// A public visibility level: `pub`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct VisPublic {
pub pub_token: Token![pub],
}
}
ast_struct! {
/// A crate-level visibility: `crate`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct VisCrate {
pub crate_token: Token![crate],
}
}
ast_struct! {
/// A visibility level restricted to some path: `pub(self)` or
/// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct VisRestricted {
pub pub_token: Token![pub],
pub paren_token: token::Paren,
pub in_token: Option<Token![in]>,
pub path: Box<Path>,
}
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::ext::IdentExt as _;
#[cfg(not(feature = "full"))]
use crate::parse::discouraged::Speculative as _;
use crate::ext::IdentExt;
use crate::parse::discouraged::Speculative;
use crate::parse::{Parse, ParseStream, Result};
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
@ -176,20 +258,7 @@ pub(crate) mod parsing {
};
let discriminant = if input.peek(Token![=]) {
let eq_token: Token![=] = input.parse()?;
#[cfg(feature = "full")]
let discriminant: Expr = input.parse()?;
#[cfg(not(feature = "full"))]
let discriminant = {
let begin = input.fork();
let ahead = input.fork();
let mut discriminant: Result<Expr> = ahead.parse();
if discriminant.is_ok() {
input.advance_to(&ahead);
} else if scan_lenient_discriminant(input).is_ok() {
discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input)));
}
discriminant?
};
Some((eq_token, discriminant))
} else {
None
@ -203,86 +272,13 @@ pub(crate) mod parsing {
}
}
#[cfg(not(feature = "full"))]
pub(crate) fn scan_lenient_discriminant(input: ParseStream) -> Result<()> {
use proc_macro2::Delimiter::{self, Brace, Bracket, Parenthesis};
let consume = |delimiter: Delimiter| {
Result::unwrap(input.step(|cursor| match cursor.group(delimiter) {
Some((_inside, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
}))
};
macro_rules! consume {
[$token:tt] => {
input.parse::<Option<Token![$token]>>().unwrap().is_some()
};
}
let mut initial = true;
let mut depth = 0usize;
loop {
if initial {
if consume![&] {
input.parse::<Option<Token![mut]>>()?;
} else if consume![if] || consume![match] || consume![while] {
depth += 1;
} else if input.parse::<Option<Lit>>()?.is_some()
|| (consume(Brace) || consume(Bracket) || consume(Parenthesis))
|| (consume![async] || consume![const] || consume![loop] || consume![unsafe])
&& (consume(Brace) || break)
{
initial = false;
} else if consume![let] {
while !consume![=] {
if !((consume![|] || consume![ref] || consume![mut] || consume![@])
|| (consume![!] || input.parse::<Option<Lit>>()?.is_some())
|| (consume![..=] || consume![..] || consume![&] || consume![_])
|| (consume(Brace) || consume(Bracket) || consume(Parenthesis)))
{
path::parsing::qpath(input, true)?;
}
}
} else if input.parse::<Option<Lifetime>>()?.is_some() && !consume![:] {
break;
} else if input.parse::<UnOp>().is_err() {
path::parsing::qpath(input, true)?;
initial = consume![!] || depth == 0 && input.peek(token::Brace);
}
} else if input.is_empty() || input.peek(Token![,]) {
return Ok(());
} else if depth > 0 && consume(Brace) {
if consume![else] && !consume(Brace) {
initial = consume![if] || break;
} else {
depth -= 1;
}
} else if input.parse::<BinOp>().is_ok() || (consume![..] | consume![=]) {
initial = true;
} else if consume![.] {
if input.parse::<Option<LitFloat>>()?.is_none()
&& (input.parse::<Member>()?.is_named() && consume![::])
{
AngleBracketedGenericArguments::do_parse(None, input)?;
}
} else if consume![as] {
input.parse::<Type>()?;
} else if !(consume(Brace) || consume(Bracket) || consume(Parenthesis)) {
break;
}
}
Err(input.error("unsupported expression"))
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for FieldsNamed {
    /// Parses the braced field list of a struct/variant: `{ a: T, b: U }`.
    fn parse(input: ParseStream) -> Result<Self> {
        let content;
        Ok(FieldsNamed {
            brace_token: braced!(content in input),
            // Single-argument `parse_terminated`, matching the syn 1.x API
            // used elsewhere in this file (cf. `data_enum`); the diff had
            // left both the old and new call sites in place, producing a
            // duplicate `named:` initializer.
            named: content.parse_terminated(Field::parse_named)?,
        })
    }
}
@ -293,7 +289,7 @@ pub(crate) mod parsing {
let content;
Ok(FieldsUnnamed {
paren_token: parenthesized!(content in input),
unnamed: content.parse_terminated(Field::parse_unnamed, Token![,])?,
unnamed: content.parse_terminated(Field::parse_unnamed)?,
})
}
}
@ -302,37 +298,16 @@ pub(crate) mod parsing {
/// Parses a named (braced struct) field.
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_named(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let unnamed_field = cfg!(feature = "full") && input.peek(Token![_]);
let ident = if unnamed_field {
input.call(Ident::parse_any)
} else {
input.parse()
}?;
let colon_token: Token![:] = input.parse()?;
let ty: Type = if unnamed_field
&& (input.peek(Token![struct])
|| input.peek(Token![union]) && input.peek2(token::Brace))
{
let begin = input.fork();
input.call(Ident::parse_any)?;
input.parse::<FieldsNamed>()?;
Type::Verbatim(verbatim::between(&begin, input))
} else {
input.parse()?
};
Ok(Field {
attrs,
vis,
mutability: FieldMutability::None,
ident: Some(ident),
colon_token: Some(colon_token),
ty,
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
ident: Some(if input.peek(Token![_]) {
input.call(Ident::parse_any)
} else {
input.parse()
}?),
colon_token: Some(input.parse()?),
ty: input.parse()?,
})
}
@ -342,13 +317,100 @@ pub(crate) mod parsing {
Ok(Field {
attrs: input.call(Attribute::parse_outer)?,
vis: input.parse()?,
mutability: FieldMutability::None,
ident: None,
colon_token: None,
ty: input.parse()?,
})
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Visibility {
    /// Parses a visibility specifier: `pub`, a restricted `pub(...)` form,
    /// `crate`, or nothing (inherited/private visibility).
    fn parse(input: ParseStream) -> Result<Self> {
        // Recognize an empty None-delimited group, as produced by a $:vis
        // matcher that matched no tokens.
        if input.peek(token::Group) {
            // Fork so that a non-empty group is left untouched for the
            // normal parsing path below.
            let ahead = input.fork();
            let group = crate::group::parse_group(&ahead)?;
            if group.content.is_empty() {
                input.advance_to(&ahead);
                return Ok(Visibility::Inherited);
            }
        }
        if input.peek(Token![pub]) {
            Self::parse_pub(input)
        } else if input.peek(Token![crate]) {
            Self::parse_crate(input)
        } else {
            // No visibility keyword at all: inherited (private) visibility.
            Ok(Visibility::Inherited)
        }
    }
}
impl Visibility {
    /// Parses `pub` and its restricted forms: `pub(crate)`, `pub(self)`,
    /// `pub(super)`, and `pub(in some::path)`.
    ///
    /// The parenthesized restriction is parsed speculatively on a fork so
    /// that e.g. a tuple-struct field list following a bare `pub` is not
    /// consumed by mistake; only on a confirmed match is the main stream
    /// advanced past the parentheses.
    fn parse_pub(input: ParseStream) -> Result<Self> {
        let pub_token = input.parse::<Token![pub]>()?;
        if input.peek(token::Paren) {
            let ahead = input.fork();
            let content;
            let paren_token = parenthesized!(content in ahead);
            if content.peek(Token![crate])
                || content.peek(Token![self])
                || content.peek(Token![super])
            {
                // `crate`/`self`/`super` are keywords, hence parse_any.
                let path = content.call(Ident::parse_any)?;
                // Ensure there are no additional tokens within `content`.
                // Without explicitly checking, we may misinterpret a tuple
                // field as a restricted visibility, causing a parse error.
                // e.g. `pub (crate::A, crate::B)` (Issue #720).
                if content.is_empty() {
                    input.advance_to(&ahead);
                    return Ok(Visibility::Restricted(VisRestricted {
                        pub_token,
                        paren_token,
                        in_token: None,
                        path: Box::new(Path::from(path)),
                    }));
                }
            } else if content.peek(Token![in]) {
                let in_token: Token![in] = content.parse()?;
                let path = content.call(Path::parse_mod_style)?;
                input.advance_to(&ahead);
                return Ok(Visibility::Restricted(VisRestricted {
                    pub_token,
                    paren_token,
                    in_token: Some(in_token),
                    path: Box::new(path),
                }));
            }
        }
        // Anything else after `pub(` was not a restriction; treat the
        // visibility as plain `pub` and leave the parens for the caller.
        Ok(Visibility::Public(VisPublic { pub_token }))
    }
    /// Parses the deprecated bare `crate` visibility. A leading `crate::`
    /// is a path, not a visibility, so peek2 guards against consuming it.
    fn parse_crate(input: ParseStream) -> Result<Self> {
        if input.peek2(Token![::]) {
            Ok(Visibility::Inherited)
        } else {
            Ok(Visibility::Crate(VisCrate {
                crate_token: input.parse()?,
            }))
        }
    }
    /// Returns true for any explicitly written visibility (i.e. everything
    /// except `Visibility::Inherited`).
    #[cfg(feature = "full")]
    pub(crate) fn is_some(&self) -> bool {
        match self {
            Visibility::Inherited => false,
            _ => true,
        }
    }
}
}
#[cfg(feature = "printing")]
@ -401,4 +463,31 @@ mod printing {
self.ty.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for VisPublic {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.pub_token.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for VisCrate {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.crate_token.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for VisRestricted {
    /// Emits `pub` followed by the parenthesized restriction, e.g.
    /// `pub(crate)`, `pub(super)`, or `pub(in some::path)`.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.pub_token.to_tokens(tokens);
        self.paren_token.surround(tokens, |inner| {
            // TODO: if the path is something other than "self", "super",
            // or "crate", automatically add the "in" token.
            self.in_token.to_tokens(inner);
            self.path.to_tokens(inner);
        });
    }
}
}

View File

@ -3,19 +3,32 @@ use crate::punctuated::Punctuated;
ast_struct! {
    /// Data structure sent to a `proc_macro_derive` macro.
    ///
    /// *This type is available only if Syn is built with the `"derive"` feature.*
    #[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
    pub struct DeriveInput {
        /// Attributes tagged on the whole struct or enum.
        pub attrs: Vec<Attribute>,
        /// Visibility of the struct or enum.
        pub vis: Visibility,
        /// Name of the struct or enum.
        pub ident: Ident,
        /// Generics required to complete the definition.
        pub generics: Generics,
        /// Data within the struct or enum: its fields or variants,
        /// depending on which `Data` variant this is.
        pub data: Data,
    }
}
ast_enum! {
ast_enum_of_structs! {
/// The storage of a struct, enum or union data structure.
///
/// *This type is available only if Syn is built with the `"derive"` feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
@ -23,14 +36,24 @@ ast_enum! {
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub enum Data {
/// A struct input to a `proc_macro_derive` macro.
Struct(DataStruct),
/// An enum input to a `proc_macro_derive` macro.
Enum(DataEnum),
/// An untagged union input to a `proc_macro_derive` macro.
Union(DataUnion),
}
do_not_generate_to_tokens
}
ast_struct! {
/// A struct input to a `proc_macro_derive` macro.
///
/// *This type is available only if Syn is built with the `"derive"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub struct DataStruct {
pub struct_token: Token![struct],
@ -41,6 +64,9 @@ ast_struct! {
ast_struct! {
/// An enum input to a `proc_macro_derive` macro.
///
/// *This type is available only if Syn is built with the `"derive"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub struct DataEnum {
pub enum_token: Token![enum],
@ -51,6 +77,9 @@ ast_struct! {
ast_struct! {
/// An untagged union input to a `proc_macro_derive` macro.
///
/// *This type is available only if Syn is built with the `"derive"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub struct DataUnion {
pub union_token: Token![union],
@ -59,7 +88,7 @@ ast_struct! {
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::parse::{Parse, ParseStream, Result};
@ -132,7 +161,7 @@ pub(crate) mod parsing {
}
}
pub(crate) fn data_struct(
pub fn data_struct(
input: ParseStream,
) -> Result<(Option<WhereClause>, Fields, Option<Token![;]>)> {
let mut lookahead = input.lookahead1();
@ -168,7 +197,7 @@ pub(crate) mod parsing {
}
}
pub(crate) fn data_enum(
pub fn data_enum(
input: ParseStream,
) -> Result<(
Option<WhereClause>,
@ -179,12 +208,12 @@ pub(crate) mod parsing {
let content;
let brace = braced!(content in input);
let variants = content.parse_terminated(Variant::parse, Token![,])?;
let variants = content.parse_terminated(Variant::parse)?;
Ok((where_clause, brace, variants))
}
pub(crate) fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
pub fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
let where_clause = input.parse()?;
let fields = input.parse()?;
Ok((where_clause, fields))

View File

@ -1,7 +1,6 @@
//! Extensions to the parsing API with niche applicability.
use super::*;
use proc_macro2::extra::DelimSpan;
/// Extensions to the `ParseStream` API to support speculative parsing.
pub trait Speculative {
@ -193,27 +192,3 @@ impl<'a> Speculative for ParseBuffer<'a> {
.set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) });
}
}
/// Extensions to the `ParseStream` API to support manipulating invisible
/// delimiters the same as if they were visible.
pub trait AnyDelimiter {
/// Returns the delimiter, the span of the delimiter token, and the nested
/// contents for further parsing.
fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)>;
}
impl<'a> AnyDelimiter for ParseBuffer<'a> {
fn parse_any_delimiter(&self) -> Result<(Delimiter, DelimSpan, ParseBuffer)> {
self.step(|cursor| {
if let Some((content, delimiter, span, rest)) = cursor.any_group() {
let scope = crate::buffer::close_span_of_group(*cursor);
let nested = crate::parse::advance_step_cursor(cursor, content);
let unexpected = crate::parse::get_unexpected(self);
let content = crate::parse::new_parse_buffer(scope, nested, unexpected);
Ok(((delimiter, span, content), rest))
} else {
Err(cursor.error("expected any delimiter"))
}
})
}
}

View File

@ -7,6 +7,7 @@ use proc_macro2::{
#[cfg(feature = "printing")]
use quote::ToTokens;
use std::fmt::{self, Debug, Display};
use std::iter::FromIterator;
use std::slice;
use std::vec;
@ -33,34 +34,18 @@ pub type Result<T> = std::result::Result<T, Error>;
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::parse::{Parse, ParseStream, Result};
/// use syn::{parse_macro_input, ItemFn};
/// use syn::{parse_macro_input, AttributeArgs, ItemFn};
///
/// # const IGNORE: &str = stringify! {
/// #[proc_macro_attribute]
/// # };
/// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
/// let args = parse_macro_input!(args as MyAttrArgs);
/// let args = parse_macro_input!(args as AttributeArgs);
/// let input = parse_macro_input!(input as ItemFn);
///
/// /* ... */
/// # TokenStream::new()
/// }
///
/// struct MyAttrArgs {
/// # _k: [(); { stringify! {
/// ...
/// # }; 0 }]
/// }
///
/// impl Parse for MyAttrArgs {
/// fn parse(input: ParseStream) -> Result<Self> {
/// # stringify! {
/// ...
/// # };
/// # unimplemented!()
/// }
/// }
/// ```
///
/// For errors that arise later than the initial parsing stage, the
@ -104,21 +89,14 @@ pub struct Error {
struct ErrorMessage {
// Span is implemented as an index into a thread-local interner to keep the
// size small. It is not safe to access from a different thread. We want
// errors to be Send and Sync to play nicely with ecosystem crates for error
// handling, so pin the span we're given to its original thread and assume
// it is Span::call_site if accessed from any other thread.
span: ThreadBound<SpanRange>,
// errors to be Send and Sync to play nicely with the Failure crate, so pin
// the span we're given to its original thread and assume it is
// Span::call_site if accessed from any other thread.
start_span: ThreadBound<Span>,
end_span: ThreadBound<Span>,
message: String,
}
// Cannot use std::ops::Range<Span> because that does not implement Copy,
// whereas ThreadBound<T> requires a Copy impl as a way to ensure no Drop impls
// are involved.
struct SpanRange {
start: Span,
end: Span,
}
#[cfg(test)]
struct _Test
where
@ -161,10 +139,8 @@ impl Error {
fn new(span: Span, message: String) -> Error {
Error {
messages: vec![ErrorMessage {
span: ThreadBound::new(SpanRange {
start: span,
end: span,
}),
start_span: ThreadBound::new(span),
end_span: ThreadBound::new(span),
message,
}],
}
@ -185,7 +161,6 @@ impl Error {
/// When in doubt it's recommended to stick to `Error::new` (or
/// `ParseStream::error`)!
#[cfg(feature = "printing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self {
return new_spanned(tokens.into_token_stream(), message.to_string());
@ -195,7 +170,8 @@ impl Error {
let end = iter.last().map_or(start, |t| t.span());
Error {
messages: vec![ErrorMessage {
span: ThreadBound::new(SpanRange { start, end }),
start_span: ThreadBound::new(start),
end_span: ThreadBound::new(end),
message,
}],
}
@ -208,7 +184,11 @@ impl Error {
/// if called from a different thread than the one on which the `Error` was
/// originally created.
pub fn span(&self) -> Span {
let SpanRange { start, end } = match self.messages[0].span.get() {
let start = match self.messages[0].start_span.get() {
Some(span) => *span,
None => return Span::call_site(),
};
let end = match self.messages[0].end_span.get() {
Some(span) => *span,
None => return Span::call_site(),
};
@ -274,34 +254,15 @@ impl Error {
impl ErrorMessage {
fn to_compile_error(&self) -> TokenStream {
let (start, end) = match self.span.get() {
Some(range) => (range.start, range.end),
None => (Span::call_site(), Span::call_site()),
};
let start = self
.start_span
.get()
.cloned()
.unwrap_or_else(Span::call_site);
let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
// ::core::compile_error!($message)
// compile_error!($message)
TokenStream::from_iter(vec![
TokenTree::Punct({
let mut punct = Punct::new(':', Spacing::Joint);
punct.set_span(start);
punct
}),
TokenTree::Punct({
let mut punct = Punct::new(':', Spacing::Alone);
punct.set_span(start);
punct
}),
TokenTree::Ident(Ident::new("core", start)),
TokenTree::Punct({
let mut punct = Punct::new(':', Spacing::Joint);
punct.set_span(start);
punct
}),
TokenTree::Punct({
let mut punct = Punct::new(':', Spacing::Alone);
punct.set_span(start);
punct
}),
TokenTree::Ident(Ident::new("compile_error", start)),
TokenTree::Punct({
let mut punct = Punct::new('!', Spacing::Alone);
@ -324,7 +285,7 @@ impl ErrorMessage {
}
#[cfg(feature = "parsing")]
pub(crate) fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
pub fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Error {
if cursor.eof() {
Error::new(scope, format!("unexpected end of input, {}", message))
} else {
@ -334,13 +295,14 @@ pub(crate) fn new_at<T: Display>(scope: Span, cursor: Cursor, message: T) -> Err
}
#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
pub(crate) fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
return new2(start, end, message.to_string());
fn new2(start: Span, end: Span, message: String) -> Error {
Error {
messages: vec![ErrorMessage {
span: ThreadBound::new(SpanRange { start, end }),
start_span: ThreadBound::new(start),
end_span: ThreadBound::new(end),
message,
}],
}
@ -385,26 +347,25 @@ impl Clone for Error {
impl Clone for ErrorMessage {
fn clone(&self) -> Self {
let start = self
.start_span
.get()
.cloned()
.unwrap_or_else(Span::call_site);
let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
ErrorMessage {
span: self.span,
start_span: ThreadBound::new(start),
end_span: ThreadBound::new(end),
message: self.message.clone(),
}
}
}
impl Clone for SpanRange {
fn clone(&self) -> Self {
*self
}
}
impl Copy for SpanRange {}
impl std::error::Error for Error {}
impl From<LexError> for Error {
    /// Converts a tokenization failure into a syn `Error`, anchored at the
    /// span where lexing failed.
    fn from(err: LexError) -> Self {
        // The diff had left both the old and new message expressions in
        // place; keep the syn 1.0.107 form, which uses a fixed message.
        Error::new(err.span(), "lex error")
    }
}

View File

@ -1,73 +1,39 @@
#[doc(hidden)]
pub use std::clone::Clone;
#[doc(hidden)]
pub use std::cmp::{Eq, PartialEq};
#[doc(hidden)]
pub use std::concat;
#[doc(hidden)]
pub use std::default::Default;
#[doc(hidden)]
pub use std::fmt::Debug;
#[doc(hidden)]
pub use std::fmt::{self, Debug, Formatter};
pub use std::hash::{Hash, Hasher};
#[doc(hidden)]
pub use std::marker::Copy;
#[doc(hidden)]
pub use std::option::Option::{None, Some};
#[doc(hidden)]
pub use std::result::Result::{Err, Ok};
#[doc(hidden)]
pub use std::stringify;
#[doc(hidden)]
pub type Formatter<'a> = std::fmt::Formatter<'a>;
#[doc(hidden)]
pub type FmtResult = std::fmt::Result;
#[doc(hidden)]
pub type bool = std::primitive::bool;
#[doc(hidden)]
pub type str = std::primitive::str;
#[cfg(feature = "printing")]
#[doc(hidden)]
pub use quote;
pub extern crate quote;
#[doc(hidden)]
pub type Span = proc_macro2::Span;
#[doc(hidden)]
pub type TokenStream2 = proc_macro2::TokenStream;
pub use proc_macro2::{Span, TokenStream as TokenStream2};
#[cfg(feature = "parsing")]
#[doc(hidden)]
pub use crate::group::{parse_braces, parse_brackets, parse_parens};
#[doc(hidden)]
pub use crate::span::IntoSpans;
#[cfg(all(feature = "parsing", feature = "printing"))]
#[doc(hidden)]
pub use crate::parse_quote::parse as parse_quote;
#[cfg(feature = "parsing")]
#[doc(hidden)]
pub use crate::token::parsing::{peek_punct, punct as parse_punct};
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "proc-macro"
))]
pub use proc_macro::TokenStream;
#[cfg(feature = "printing")]
#[doc(hidden)]
pub use crate::token::printing::punct as print_punct;
#[cfg(feature = "parsing")]
#[doc(hidden)]
pub use crate::token::private::CustomToken;
#[cfg(feature = "proc-macro")]
#[doc(hidden)]
pub type TokenStream = proc_macro::TokenStream;
#[cfg(feature = "printing")]
#[doc(hidden)]
pub use quote::{ToTokens, TokenStreamExt};
#[doc(hidden)]
#[allow(non_camel_case_types)]
pub type bool = help::Bool;
#[allow(non_camel_case_types)]
pub type str = help::Str;
mod help {
pub type Bool = bool;
pub type Str = str;
}
pub struct private(pub(crate) ());

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,6 @@
//! Extension traits to provide parsing methods on foreign types.
//!
//! *This module is available only if Syn is built with the `"parsing"` feature.*
use crate::buffer::Cursor;
use crate::parse::Peek;
@ -11,6 +13,8 @@ use proc_macro2::Ident;
///
/// This trait is sealed and cannot be implemented for types outside of Syn. It
/// is implemented only for `proc_macro2::Ident`.
///
/// *This trait is available only if Syn is built with the `"parsing"` feature.*
pub trait IdentExt: Sized + private::Sealed {
/// Parses any identifier including keywords.
///
@ -92,8 +96,8 @@ impl IdentExt for Ident {
fn unraw(&self) -> Ident {
let string = self.to_string();
if let Some(string) = string.strip_prefix("r#") {
Ident::new(string, self.span())
if string.starts_with("r#") {
Ident::new(&string[2..], self.span())
} else {
self.clone()
}

View File

@ -3,7 +3,7 @@ use super::*;
ast_struct! {
/// A complete file of Rust source code.
///
/// Typically `File` objects are created with [`parse_file`].
/// *This type is available only if Syn is built with the `"full"` feature.*
///
/// # Example
///
@ -86,7 +86,7 @@ ast_struct! {
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::parse::{Parse, ParseStream, Result};

432
src/gen/clone.rs generated
View File

@ -41,30 +41,6 @@ impl Clone for Arm {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for AssocConst {
    /// Clones each field of `AssocConst`.
    fn clone(&self) -> Self {
        let AssocConst {
            ident,
            generics,
            eq_token,
            value,
        } = self;
        AssocConst {
            ident: ident.clone(),
            generics: generics.clone(),
            eq_token: eq_token.clone(),
            value: value.clone(),
        }
    }
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for AssocType {
    /// Clones each field of `AssocType`.
    fn clone(&self) -> Self {
        let AssocType {
            ident,
            generics,
            eq_token,
            ty,
        } = self;
        AssocType {
            ident: ident.clone(),
            generics: generics.clone(),
            eq_token: eq_token.clone(),
            ty: ty.clone(),
        }
    }
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Copy for AttrStyle {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
@ -81,7 +57,8 @@ impl Clone for Attribute {
pound_token: self.pound_token.clone(),
style: self.style.clone(),
bracket_token: self.bracket_token.clone(),
meta: self.meta.clone(),
path: self.path.clone(),
tokens: self.tokens.clone(),
}
}
}
@ -98,18 +75,6 @@ impl Clone for BareFnArg {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for BareVariadic {
fn clone(&self) -> Self {
BareVariadic {
attrs: self.attrs.clone(),
name: self.name.clone(),
dots: self.dots.clone(),
comma: self.comma.clone(),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Copy for BinOp {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
@ -118,6 +83,17 @@ impl Clone for BinOp {
*self
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for Binding {
fn clone(&self) -> Self {
Binding {
ident: self.ident.clone(),
eq_token: self.eq_token.clone(),
ty: self.ty.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for Block {
@ -161,7 +137,6 @@ impl Clone for Constraint {
fn clone(&self) -> Self {
Constraint {
ident: self.ident.clone(),
generics: self.generics.clone(),
colon_token: self.colon_token.clone(),
bounds: self.bounds.clone(),
}
@ -233,6 +208,8 @@ impl Clone for Expr {
#[cfg(feature = "full")]
Expr::Assign(v0) => Expr::Assign(v0.clone()),
#[cfg(feature = "full")]
Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()),
#[cfg(feature = "full")]
Expr::Async(v0) => Expr::Async(v0.clone()),
#[cfg(feature = "full")]
Expr::Await(v0) => Expr::Await(v0.clone()),
@ -240,42 +217,45 @@ impl Clone for Expr {
#[cfg(feature = "full")]
Expr::Block(v0) => Expr::Block(v0.clone()),
#[cfg(feature = "full")]
Expr::Box(v0) => Expr::Box(v0.clone()),
#[cfg(feature = "full")]
Expr::Break(v0) => Expr::Break(v0.clone()),
Expr::Call(v0) => Expr::Call(v0.clone()),
Expr::Cast(v0) => Expr::Cast(v0.clone()),
#[cfg(feature = "full")]
Expr::Closure(v0) => Expr::Closure(v0.clone()),
#[cfg(feature = "full")]
Expr::Const(v0) => Expr::Const(v0.clone()),
#[cfg(feature = "full")]
Expr::Continue(v0) => Expr::Continue(v0.clone()),
Expr::Field(v0) => Expr::Field(v0.clone()),
#[cfg(feature = "full")]
Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()),
#[cfg(feature = "full")]
Expr::Group(v0) => Expr::Group(v0.clone()),
#[cfg(feature = "full")]
Expr::If(v0) => Expr::If(v0.clone()),
Expr::Index(v0) => Expr::Index(v0.clone()),
#[cfg(feature = "full")]
Expr::Infer(v0) => Expr::Infer(v0.clone()),
#[cfg(feature = "full")]
Expr::Let(v0) => Expr::Let(v0.clone()),
Expr::Lit(v0) => Expr::Lit(v0.clone()),
#[cfg(feature = "full")]
Expr::Loop(v0) => Expr::Loop(v0.clone()),
#[cfg(feature = "full")]
Expr::Macro(v0) => Expr::Macro(v0.clone()),
#[cfg(feature = "full")]
Expr::Match(v0) => Expr::Match(v0.clone()),
#[cfg(feature = "full")]
Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()),
Expr::Paren(v0) => Expr::Paren(v0.clone()),
Expr::Path(v0) => Expr::Path(v0.clone()),
#[cfg(feature = "full")]
Expr::Range(v0) => Expr::Range(v0.clone()),
#[cfg(feature = "full")]
Expr::Reference(v0) => Expr::Reference(v0.clone()),
#[cfg(feature = "full")]
Expr::Repeat(v0) => Expr::Repeat(v0.clone()),
#[cfg(feature = "full")]
Expr::Return(v0) => Expr::Return(v0.clone()),
#[cfg(feature = "full")]
Expr::Struct(v0) => Expr::Struct(v0.clone()),
#[cfg(feature = "full")]
Expr::Try(v0) => Expr::Try(v0.clone()),
@ -283,6 +263,8 @@ impl Clone for Expr {
Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()),
#[cfg(feature = "full")]
Expr::Tuple(v0) => Expr::Tuple(v0.clone()),
#[cfg(feature = "full")]
Expr::Type(v0) => Expr::Type(v0.clone()),
Expr::Unary(v0) => Expr::Unary(v0.clone()),
#[cfg(feature = "full")]
Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()),
@ -291,7 +273,7 @@ impl Clone for Expr {
Expr::While(v0) => Expr::While(v0.clone()),
#[cfg(feature = "full")]
Expr::Yield(v0) => Expr::Yield(v0.clone()),
#[cfg(not(feature = "full"))]
#[cfg(any(syn_no_non_exhaustive, not(feature = "full")))]
_ => unreachable!(),
}
}
@ -321,6 +303,18 @@ impl Clone for ExprAssign {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprAssignOp {
    /// Clones each field of `ExprAssignOp`.
    fn clone(&self) -> Self {
        let ExprAssignOp {
            attrs,
            left,
            op,
            right,
        } = self;
        ExprAssignOp {
            attrs: attrs.clone(),
            left: left.clone(),
            op: op.clone(),
            right: right.clone(),
        }
    }
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprAsync {
fn clone(&self) -> Self {
ExprAsync {
@ -368,6 +362,17 @@ impl Clone for ExprBlock {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprBox {
    /// Clones each field of `ExprBox`.
    fn clone(&self) -> Self {
        let ExprBox {
            attrs,
            box_token,
            expr,
        } = self;
        ExprBox {
            attrs: attrs.clone(),
            box_token: box_token.clone(),
            expr: expr.clone(),
        }
    }
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprBreak {
fn clone(&self) -> Self {
ExprBreak {
@ -408,8 +413,6 @@ impl Clone for ExprClosure {
fn clone(&self) -> Self {
ExprClosure {
attrs: self.attrs.clone(),
lifetimes: self.lifetimes.clone(),
constness: self.constness.clone(),
movability: self.movability.clone(),
asyncness: self.asyncness.clone(),
capture: self.capture.clone(),
@ -423,17 +426,6 @@ impl Clone for ExprClosure {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprConst {
fn clone(&self) -> Self {
ExprConst {
attrs: self.attrs.clone(),
const_token: self.const_token.clone(),
block: self.block.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprContinue {
fn clone(&self) -> Self {
ExprContinue {
@ -470,7 +462,7 @@ impl Clone for ExprForLoop {
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprGroup {
fn clone(&self) -> Self {
@ -508,16 +500,6 @@ impl Clone for ExprIndex {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprInfer {
fn clone(&self) -> Self {
ExprInfer {
attrs: self.attrs.clone(),
underscore_token: self.underscore_token.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprLet {
fn clone(&self) -> Self {
ExprLet {
@ -551,7 +533,7 @@ impl Clone for ExprLoop {
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprMacro {
fn clone(&self) -> Self {
@ -574,7 +556,7 @@ impl Clone for ExprMatch {
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprMethodCall {
fn clone(&self) -> Self {
@ -617,19 +599,20 @@ impl Clone for ExprRange {
fn clone(&self) -> Self {
ExprRange {
attrs: self.attrs.clone(),
start: self.start.clone(),
from: self.from.clone(),
limits: self.limits.clone(),
end: self.end.clone(),
to: self.to.clone(),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprReference {
fn clone(&self) -> Self {
ExprReference {
attrs: self.attrs.clone(),
and_token: self.and_token.clone(),
raw: self.raw.clone(),
mutability: self.mutability.clone(),
expr: self.expr.clone(),
}
@ -659,13 +642,12 @@ impl Clone for ExprReturn {
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprStruct {
fn clone(&self) -> Self {
ExprStruct {
attrs: self.attrs.clone(),
qself: self.qself.clone(),
path: self.path.clone(),
brace_token: self.brace_token.clone(),
fields: self.fields.clone(),
@ -707,6 +689,18 @@ impl Clone for ExprTuple {
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprType {
fn clone(&self) -> Self {
ExprType {
attrs: self.attrs.clone(),
expr: self.expr.clone(),
colon_token: self.colon_token.clone(),
ty: self.ty.clone(),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ExprUnary {
@ -760,22 +754,12 @@ impl Clone for Field {
Field {
attrs: self.attrs.clone(),
vis: self.vis.clone(),
mutability: self.mutability.clone(),
ident: self.ident.clone(),
colon_token: self.colon_token.clone(),
ty: self.ty.clone(),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for FieldMutability {
fn clone(&self) -> Self {
match self {
FieldMutability::None => FieldMutability::None,
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for FieldPat {
@ -788,7 +772,7 @@ impl Clone for FieldPat {
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for FieldValue {
fn clone(&self) -> Self {
@ -862,6 +846,8 @@ impl Clone for ForeignItem {
ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
}
}
}
@ -913,7 +899,6 @@ impl Clone for ForeignItemType {
vis: self.vis.clone(),
type_token: self.type_token.clone(),
ident: self.ident.clone(),
generics: self.generics.clone(),
semi_token: self.semi_token.clone(),
}
}
@ -926,19 +911,28 @@ impl Clone for GenericArgument {
GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
GenericArgument::AssocType(v0) => GenericArgument::AssocType(v0.clone()),
GenericArgument::AssocConst(v0) => GenericArgument::AssocConst(v0.clone()),
GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for GenericMethodArgument {
fn clone(&self) -> Self {
match self {
GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()),
GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for GenericParam {
fn clone(&self) -> Self {
match self {
GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
GenericParam::Type(v0) => GenericParam::Type(v0.clone()),
GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
GenericParam::Const(v0) => GenericParam::Const(v0.clone()),
}
}
@ -961,10 +955,12 @@ impl Clone for ImplItem {
fn clone(&self) -> Self {
match self {
ImplItem::Const(v0) => ImplItem::Const(v0.clone()),
ImplItem::Fn(v0) => ImplItem::Fn(v0.clone()),
ImplItem::Method(v0) => ImplItem::Method(v0.clone()),
ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
}
}
}
@ -978,7 +974,6 @@ impl Clone for ImplItemConst {
defaultness: self.defaultness.clone(),
const_token: self.const_token.clone(),
ident: self.ident.clone(),
generics: self.generics.clone(),
colon_token: self.colon_token.clone(),
ty: self.ty.clone(),
eq_token: self.eq_token.clone(),
@ -989,19 +984,6 @@ impl Clone for ImplItemConst {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ImplItemFn {
fn clone(&self) -> Self {
ImplItemFn {
attrs: self.attrs.clone(),
vis: self.vis.clone(),
defaultness: self.defaultness.clone(),
sig: self.sig.clone(),
block: self.block.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ImplItemMacro {
fn clone(&self) -> Self {
ImplItemMacro {
@ -1013,6 +995,19 @@ impl Clone for ImplItemMacro {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ImplItemMethod {
fn clone(&self) -> Self {
ImplItemMethod {
attrs: self.attrs.clone(),
vis: self.vis.clone(),
defaultness: self.defaultness.clone(),
sig: self.sig.clone(),
block: self.block.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ImplItemType {
fn clone(&self) -> Self {
ImplItemType {
@ -1028,13 +1023,6 @@ impl Clone for ImplItemType {
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ImplRestriction {
fn clone(&self) -> Self {
match *self {}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for Index {
@ -1057,6 +1045,7 @@ impl Clone for Item {
Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()),
Item::Impl(v0) => Item::Impl(v0.clone()),
Item::Macro(v0) => Item::Macro(v0.clone()),
Item::Macro2(v0) => Item::Macro2(v0.clone()),
Item::Mod(v0) => Item::Mod(v0.clone()),
Item::Static(v0) => Item::Static(v0.clone()),
Item::Struct(v0) => Item::Struct(v0.clone()),
@ -1066,6 +1055,8 @@ impl Clone for Item {
Item::Union(v0) => Item::Union(v0.clone()),
Item::Use(v0) => Item::Use(v0.clone()),
Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
}
}
}
@ -1078,7 +1069,6 @@ impl Clone for ItemConst {
vis: self.vis.clone(),
const_token: self.const_token.clone(),
ident: self.ident.clone(),
generics: self.generics.clone(),
colon_token: self.colon_token.clone(),
ty: self.ty.clone(),
eq_token: self.eq_token.clone(),
@ -1135,7 +1125,6 @@ impl Clone for ItemForeignMod {
fn clone(&self) -> Self {
ItemForeignMod {
attrs: self.attrs.clone(),
unsafety: self.unsafety.clone(),
abi: self.abi.clone(),
brace_token: self.brace_token.clone(),
items: self.items.clone(),
@ -1173,12 +1162,24 @@ impl Clone for ItemMacro {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ItemMacro2 {
fn clone(&self) -> Self {
ItemMacro2 {
attrs: self.attrs.clone(),
vis: self.vis.clone(),
macro_token: self.macro_token.clone(),
ident: self.ident.clone(),
rules: self.rules.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for ItemMod {
fn clone(&self) -> Self {
ItemMod {
attrs: self.attrs.clone(),
vis: self.vis.clone(),
unsafety: self.unsafety.clone(),
mod_token: self.mod_token.clone(),
ident: self.ident.clone(),
content: self.content.clone(),
@ -1228,7 +1229,6 @@ impl Clone for ItemTrait {
vis: self.vis.clone(),
unsafety: self.unsafety.clone(),
auto_token: self.auto_token.clone(),
restriction: self.restriction.clone(),
trait_token: self.trait_token.clone(),
ident: self.ident.clone(),
generics: self.generics.clone(),
@ -1311,9 +1311,9 @@ impl Clone for Label {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for LifetimeParam {
impl Clone for LifetimeDef {
fn clone(&self) -> Self {
LifetimeParam {
LifetimeDef {
attrs: self.attrs.clone(),
lifetime: self.lifetime.clone(),
colon_token: self.colon_token.clone(),
@ -1358,17 +1358,6 @@ impl Clone for Local {
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for LocalInit {
fn clone(&self) -> Self {
LocalInit {
eq_token: self.eq_token.clone(),
expr: self.expr.clone(),
diverge: self.diverge.clone(),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for Macro {
@ -1419,8 +1408,8 @@ impl Clone for MetaList {
fn clone(&self) -> Self {
MetaList {
path: self.path.clone(),
delimiter: self.delimiter.clone(),
tokens: self.tokens.clone(),
paren_token: self.paren_token.clone(),
nested: self.nested.clone(),
}
}
}
@ -1431,7 +1420,29 @@ impl Clone for MetaNameValue {
MetaNameValue {
path: self.path.clone(),
eq_token: self.eq_token.clone(),
value: self.value.clone(),
lit: self.lit.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for MethodTurbofish {
fn clone(&self) -> Self {
MethodTurbofish {
colon2_token: self.colon2_token.clone(),
lt_token: self.lt_token.clone(),
args: self.args.clone(),
gt_token: self.gt_token.clone(),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for NestedMeta {
fn clone(&self) -> Self {
match self {
NestedMeta::Meta(v0) => NestedMeta::Meta(v0.clone()),
NestedMeta::Lit(v0) => NestedMeta::Lit(v0.clone()),
}
}
}
@ -1451,12 +1462,11 @@ impl Clone for ParenthesizedGenericArguments {
impl Clone for Pat {
fn clone(&self) -> Self {
match self {
Pat::Const(v0) => Pat::Const(v0.clone()),
Pat::Box(v0) => Pat::Box(v0.clone()),
Pat::Ident(v0) => Pat::Ident(v0.clone()),
Pat::Lit(v0) => Pat::Lit(v0.clone()),
Pat::Macro(v0) => Pat::Macro(v0.clone()),
Pat::Or(v0) => Pat::Or(v0.clone()),
Pat::Paren(v0) => Pat::Paren(v0.clone()),
Pat::Path(v0) => Pat::Path(v0.clone()),
Pat::Range(v0) => Pat::Range(v0.clone()),
Pat::Reference(v0) => Pat::Reference(v0.clone()),
@ -1468,6 +1478,19 @@ impl Clone for Pat {
Pat::Type(v0) => Pat::Type(v0.clone()),
Pat::Verbatim(v0) => Pat::Verbatim(v0.clone()),
Pat::Wild(v0) => Pat::Wild(v0.clone()),
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for PatBox {
fn clone(&self) -> Self {
PatBox {
attrs: self.attrs.clone(),
box_token: self.box_token.clone(),
pat: self.pat.clone(),
}
}
}
@ -1486,6 +1509,26 @@ impl Clone for PatIdent {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for PatLit {
fn clone(&self) -> Self {
PatLit {
attrs: self.attrs.clone(),
expr: self.expr.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for PatMacro {
fn clone(&self) -> Self {
PatMacro {
attrs: self.attrs.clone(),
mac: self.mac.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for PatOr {
fn clone(&self) -> Self {
PatOr {
@ -1497,12 +1540,24 @@ impl Clone for PatOr {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for PatParen {
impl Clone for PatPath {
fn clone(&self) -> Self {
PatParen {
PatPath {
attrs: self.attrs.clone(),
paren_token: self.paren_token.clone(),
pat: self.pat.clone(),
qself: self.qself.clone(),
path: self.path.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for PatRange {
fn clone(&self) -> Self {
PatRange {
attrs: self.attrs.clone(),
lo: self.lo.clone(),
limits: self.limits.clone(),
hi: self.hi.clone(),
}
}
}
@ -1545,11 +1600,10 @@ impl Clone for PatStruct {
fn clone(&self) -> Self {
PatStruct {
attrs: self.attrs.clone(),
qself: self.qself.clone(),
path: self.path.clone(),
brace_token: self.brace_token.clone(),
fields: self.fields.clone(),
rest: self.rest.clone(),
dot2_token: self.dot2_token.clone(),
}
}
}
@ -1570,10 +1624,8 @@ impl Clone for PatTupleStruct {
fn clone(&self) -> Self {
PatTupleStruct {
attrs: self.attrs.clone(),
qself: self.qself.clone(),
path: self.path.clone(),
paren_token: self.paren_token.clone(),
elems: self.elems.clone(),
pat: self.pat.clone(),
}
}
}
@ -1634,6 +1686,17 @@ impl Clone for PathSegment {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for PredicateEq {
fn clone(&self) -> Self {
PredicateEq {
lhs_ty: self.lhs_ty.clone(),
eq_token: self.eq_token.clone(),
rhs_ty: self.rhs_ty.clone(),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for PredicateLifetime {
fn clone(&self) -> Self {
PredicateLifetime {
@ -1687,8 +1750,6 @@ impl Clone for Receiver {
reference: self.reference.clone(),
mutability: self.mutability.clone(),
self_token: self.self_token.clone(),
colon_token: self.colon_token.clone(),
ty: self.ty.clone(),
}
}
}
@ -1723,34 +1784,13 @@ impl Clone for Signature {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for StaticMutability {
fn clone(&self) -> Self {
match self {
StaticMutability::Mut(v0) => StaticMutability::Mut(v0.clone()),
StaticMutability::None => StaticMutability::None,
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for Stmt {
fn clone(&self) -> Self {
match self {
Stmt::Local(v0) => Stmt::Local(v0.clone()),
Stmt::Item(v0) => Stmt::Item(v0.clone()),
Stmt::Expr(v0, v1) => Stmt::Expr(v0.clone(), v1.clone()),
Stmt::Macro(v0) => Stmt::Macro(v0.clone()),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for StmtMacro {
fn clone(&self) -> Self {
StmtMacro {
attrs: self.attrs.clone(),
mac: self.mac.clone(),
semi_token: self.semi_token.clone(),
Stmt::Expr(v0) => Stmt::Expr(v0.clone()),
Stmt::Semi(v0, v1) => Stmt::Semi(v0.clone(), v1.clone()),
}
}
}
@ -1782,10 +1822,12 @@ impl Clone for TraitItem {
fn clone(&self) -> Self {
match self {
TraitItem::Const(v0) => TraitItem::Const(v0.clone()),
TraitItem::Fn(v0) => TraitItem::Fn(v0.clone()),
TraitItem::Method(v0) => TraitItem::Method(v0.clone()),
TraitItem::Type(v0) => TraitItem::Type(v0.clone()),
TraitItem::Macro(v0) => TraitItem::Macro(v0.clone()),
TraitItem::Verbatim(v0) => TraitItem::Verbatim(v0.clone()),
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
}
}
}
@ -1797,7 +1839,6 @@ impl Clone for TraitItemConst {
attrs: self.attrs.clone(),
const_token: self.const_token.clone(),
ident: self.ident.clone(),
generics: self.generics.clone(),
colon_token: self.colon_token.clone(),
ty: self.ty.clone(),
default: self.default.clone(),
@ -1807,18 +1848,6 @@ impl Clone for TraitItemConst {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for TraitItemFn {
fn clone(&self) -> Self {
TraitItemFn {
attrs: self.attrs.clone(),
sig: self.sig.clone(),
default: self.default.clone(),
semi_token: self.semi_token.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for TraitItemMacro {
fn clone(&self) -> Self {
TraitItemMacro {
@ -1830,6 +1859,18 @@ impl Clone for TraitItemMacro {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for TraitItemMethod {
fn clone(&self) -> Self {
TraitItemMethod {
attrs: self.attrs.clone(),
sig: self.sig.clone(),
default: self.default.clone(),
semi_token: self.semi_token.clone(),
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for TraitItemType {
fn clone(&self) -> Self {
TraitItemType {
@ -1864,6 +1905,8 @@ impl Clone for Type {
Type::TraitObject(v0) => Type::TraitObject(v0.clone()),
Type::Tuple(v0) => Type::Tuple(v0.clone()),
Type::Verbatim(v0) => Type::Verbatim(v0.clone()),
#[cfg(syn_no_non_exhaustive)]
_ => unreachable!(),
}
}
}
@ -1961,7 +2004,6 @@ impl Clone for TypeParamBound {
match self {
TypeParamBound::Trait(v0) => TypeParamBound::Trait(v0.clone()),
TypeParamBound::Lifetime(v0) => TypeParamBound::Lifetime(v0.clone()),
TypeParamBound::Verbatim(v0) => TypeParamBound::Verbatim(v0.clone()),
}
}
}
@ -2112,15 +2154,13 @@ impl Clone for UseTree {
}
}
}
#[cfg(feature = "full")]
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for Variadic {
fn clone(&self) -> Self {
Variadic {
attrs: self.attrs.clone(),
pat: self.pat.clone(),
dots: self.dots.clone(),
comma: self.comma.clone(),
}
}
}
@ -2138,6 +2178,24 @@ impl Clone for Variant {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for VisCrate {
fn clone(&self) -> Self {
VisCrate {
crate_token: self.crate_token.clone(),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for VisPublic {
fn clone(&self) -> Self {
VisPublic {
pub_token: self.pub_token.clone(),
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for VisRestricted {
fn clone(&self) -> Self {
VisRestricted {
@ -2154,6 +2212,7 @@ impl Clone for Visibility {
fn clone(&self) -> Self {
match self {
Visibility::Public(v0) => Visibility::Public(v0.clone()),
Visibility::Crate(v0) => Visibility::Crate(v0.clone()),
Visibility::Restricted(v0) => Visibility::Restricted(v0.clone()),
Visibility::Inherited => Visibility::Inherited,
}
@ -2174,8 +2233,9 @@ impl Clone for WhereClause {
impl Clone for WherePredicate {
fn clone(&self) -> Self {
match self {
WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()),
WherePredicate::Type(v0) => WherePredicate::Type(v0.clone()),
WherePredicate::Lifetime(v0) => WherePredicate::Lifetime(v0.clone()),
WherePredicate::Eq(v0) => WherePredicate::Eq(v0.clone()),
}
}
}

2954
src/gen/debug.rs generated

File diff suppressed because it is too large Load Diff

481
src/gen/eq.rs generated
View File

@ -37,28 +37,6 @@ impl PartialEq for Arm {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for AssocConst {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for AssocConst {
fn eq(&self, other: &Self) -> bool {
self.ident == other.ident && self.generics == other.generics
&& self.value == other.value
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for AssocType {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for AssocType {
fn eq(&self, other: &Self) -> bool {
self.ident == other.ident && self.generics == other.generics
&& self.ty == other.ty
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for AttrStyle {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -78,7 +56,8 @@ impl Eq for Attribute {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for Attribute {
fn eq(&self, other: &Self) -> bool {
self.style == other.style && self.meta == other.meta
self.style == other.style && self.path == other.path
&& TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -93,16 +72,6 @@ impl PartialEq for BareFnArg {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for BareVariadic {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for BareVariadic {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.name == other.name && self.comma == other.comma
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for BinOp {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -127,20 +96,30 @@ impl PartialEq for BinOp {
(BinOp::Ne(_), BinOp::Ne(_)) => true,
(BinOp::Ge(_), BinOp::Ge(_)) => true,
(BinOp::Gt(_), BinOp::Gt(_)) => true,
(BinOp::AddAssign(_), BinOp::AddAssign(_)) => true,
(BinOp::SubAssign(_), BinOp::SubAssign(_)) => true,
(BinOp::MulAssign(_), BinOp::MulAssign(_)) => true,
(BinOp::DivAssign(_), BinOp::DivAssign(_)) => true,
(BinOp::RemAssign(_), BinOp::RemAssign(_)) => true,
(BinOp::BitXorAssign(_), BinOp::BitXorAssign(_)) => true,
(BinOp::BitAndAssign(_), BinOp::BitAndAssign(_)) => true,
(BinOp::BitOrAssign(_), BinOp::BitOrAssign(_)) => true,
(BinOp::ShlAssign(_), BinOp::ShlAssign(_)) => true,
(BinOp::ShrAssign(_), BinOp::ShrAssign(_)) => true,
(BinOp::AddEq(_), BinOp::AddEq(_)) => true,
(BinOp::SubEq(_), BinOp::SubEq(_)) => true,
(BinOp::MulEq(_), BinOp::MulEq(_)) => true,
(BinOp::DivEq(_), BinOp::DivEq(_)) => true,
(BinOp::RemEq(_), BinOp::RemEq(_)) => true,
(BinOp::BitXorEq(_), BinOp::BitXorEq(_)) => true,
(BinOp::BitAndEq(_), BinOp::BitAndEq(_)) => true,
(BinOp::BitOrEq(_), BinOp::BitOrEq(_)) => true,
(BinOp::ShlEq(_), BinOp::ShlEq(_)) => true,
(BinOp::ShrEq(_), BinOp::ShrEq(_)) => true,
_ => false,
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for Binding {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for Binding {
fn eq(&self, other: &Self) -> bool {
self.ident == other.ident && self.ty == other.ty
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for Block {}
@ -179,8 +158,7 @@ impl Eq for Constraint {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for Constraint {
fn eq(&self, other: &Self) -> bool {
self.ident == other.ident && self.generics == other.generics
&& self.bounds == other.bounds
self.ident == other.ident && self.bounds == other.bounds
}
}
#[cfg(feature = "derive")]
@ -252,6 +230,8 @@ impl PartialEq for Expr {
#[cfg(feature = "full")]
(Expr::Assign(self0), Expr::Assign(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::AssignOp(self0), Expr::AssignOp(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Async(self0), Expr::Async(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Await(self0), Expr::Await(other0)) => self0 == other0,
@ -259,42 +239,45 @@ impl PartialEq for Expr {
#[cfg(feature = "full")]
(Expr::Block(self0), Expr::Block(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Box(self0), Expr::Box(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Break(self0), Expr::Break(other0)) => self0 == other0,
(Expr::Call(self0), Expr::Call(other0)) => self0 == other0,
(Expr::Cast(self0), Expr::Cast(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Closure(self0), Expr::Closure(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Const(self0), Expr::Const(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Continue(self0), Expr::Continue(other0)) => self0 == other0,
(Expr::Field(self0), Expr::Field(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::ForLoop(self0), Expr::ForLoop(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Group(self0), Expr::Group(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::If(self0), Expr::If(other0)) => self0 == other0,
(Expr::Index(self0), Expr::Index(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Infer(self0), Expr::Infer(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Let(self0), Expr::Let(other0)) => self0 == other0,
(Expr::Lit(self0), Expr::Lit(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Loop(self0), Expr::Loop(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Macro(self0), Expr::Macro(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Match(self0), Expr::Match(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::MethodCall(self0), Expr::MethodCall(other0)) => self0 == other0,
(Expr::Paren(self0), Expr::Paren(other0)) => self0 == other0,
(Expr::Path(self0), Expr::Path(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Range(self0), Expr::Range(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Reference(self0), Expr::Reference(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Repeat(self0), Expr::Repeat(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Return(self0), Expr::Return(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Struct(self0), Expr::Struct(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Try(self0), Expr::Try(other0)) => self0 == other0,
@ -302,6 +285,8 @@ impl PartialEq for Expr {
(Expr::TryBlock(self0), Expr::TryBlock(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Tuple(self0), Expr::Tuple(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Type(self0), Expr::Type(other0)) => self0 == other0,
(Expr::Unary(self0), Expr::Unary(other0)) => self0 == other0,
#[cfg(feature = "full")]
(Expr::Unsafe(self0), Expr::Unsafe(other0)) => self0 == other0,
@ -338,6 +323,17 @@ impl PartialEq for ExprAssign {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprAssignOp {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprAssignOp {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.left == other.left && self.op == other.op
&& self.right == other.right
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprAsync {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -381,6 +377,16 @@ impl PartialEq for ExprBlock {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprBox {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprBox {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.expr == other.expr
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprBreak {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -416,8 +422,7 @@ impl Eq for ExprClosure {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprClosure {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.lifetimes == other.lifetimes
&& self.constness == other.constness && self.movability == other.movability
self.attrs == other.attrs && self.movability == other.movability
&& self.asyncness == other.asyncness && self.capture == other.capture
&& self.inputs == other.inputs && self.output == other.output
&& self.body == other.body
@ -425,16 +430,6 @@ impl PartialEq for ExprClosure {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprConst {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprConst {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.block == other.block
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprContinue {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -465,10 +460,10 @@ impl PartialEq for ExprForLoop {
&& self.expr == other.expr && self.body == other.body
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprGroup {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprGroup {
fn eq(&self, other: &Self) -> bool {
@ -499,16 +494,6 @@ impl PartialEq for ExprIndex {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprInfer {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprInfer {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprLet {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -537,10 +522,10 @@ impl PartialEq for ExprLoop {
self.attrs == other.attrs && self.label == other.label && self.body == other.body
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprMacro {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprMacro {
fn eq(&self, other: &Self) -> bool {
@ -557,10 +542,10 @@ impl PartialEq for ExprMatch {
self.attrs == other.attrs && self.expr == other.expr && self.arms == other.arms
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprMethodCall {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprMethodCall {
fn eq(&self, other: &Self) -> bool {
@ -596,14 +581,14 @@ impl Eq for ExprRange {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprRange {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.start == other.start
&& self.limits == other.limits && self.end == other.end
self.attrs == other.attrs && self.from == other.from
&& self.limits == other.limits && self.to == other.to
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprReference {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprReference {
fn eq(&self, other: &Self) -> bool {
@ -631,14 +616,14 @@ impl PartialEq for ExprReturn {
self.attrs == other.attrs && self.expr == other.expr
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprStruct {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprStruct {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
self.attrs == other.attrs && self.path == other.path
&& self.fields == other.fields && self.dot2_token == other.dot2_token
&& self.rest == other.rest
}
@ -673,6 +658,16 @@ impl PartialEq for ExprTuple {
self.attrs == other.attrs && self.elems == other.elems
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprType {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ExprType {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.expr == other.expr && self.ty == other.ty
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ExprUnary {}
@ -721,23 +716,10 @@ impl Eq for Field {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for Field {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.vis == other.vis
&& self.mutability == other.mutability && self.ident == other.ident
self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
&& self.colon_token == other.colon_token && self.ty == other.ty
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for FieldMutability {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for FieldMutability {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(FieldMutability::None, FieldMutability::None) => true,
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for FieldPat {}
@ -749,10 +731,10 @@ impl PartialEq for FieldPat {
&& self.colon_token == other.colon_token && self.pat == other.pat
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for FieldValue {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for FieldValue {
fn eq(&self, other: &Self) -> bool {
@ -880,7 +862,6 @@ impl Eq for ForeignItemType {}
impl PartialEq for ForeignItemType {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
&& self.generics == other.generics
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -900,10 +881,7 @@ impl PartialEq for GenericArgument {
(GenericArgument::Const(self0), GenericArgument::Const(other0)) => {
self0 == other0
}
(GenericArgument::AssocType(self0), GenericArgument::AssocType(other0)) => {
self0 == other0
}
(GenericArgument::AssocConst(self0), GenericArgument::AssocConst(other0)) => {
(GenericArgument::Binding(self0), GenericArgument::Binding(other0)) => {
self0 == other0
}
(GenericArgument::Constraint(self0), GenericArgument::Constraint(other0)) => {
@ -913,6 +891,25 @@ impl PartialEq for GenericArgument {
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for GenericMethodArgument {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for GenericMethodArgument {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(GenericMethodArgument::Type(self0), GenericMethodArgument::Type(other0)) => {
self0 == other0
}
(
GenericMethodArgument::Const(self0),
GenericMethodArgument::Const(other0),
) => self0 == other0,
_ => false,
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for GenericParam {}
@ -921,10 +918,10 @@ impl Eq for GenericParam {}
impl PartialEq for GenericParam {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0,
(GenericParam::Lifetime(self0), GenericParam::Lifetime(other0)) => {
self0 == other0
}
(GenericParam::Type(self0), GenericParam::Type(other0)) => self0 == other0,
(GenericParam::Const(self0), GenericParam::Const(other0)) => self0 == other0,
_ => false,
}
@ -950,7 +947,7 @@ impl PartialEq for ImplItem {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(ImplItem::Const(self0), ImplItem::Const(other0)) => self0 == other0,
(ImplItem::Fn(self0), ImplItem::Fn(other0)) => self0 == other0,
(ImplItem::Method(self0), ImplItem::Method(other0)) => self0 == other0,
(ImplItem::Type(self0), ImplItem::Type(other0)) => self0 == other0,
(ImplItem::Macro(self0), ImplItem::Macro(other0)) => self0 == other0,
(ImplItem::Verbatim(self0), ImplItem::Verbatim(other0)) => {
@ -969,20 +966,7 @@ impl PartialEq for ImplItemConst {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.vis == other.vis
&& self.defaultness == other.defaultness && self.ident == other.ident
&& self.generics == other.generics && self.ty == other.ty
&& self.expr == other.expr
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ImplItemFn {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ImplItemFn {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.vis == other.vis
&& self.defaultness == other.defaultness && self.sig == other.sig
&& self.block == other.block
&& self.ty == other.ty && self.expr == other.expr
}
}
#[cfg(feature = "full")]
@ -998,6 +982,18 @@ impl PartialEq for ImplItemMacro {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ImplItemMethod {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ImplItemMethod {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.vis == other.vis
&& self.defaultness == other.defaultness && self.sig == other.sig
&& self.block == other.block
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ImplItemType {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -1010,16 +1006,6 @@ impl PartialEq for ImplItemType {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ImplRestriction {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ImplRestriction {
fn eq(&self, _other: &Self) -> bool {
match *self {}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for Item {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -1033,6 +1019,7 @@ impl PartialEq for Item {
(Item::ForeignMod(self0), Item::ForeignMod(other0)) => self0 == other0,
(Item::Impl(self0), Item::Impl(other0)) => self0 == other0,
(Item::Macro(self0), Item::Macro(other0)) => self0 == other0,
(Item::Macro2(self0), Item::Macro2(other0)) => self0 == other0,
(Item::Mod(self0), Item::Mod(other0)) => self0 == other0,
(Item::Static(self0), Item::Static(other0)) => self0 == other0,
(Item::Struct(self0), Item::Struct(other0)) => self0 == other0,
@ -1056,8 +1043,7 @@ impl Eq for ItemConst {}
impl PartialEq for ItemConst {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
&& self.generics == other.generics && self.ty == other.ty
&& self.expr == other.expr
&& self.ty == other.ty && self.expr == other.expr
}
}
#[cfg(feature = "full")]
@ -1100,8 +1086,7 @@ impl Eq for ItemForeignMod {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ItemForeignMod {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.unsafety == other.unsafety
&& self.abi == other.abi && self.items == other.items
self.attrs == other.attrs && self.abi == other.abi && self.items == other.items
}
}
#[cfg(feature = "full")]
@ -1130,13 +1115,23 @@ impl PartialEq for ItemMacro {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ItemMacro2 {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ItemMacro2 {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
&& TokenStreamHelper(&self.rules) == TokenStreamHelper(&other.rules)
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for ItemMod {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for ItemMod {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.vis == other.vis
&& self.unsafety == other.unsafety && self.ident == other.ident
self.attrs == other.attrs && self.vis == other.vis && self.ident == other.ident
&& self.content == other.content && self.semi == other.semi
}
}
@ -1173,8 +1168,8 @@ impl PartialEq for ItemTrait {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.vis == other.vis
&& self.unsafety == other.unsafety && self.auto_token == other.auto_token
&& self.restriction == other.restriction && self.ident == other.ident
&& self.generics == other.generics && self.colon_token == other.colon_token
&& self.ident == other.ident && self.generics == other.generics
&& self.colon_token == other.colon_token
&& self.supertraits == other.supertraits && self.items == other.items
}
}
@ -1234,10 +1229,10 @@ impl PartialEq for Label {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for LifetimeParam {}
impl Eq for LifetimeDef {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for LifetimeParam {
impl PartialEq for LifetimeDef {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.lifetime == other.lifetime
&& self.colon_token == other.colon_token && self.bounds == other.bounds
@ -1293,16 +1288,6 @@ impl PartialEq for Local {
self.attrs == other.attrs && self.pat == other.pat && self.init == other.init
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for LocalInit {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for LocalInit {
fn eq(&self, other: &Self) -> bool {
self.expr == other.expr && self.diverge == other.diverge
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for Macro {}
@ -1351,8 +1336,7 @@ impl Eq for MetaList {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for MetaList {
fn eq(&self, other: &Self) -> bool {
self.path == other.path && self.delimiter == other.delimiter
&& TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
self.path == other.path && self.nested == other.nested
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -1362,7 +1346,31 @@ impl Eq for MetaNameValue {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for MetaNameValue {
fn eq(&self, other: &Self) -> bool {
self.path == other.path && self.value == other.value
self.path == other.path && self.lit == other.lit
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for MethodTurbofish {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for MethodTurbofish {
fn eq(&self, other: &Self) -> bool {
self.args == other.args
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for NestedMeta {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for NestedMeta {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(NestedMeta::Meta(self0), NestedMeta::Meta(other0)) => self0 == other0,
(NestedMeta::Lit(self0), NestedMeta::Lit(other0)) => self0 == other0,
_ => false,
}
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -1383,12 +1391,11 @@ impl Eq for Pat {}
impl PartialEq for Pat {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Pat::Const(self0), Pat::Const(other0)) => self0 == other0,
(Pat::Box(self0), Pat::Box(other0)) => self0 == other0,
(Pat::Ident(self0), Pat::Ident(other0)) => self0 == other0,
(Pat::Lit(self0), Pat::Lit(other0)) => self0 == other0,
(Pat::Macro(self0), Pat::Macro(other0)) => self0 == other0,
(Pat::Or(self0), Pat::Or(other0)) => self0 == other0,
(Pat::Paren(self0), Pat::Paren(other0)) => self0 == other0,
(Pat::Path(self0), Pat::Path(other0)) => self0 == other0,
(Pat::Range(self0), Pat::Range(other0)) => self0 == other0,
(Pat::Reference(self0), Pat::Reference(other0)) => self0 == other0,
@ -1408,6 +1415,16 @@ impl PartialEq for Pat {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for PatBox {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for PatBox {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.pat == other.pat
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for PatIdent {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -1420,6 +1437,26 @@ impl PartialEq for PatIdent {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for PatLit {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for PatLit {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.expr == other.expr
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for PatMacro {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for PatMacro {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.mac == other.mac
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for PatOr {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -1431,12 +1468,23 @@ impl PartialEq for PatOr {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for PatParen {}
impl Eq for PatPath {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for PatParen {
impl PartialEq for PatPath {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.pat == other.pat
self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for PatRange {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for PatRange {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.lo == other.lo && self.limits == other.limits
&& self.hi == other.hi
}
}
#[cfg(feature = "full")]
@ -1477,8 +1525,8 @@ impl Eq for PatStruct {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for PatStruct {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
&& self.fields == other.fields && self.rest == other.rest
self.attrs == other.attrs && self.path == other.path
&& self.fields == other.fields && self.dot2_token == other.dot2_token
}
}
#[cfg(feature = "full")]
@ -1498,8 +1546,7 @@ impl Eq for PatTupleStruct {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for PatTupleStruct {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.qself == other.qself && self.path == other.path
&& self.elems == other.elems
self.attrs == other.attrs && self.path == other.path && self.pat == other.pat
}
}
#[cfg(feature = "full")]
@ -1565,6 +1612,16 @@ impl PartialEq for PathSegment {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for PredicateEq {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for PredicateEq {
fn eq(&self, other: &Self) -> bool {
self.lhs_ty == other.lhs_ty && self.rhs_ty == other.rhs_ty
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for PredicateLifetime {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -1618,7 +1675,6 @@ impl PartialEq for Receiver {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.reference == other.reference
&& self.mutability == other.mutability
&& self.colon_token == other.colon_token && self.ty == other.ty
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -1651,20 +1707,6 @@ impl PartialEq for Signature {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for StaticMutability {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for StaticMutability {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(StaticMutability::Mut(_), StaticMutability::Mut(_)) => true,
(StaticMutability::None, StaticMutability::None) => true,
_ => false,
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for Stmt {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -1673,25 +1715,12 @@ impl PartialEq for Stmt {
match (self, other) {
(Stmt::Local(self0), Stmt::Local(other0)) => self0 == other0,
(Stmt::Item(self0), Stmt::Item(other0)) => self0 == other0,
(Stmt::Expr(self0, self1), Stmt::Expr(other0, other1)) => {
self0 == other0 && self1 == other1
}
(Stmt::Macro(self0), Stmt::Macro(other0)) => self0 == other0,
(Stmt::Expr(self0), Stmt::Expr(other0)) => self0 == other0,
(Stmt::Semi(self0, _), Stmt::Semi(other0, _)) => self0 == other0,
_ => false,
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for StmtMacro {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for StmtMacro {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.mac == other.mac
&& self.semi_token == other.semi_token
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for TraitBound {}
@ -1726,7 +1755,7 @@ impl PartialEq for TraitItem {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(TraitItem::Const(self0), TraitItem::Const(other0)) => self0 == other0,
(TraitItem::Fn(self0), TraitItem::Fn(other0)) => self0 == other0,
(TraitItem::Method(self0), TraitItem::Method(other0)) => self0 == other0,
(TraitItem::Type(self0), TraitItem::Type(other0)) => self0 == other0,
(TraitItem::Macro(self0), TraitItem::Macro(other0)) => self0 == other0,
(TraitItem::Verbatim(self0), TraitItem::Verbatim(other0)) => {
@ -1743,24 +1772,12 @@ impl Eq for TraitItemConst {}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for TraitItemConst {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.ident == other.ident
&& self.generics == other.generics && self.ty == other.ty
self.attrs == other.attrs && self.ident == other.ident && self.ty == other.ty
&& self.default == other.default
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for TraitItemFn {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for TraitItemFn {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.sig == other.sig
&& self.default == other.default && self.semi_token == other.semi_token
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for TraitItemMacro {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -1772,6 +1789,17 @@ impl PartialEq for TraitItemMacro {
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for TraitItemMethod {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for TraitItemMethod {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.sig == other.sig
&& self.default == other.default && self.semi_token == other.semi_token
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for TraitItemType {}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -1909,9 +1937,6 @@ impl PartialEq for TypeParamBound {
(TypeParamBound::Lifetime(self0), TypeParamBound::Lifetime(other0)) => {
self0 == other0
}
(TypeParamBound::Verbatim(self0), TypeParamBound::Verbatim(other0)) => {
TokenStreamHelper(self0) == TokenStreamHelper(other0)
}
_ => false,
}
}
@ -2070,14 +2095,14 @@ impl PartialEq for UseTree {
}
}
}
#[cfg(feature = "full")]
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for Variadic {}
#[cfg(feature = "full")]
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for Variadic {
fn eq(&self, other: &Self) -> bool {
self.attrs == other.attrs && self.pat == other.pat && self.comma == other.comma
self.attrs == other.attrs
}
}
#[cfg(any(feature = "derive", feature = "full"))]
@ -2093,6 +2118,26 @@ impl PartialEq for Variant {
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for VisCrate {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for VisCrate {
fn eq(&self, _other: &Self) -> bool {
true
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for VisPublic {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for VisPublic {
fn eq(&self, _other: &Self) -> bool {
true
}
}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Eq for VisRestricted {}
#[cfg(any(feature = "derive", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
@ -2109,7 +2154,8 @@ impl Eq for Visibility {}
impl PartialEq for Visibility {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Visibility::Public(_), Visibility::Public(_)) => true,
(Visibility::Public(self0), Visibility::Public(other0)) => self0 == other0,
(Visibility::Crate(self0), Visibility::Crate(other0)) => self0 == other0,
(Visibility::Restricted(self0), Visibility::Restricted(other0)) => {
self0 == other0
}
@ -2136,12 +2182,13 @@ impl Eq for WherePredicate {}
impl PartialEq for WherePredicate {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => {
self0 == other0
}
(WherePredicate::Type(self0), WherePredicate::Type(other0)) => {
self0 == other0
}
(WherePredicate::Lifetime(self0), WherePredicate::Lifetime(other0)) => {
self0 == other0
}
(WherePredicate::Eq(self0), WherePredicate::Eq(other0)) => self0 == other0,
_ => false,
}
}

1600
src/gen/fold.rs generated

File diff suppressed because it is too large Load Diff

599
src/gen/hash.rs generated

File diff suppressed because it is too large Load Diff

1820
src/gen/visit.rs generated

File diff suppressed because it is too large Load Diff

1877
src/gen/visit_mut.rs generated

File diff suppressed because it is too large Load Diff

View File

@ -1,8 +1,10 @@
#[cfg(feature = "fold")]
pub(crate) mod fold {
pub mod fold {
use crate::fold::Fold;
use crate::punctuated::{Pair, Punctuated};
use proc_macro2::Span;
pub(crate) trait FoldHelper {
pub trait FoldHelper {
type Item;
fn lift<F>(self, f: F) -> Self
where
@ -31,4 +33,122 @@ pub(crate) mod fold {
.collect()
}
}
pub fn tokens_helper<F: Fold + ?Sized, S: Spans>(folder: &mut F, spans: &S) -> S {
spans.fold(folder)
}
pub trait Spans {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self;
}
impl Spans for Span {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
folder.fold_span(*self)
}
}
impl Spans for [Span; 1] {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
[folder.fold_span(self[0])]
}
}
impl Spans for [Span; 2] {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
[folder.fold_span(self[0]), folder.fold_span(self[1])]
}
}
impl Spans for [Span; 3] {
fn fold<F: Fold + ?Sized>(&self, folder: &mut F) -> Self {
[
folder.fold_span(self[0]),
folder.fold_span(self[1]),
folder.fold_span(self[2]),
]
}
}
}
#[cfg(feature = "visit")]
pub mod visit {
use crate::visit::Visit;
use proc_macro2::Span;
pub fn tokens_helper<'ast, V: Visit<'ast> + ?Sized, S: Spans>(visitor: &mut V, spans: &S) {
spans.visit(visitor);
}
pub trait Spans {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V);
}
impl Spans for Span {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) {
visitor.visit_span(self);
}
}
impl Spans for [Span; 1] {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) {
visitor.visit_span(&self[0]);
}
}
impl Spans for [Span; 2] {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) {
visitor.visit_span(&self[0]);
visitor.visit_span(&self[1]);
}
}
impl Spans for [Span; 3] {
fn visit<'ast, V: Visit<'ast> + ?Sized>(&self, visitor: &mut V) {
visitor.visit_span(&self[0]);
visitor.visit_span(&self[1]);
visitor.visit_span(&self[2]);
}
}
}
#[cfg(feature = "visit-mut")]
pub mod visit_mut {
use crate::visit_mut::VisitMut;
use proc_macro2::Span;
pub fn tokens_helper<V: VisitMut + ?Sized, S: Spans>(visitor: &mut V, spans: &mut S) {
spans.visit_mut(visitor);
}
pub trait Spans {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V);
}
impl Spans for Span {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
visitor.visit_span_mut(self);
}
}
impl Spans for [Span; 1] {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
visitor.visit_span_mut(&mut self[0]);
}
}
impl Spans for [Span; 2] {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
visitor.visit_span_mut(&mut self[0]);
visitor.visit_span_mut(&mut self[1]);
}
}
impl Spans for [Span; 3] {
fn visit_mut<V: VisitMut + ?Sized>(&mut self, visitor: &mut V) {
visitor.visit_span_mut(&mut self[0]);
visitor.visit_span_mut(&mut self[1]);
visitor.visit_span_mut(&mut self[2]);
}
}
}

View File

@ -1,6 +1,5 @@
use super::*;
use crate::punctuated::{Iter, IterMut, Punctuated};
use proc_macro2::TokenStream;
#[cfg(all(feature = "printing", feature = "extra-traits"))]
use std::fmt::{self, Debug};
#[cfg(all(feature = "printing", feature = "extra-traits"))]
@ -10,12 +9,8 @@ ast_struct! {
/// Lifetimes and type parameters attached to a declaration of a function,
/// enum, trait, etc.
///
/// This struct represents two distinct optional syntactic elements,
/// [generic parameters] and [where clause]. In some locations of the
/// grammar, there may be other tokens in between these two things.
///
/// [generic parameters]: https://doc.rust-lang.org/stable/reference/items/generics.html#generic-parameters
/// [where clause]: https://doc.rust-lang.org/stable/reference/items/generics.html#where-clauses
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Generics {
pub lt_token: Option<Token![<]>,
@ -29,6 +24,9 @@ ast_enum_of_structs! {
/// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
/// `'a: 'b`, `const LEN: usize`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
@ -36,30 +34,22 @@ ast_enum_of_structs! {
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum GenericParam {
/// A lifetime parameter: `'a: 'b + 'c + 'd`.
Lifetime(LifetimeParam),
/// A generic type parameter: `T: Into<String>`.
Type(TypeParam),
/// A lifetime definition: `'a: 'b + 'c + 'd`.
Lifetime(LifetimeDef),
/// A const generic parameter: `const LENGTH: usize`.
Const(ConstParam),
}
}
ast_struct! {
/// A lifetime definition: `'a: 'b + 'c + 'd`.
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct LifetimeParam {
pub attrs: Vec<Attribute>,
pub lifetime: Lifetime,
pub colon_token: Option<Token![:]>,
pub bounds: Punctuated<Lifetime, Token![+]>,
}
}
ast_struct! {
/// A generic type parameter: `T: Into<String>`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeParam {
pub attrs: Vec<Attribute>,
@ -71,8 +61,25 @@ ast_struct! {
}
}
ast_struct! {
/// A lifetime definition: `'a: 'b + 'c + 'd`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct LifetimeDef {
pub attrs: Vec<Attribute>,
pub lifetime: Lifetime,
pub colon_token: Option<Token![:]>,
pub bounds: Punctuated<Lifetime, Token![+]>,
}
}
ast_struct! {
/// A const generic parameter: `const LENGTH: usize`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct ConstParam {
pub attrs: Vec<Attribute>,
@ -97,28 +104,6 @@ impl Default for Generics {
}
impl Generics {
/// Returns an
/// <code
/// style="padding-right:0;">Iterator&lt;Item = &amp;</code><a
/// href="struct.LifetimeParam.html"><code
/// style="padding-left:0;padding-right:0;">LifetimeParam</code></a><code
/// style="padding-left:0;">&gt;</code>
/// over the lifetime parameters in `self.params`.
pub fn lifetimes(&self) -> Lifetimes {
Lifetimes(self.params.iter())
}
/// Returns an
/// <code
/// style="padding-right:0;">Iterator&lt;Item = &amp;mut </code><a
/// href="struct.LifetimeParam.html"><code
/// style="padding-left:0;padding-right:0;">LifetimeParam</code></a><code
/// style="padding-left:0;">&gt;</code>
/// over the lifetime parameters in `self.params`.
pub fn lifetimes_mut(&mut self) -> LifetimesMut {
LifetimesMut(self.params.iter_mut())
}
/// Returns an
/// <code
/// style="padding-right:0;">Iterator&lt;Item = &amp;</code><a
@ -141,6 +126,28 @@ impl Generics {
TypeParamsMut(self.params.iter_mut())
}
/// Returns an
/// <code
/// style="padding-right:0;">Iterator&lt;Item = &amp;</code><a
/// href="struct.LifetimeDef.html"><code
/// style="padding-left:0;padding-right:0;">LifetimeDef</code></a><code
/// style="padding-left:0;">&gt;</code>
/// over the lifetime parameters in `self.params`.
pub fn lifetimes(&self) -> Lifetimes {
Lifetimes(self.params.iter())
}
/// Returns an
/// <code
/// style="padding-right:0;">Iterator&lt;Item = &amp;mut </code><a
/// href="struct.LifetimeDef.html"><code
/// style="padding-left:0;padding-right:0;">LifetimeDef</code></a><code
/// style="padding-left:0;">&gt;</code>
/// over the lifetime parameters in `self.params`.
pub fn lifetimes_mut(&mut self) -> LifetimesMut {
LifetimesMut(self.params.iter_mut())
}
/// Returns an
/// <code
/// style="padding-right:0;">Iterator&lt;Item = &amp;</code><a
@ -172,42 +179,6 @@ impl Generics {
}
}
pub struct Lifetimes<'a>(Iter<'a, GenericParam>);
impl<'a> Iterator for Lifetimes<'a> {
type Item = &'a LifetimeParam;
fn next(&mut self) -> Option<Self::Item> {
let next = match self.0.next() {
Some(item) => item,
None => return None,
};
if let GenericParam::Lifetime(lifetime) = next {
Some(lifetime)
} else {
self.next()
}
}
}
pub struct LifetimesMut<'a>(IterMut<'a, GenericParam>);
impl<'a> Iterator for LifetimesMut<'a> {
type Item = &'a mut LifetimeParam;
fn next(&mut self) -> Option<Self::Item> {
let next = match self.0.next() {
Some(item) => item,
None => return None,
};
if let GenericParam::Lifetime(lifetime) = next {
Some(lifetime)
} else {
self.next()
}
}
}
pub struct TypeParams<'a>(Iter<'a, GenericParam>);
impl<'a> Iterator for TypeParams<'a> {
@ -244,6 +215,42 @@ impl<'a> Iterator for TypeParamsMut<'a> {
}
}
pub struct Lifetimes<'a>(Iter<'a, GenericParam>);
impl<'a> Iterator for Lifetimes<'a> {
type Item = &'a LifetimeDef;
fn next(&mut self) -> Option<Self::Item> {
let next = match self.0.next() {
Some(item) => item,
None => return None,
};
if let GenericParam::Lifetime(lifetime) = next {
Some(lifetime)
} else {
self.next()
}
}
}
pub struct LifetimesMut<'a>(IterMut<'a, GenericParam>);
impl<'a> Iterator for LifetimesMut<'a> {
type Item = &'a mut LifetimeDef;
fn next(&mut self) -> Option<Self::Item> {
let next = match self.0.next() {
Some(item) => item,
None => return None,
};
if let GenericParam::Lifetime(lifetime) = next {
Some(lifetime)
} else {
self.next()
}
}
}
pub struct ConstParams<'a>(Iter<'a, GenericParam>);
impl<'a> Iterator for ConstParams<'a> {
@ -281,6 +288,9 @@ impl<'a> Iterator for ConstParamsMut<'a> {
}
/// Returned by `Generics::split_for_impl`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(
doc_cfg,
@ -289,6 +299,9 @@ impl<'a> Iterator for ConstParamsMut<'a> {
pub struct ImplGenerics<'a>(&'a Generics);
/// Returned by `Generics::split_for_impl`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(
doc_cfg,
@ -297,6 +310,9 @@ pub struct ImplGenerics<'a>(&'a Generics);
pub struct TypeGenerics<'a>(&'a Generics);
/// Returned by `TypeGenerics::as_turbofish`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature and the `"printing"` feature.*
#[cfg(feature = "printing")]
#[cfg_attr(
doc_cfg,
@ -324,6 +340,9 @@ impl Generics {
/// }
/// # ;
/// ```
///
/// *This method is available only if Syn is built with the `"derive"` or
/// `"full"` feature and the `"printing"` feature.*
#[cfg_attr(
doc_cfg,
doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
@ -391,6 +410,9 @@ generics_wrapper_impls!(Turbofish);
#[cfg(feature = "printing")]
impl<'a> TypeGenerics<'a> {
/// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
///
/// *This method is available only if Syn is built with the `"derive"` or
/// `"full"` feature and the `"printing"` feature.*
pub fn as_turbofish(&self) -> Turbofish {
Turbofish(self.0)
}
@ -398,11 +420,14 @@ impl<'a> TypeGenerics<'a> {
ast_struct! {
/// A set of bound lifetimes: `for<'a, 'b, 'c>`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct BoundLifetimes {
pub for_token: Token![for],
pub lt_token: Token![<],
pub lifetimes: Punctuated<GenericParam, Token![,]>,
pub lifetimes: Punctuated<LifetimeDef, Token![,]>,
pub gt_token: Token![>],
}
}
@ -418,9 +443,9 @@ impl Default for BoundLifetimes {
}
}
impl LifetimeParam {
impl LifetimeDef {
pub fn new(lifetime: Lifetime) -> Self {
LifetimeParam {
LifetimeDef {
attrs: Vec::new(),
lifetime,
colon_token: None,
@ -444,17 +469,21 @@ impl From<Ident> for TypeParam {
ast_enum_of_structs! {
/// A trait or lifetime used as a bound on a type parameter.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
#[non_exhaustive]
pub enum TypeParamBound {
Trait(TraitBound),
Lifetime(Lifetime),
Verbatim(TokenStream),
}
}
ast_struct! {
/// A trait used as a bound on a type parameter.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TraitBound {
pub paren_token: Option<token::Paren>,
@ -469,6 +498,9 @@ ast_struct! {
ast_enum! {
/// A modifier on a trait bound, currently only used for the `?` in
/// `?Sized`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum TraitBoundModifier {
None,
@ -479,6 +511,9 @@ ast_enum! {
ast_struct! {
/// A `where` clause in a definition: `where T: Deserialize<'de>, D:
/// 'static`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct WhereClause {
pub where_token: Token![where],
@ -489,34 +524,32 @@ ast_struct! {
ast_enum_of_structs! {
/// A single predicate in a `where` clause: `T: Deserialize<'de>`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
#[non_exhaustive]
pub enum WherePredicate {
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
Type(PredicateType),
/// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
Lifetime(PredicateLifetime),
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
Type(PredicateType),
}
}
ast_struct! {
/// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct PredicateLifetime {
pub lifetime: Lifetime,
pub colon_token: Token![:],
pub bounds: Punctuated<Lifetime, Token![+]>,
/// An equality predicate in a `where` clause (unsupported).
Eq(PredicateEq),
}
}
ast_struct! {
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct PredicateType {
/// Any lifetimes from a `for` binding
@ -529,10 +562,36 @@ ast_struct! {
}
}
ast_struct! {
/// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct PredicateLifetime {
pub lifetime: Lifetime,
pub colon_token: Token![:],
pub bounds: Punctuated<Lifetime, Token![+]>,
}
}
ast_struct! {
/// An equality predicate in a `where` clause (unsupported).
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct PredicateEq {
pub lhs_ty: Type,
pub eq_token: Token![=],
pub rhs_ty: Type,
}
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::ext::IdentExt as _;
use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
@ -553,7 +612,7 @@ pub(crate) mod parsing {
let attrs = input.call(Attribute::parse_outer)?;
let lookahead = input.lookahead1();
if lookahead.peek(Lifetime) {
params.push_value(GenericParam::Lifetime(LifetimeParam {
params.push_value(GenericParam::Lifetime(LifetimeDef {
attrs,
..input.parse()?
}));
@ -610,7 +669,7 @@ pub(crate) mod parsing {
..input.parse()?
}))
} else if lookahead.peek(Lifetime) {
Ok(GenericParam::Lifetime(LifetimeParam {
Ok(GenericParam::Lifetime(LifetimeDef {
attrs,
..input.parse()?
}))
@ -626,10 +685,10 @@ pub(crate) mod parsing {
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for LifetimeParam {
impl Parse for LifetimeDef {
fn parse(input: ParseStream) -> Result<Self> {
let has_colon;
Ok(LifetimeParam {
Ok(LifetimeDef {
attrs: input.call(Attribute::parse_outer)?,
lifetime: input.parse()?,
colon_token: {
@ -672,14 +731,7 @@ pub(crate) mod parsing {
lifetimes: {
let mut lifetimes = Punctuated::new();
while !input.peek(Token![>]) {
let attrs = input.call(Attribute::parse_outer)?;
let lifetime: Lifetime = input.parse()?;
lifetimes.push_value(GenericParam::Lifetime(LifetimeParam {
attrs,
lifetime,
colon_token: None,
bounds: Punctuated::new(),
}));
lifetimes.push_value(input.parse()?);
if input.peek(Token![>]) {
break;
}
@ -710,12 +762,19 @@ pub(crate) mod parsing {
let ident: Ident = input.parse()?;
let colon_token: Option<Token![:]> = input.parse()?;
let begin_bound = input.fork();
let mut is_maybe_const = false;
let mut bounds = Punctuated::new();
if colon_token.is_some() {
loop {
if input.peek(Token![,]) || input.peek(Token![>]) || input.peek(Token![=]) {
break;
}
if input.peek(Token![~]) && input.peek2(Token![const]) {
input.parse::<Token![~]>()?;
input.parse::<Token![const]>()?;
is_maybe_const = true;
}
let value: TypeParamBound = input.parse()?;
bounds.push_value(value);
if !input.peek(Token![+]) {
@ -726,13 +785,19 @@ pub(crate) mod parsing {
}
}
let eq_token: Option<Token![=]> = input.parse()?;
let default = if eq_token.is_some() {
let mut eq_token: Option<Token![=]> = input.parse()?;
let mut default = if eq_token.is_some() {
Some(input.parse::<Type>()?)
} else {
None
};
if is_maybe_const {
bounds.clear();
eq_token = None;
default = Some(Type::Verbatim(verbatim::between(begin_bound, input)));
}
Ok(TypeParam {
attrs,
ident,
@ -751,30 +816,15 @@ pub(crate) mod parsing {
return input.parse().map(TypeParamBound::Lifetime);
}
let begin = input.fork();
let content;
let (paren_token, content) = if input.peek(token::Paren) {
(Some(parenthesized!(content in input)), &content)
} else {
(None, input)
};
let is_tilde_const =
cfg!(feature = "full") && content.peek(Token![~]) && content.peek2(Token![const]);
if is_tilde_const {
content.parse::<Token![~]>()?;
content.parse::<Token![const]>()?;
if input.peek(token::Paren) {
let content;
let paren_token = parenthesized!(content in input);
let mut bound: TraitBound = content.parse()?;
bound.paren_token = Some(paren_token);
return Ok(TypeParamBound::Trait(bound));
}
let mut bound: TraitBound = content.parse()?;
bound.paren_token = paren_token;
if is_tilde_const {
Ok(TypeParamBound::Verbatim(verbatim::between(&begin, input)))
} else {
Ok(TypeParamBound::Trait(bound))
}
input.parse().map(TypeParamBound::Trait)
}
}
@ -794,8 +844,7 @@ pub(crate) mod parsing {
|| input.peek(Token![::])
|| input.peek(Token![?])
|| input.peek(Lifetime)
|| input.peek(token::Paren)
|| input.peek(Token![~]))
|| input.peek(token::Paren))
{
break;
}
@ -807,6 +856,15 @@ pub(crate) mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for TraitBound {
fn parse(input: ParseStream) -> Result<Self> {
#[cfg(feature = "full")]
let tilde_const = if input.peek(Token![~]) && input.peek2(Token![const]) {
let tilde_token = input.parse::<Token![~]>()?;
let const_token = input.parse::<Token![const]>()?;
Some((tilde_token, const_token))
} else {
None
};
let modifier: TraitBoundModifier = input.parse()?;
let lifetimes: Option<BoundLifetimes> = input.parse()?;
@ -820,6 +878,21 @@ pub(crate) mod parsing {
path.segments.last_mut().unwrap().arguments = parenthesized;
}
#[cfg(feature = "full")]
{
if let Some((tilde_token, const_token)) = tilde_const {
path.segments.insert(
0,
PathSegment {
ident: Ident::new("const", const_token.span),
arguments: PathArguments::None,
},
);
let (_const, punct) = path.segments.pairs_mut().next().unwrap().into_tuple();
*punct.unwrap() = Token![::](tilde_token.span);
}
}
Ok(TraitBound {
paren_token: None,
modifier,
@ -974,7 +1047,11 @@ mod printing {
use super::*;
use crate::attr::FilterAttrs;
use crate::print::TokensOrDefault;
#[cfg(feature = "full")]
use crate::punctuated::Pair;
use proc_macro2::TokenStream;
#[cfg(feature = "full")]
use proc_macro2::TokenTree;
use quote::{ToTokens, TokenStreamExt};
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
@ -988,6 +1065,9 @@ mod printing {
// Print lifetimes before types and consts, regardless of their
// order in self.params.
//
// TODO: ordering rules for const parameters vs type parameters have
// not been settled yet. https://github.com/rust-lang/rust/issues/44580
let mut trailing_or_empty = true;
for param in self.params.pairs() {
if let GenericParam::Lifetime(_) = **param.value() {
@ -996,7 +1076,7 @@ mod printing {
}
}
for param in self.params.pairs() {
match param.value() {
match **param.value() {
GenericParam::Type(_) | GenericParam::Const(_) => {
if !trailing_or_empty {
<Token![,]>::default().to_tokens(tokens);
@ -1022,6 +1102,9 @@ mod printing {
// Print lifetimes before types and consts, regardless of their
// order in self.params.
//
// TODO: ordering rules for const parameters vs type parameters have
// not been settled yet. https://github.com/rust-lang/rust/issues/44580
let mut trailing_or_empty = true;
for param in self.0.params.pairs() {
if let GenericParam::Lifetime(_) = **param.value() {
@ -1037,7 +1120,7 @@ mod printing {
<Token![,]>::default().to_tokens(tokens);
trailing_or_empty = true;
}
match param.value() {
match *param.value() {
GenericParam::Lifetime(_) => unreachable!(),
GenericParam::Type(param) => {
// Leave off the type parameter defaults
@ -1074,6 +1157,9 @@ mod printing {
// Print lifetimes before types and consts, regardless of their
// order in self.params.
//
// TODO: ordering rules for const parameters vs type parameters have
// not been settled yet. https://github.com/rust-lang/rust/issues/44580
let mut trailing_or_empty = true;
for param in self.0.params.pairs() {
if let GenericParam::Lifetime(def) = *param.value() {
@ -1091,7 +1177,7 @@ mod printing {
<Token![,]>::default().to_tokens(tokens);
trailing_or_empty = true;
}
match param.value() {
match *param.value() {
GenericParam::Lifetime(_) => unreachable!(),
GenericParam::Type(param) => {
// Leave off the type parameter defaults
@ -1129,7 +1215,7 @@ mod printing {
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for LifetimeParam {
impl ToTokens for LifetimeDef {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.lifetime.to_tokens(tokens);
@ -1150,6 +1236,29 @@ mod printing {
self.bounds.to_tokens(tokens);
}
if let Some(default) = &self.default {
#[cfg(feature = "full")]
{
if self.eq_token.is_none() {
if let Type::Verbatim(default) = default {
let mut iter = default.clone().into_iter().peekable();
while let Some(token) = iter.next() {
if let TokenTree::Punct(q) = token {
if q.as_char() == '~' {
if let Some(TokenTree::Ident(c)) = iter.peek() {
if c == "const" {
if self.bounds.is_empty() {
TokensOrDefault(&self.colon_token)
.to_tokens(tokens);
}
return default.to_tokens(tokens);
}
}
}
}
}
}
}
}
TokensOrDefault(&self.eq_token).to_tokens(tokens);
default.to_tokens(tokens);
}
@ -1160,9 +1269,26 @@ mod printing {
impl ToTokens for TraitBound {
fn to_tokens(&self, tokens: &mut TokenStream) {
let to_tokens = |tokens: &mut TokenStream| {
#[cfg(feature = "full")]
let skip = match self.path.segments.pairs().next() {
Some(Pair::Punctuated(t, p)) if t.ident == "const" => {
Token![~](p.spans[0]).to_tokens(tokens);
t.to_tokens(tokens);
1
}
_ => 0,
};
self.modifier.to_tokens(tokens);
self.lifetimes.to_tokens(tokens);
self.path.to_tokens(tokens);
#[cfg(feature = "full")]
{
self.path.leading_colon.to_tokens(tokens);
tokens.append_all(self.path.segments.pairs().skip(skip));
}
#[cfg(not(feature = "full"))]
{
self.path.to_tokens(tokens);
}
};
match &self.paren_token {
Some(paren) => paren.surround(tokens, to_tokens),
@ -1206,6 +1332,16 @@ mod printing {
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for PredicateType {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.lifetimes.to_tokens(tokens);
self.bounded_ty.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.bounds.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for PredicateLifetime {
fn to_tokens(&self, tokens: &mut TokenStream) {
@ -1216,12 +1352,11 @@ mod printing {
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for PredicateType {
impl ToTokens for PredicateEq {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.lifetimes.to_tokens(tokens);
self.bounded_ty.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.bounds.to_tokens(tokens);
self.lhs_ty.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.rhs_ty.to_tokens(tokens);
}
}
}

View File

@ -1,33 +1,26 @@
use crate::error::Result;
use crate::parse::ParseBuffer;
use crate::token;
use proc_macro2::extra::DelimSpan;
use proc_macro2::Delimiter;
use proc_macro2::{Delimiter, Span};
// Not public API.
#[doc(hidden)]
pub struct Parens<'a> {
#[doc(hidden)]
pub token: token::Paren,
#[doc(hidden)]
pub content: ParseBuffer<'a>,
}
// Not public API.
#[doc(hidden)]
pub struct Braces<'a> {
#[doc(hidden)]
pub token: token::Brace,
#[doc(hidden)]
pub content: ParseBuffer<'a>,
}
// Not public API.
#[doc(hidden)]
pub struct Brackets<'a> {
#[doc(hidden)]
pub token: token::Bracket,
#[doc(hidden)]
pub content: ParseBuffer<'a>,
}
@ -35,9 +28,7 @@ pub struct Brackets<'a> {
#[cfg(any(feature = "full", feature = "derive"))]
#[doc(hidden)]
pub struct Group<'a> {
#[doc(hidden)]
pub token: token::Group,
#[doc(hidden)]
pub content: ParseBuffer<'a>,
}
@ -71,7 +62,7 @@ pub fn parse_brackets<'a>(input: &ParseBuffer<'a>) -> Result<Brackets<'a>> {
#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn parse_group<'a>(input: &ParseBuffer<'a>) -> Result<Group<'a>> {
parse_delimited(input, Delimiter::None).map(|(span, content)| Group {
token: token::Group(span.join()),
token: token::Group(span),
content,
})
}
@ -79,7 +70,7 @@ pub(crate) fn parse_group<'a>(input: &ParseBuffer<'a>) -> Result<Group<'a>> {
fn parse_delimited<'a>(
input: &ParseBuffer<'a>,
delimiter: Delimiter,
) -> Result<(DelimSpan, ParseBuffer<'a>)> {
) -> Result<(Span, ParseBuffer<'a>)> {
input.step(|cursor| {
if let Some((content, span, rest)) = cursor.group(delimiter) {
let scope = crate::buffer::close_span_of_group(*cursor);
@ -128,7 +119,7 @@ fn parse_delimited<'a>(
/// struct_token: input.parse()?,
/// ident: input.parse()?,
/// paren_token: parenthesized!(content in input),
/// fields: content.parse_terminated(Type::parse, Token![,])?,
/// fields: content.parse_terminated(Type::parse)?,
/// semi_token: input.parse()?,
/// })
/// }
@ -194,7 +185,7 @@ macro_rules! parenthesized {
/// struct_token: input.parse()?,
/// ident: input.parse()?,
/// brace_token: braced!(content in input),
/// fields: content.parse_terminated(Field::parse, Token![,])?,
/// fields: content.parse_terminated(Field::parse)?,
/// })
/// }
/// }

View File

@ -1,14 +1,66 @@
#[cfg(feature = "parsing")]
use crate::buffer::Cursor;
#[cfg(feature = "parsing")]
use crate::lookahead;
#[cfg(feature = "parsing")]
use crate::parse::{Parse, ParseStream, Result};
#[cfg(feature = "parsing")]
use crate::token::Token;
pub use proc_macro2::Ident;
#[cfg(feature = "parsing")]
pub_if_not_doc! {
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn Ident(marker: lookahead::TokenMarker) -> Ident {
match marker {}
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn Ident(marker: lookahead::TokenMarker) -> Ident {
match marker {}
}
#[cfg(feature = "parsing")]
fn accept_as_ident(ident: &Ident) -> bool {
match ident.to_string().as_str() {
"_" |
// Based on https://doc.rust-lang.org/grammar.html#keywords
// and https://github.com/rust-lang/rfcs/blob/master/text/2421-unreservations-2018.md
// and https://github.com/rust-lang/rfcs/blob/master/text/2420-unreserve-proc.md
"abstract" | "as" | "become" | "box" | "break" | "const" | "continue" |
"crate" | "do" | "else" | "enum" | "extern" | "false" | "final" | "fn" |
"for" | "if" | "impl" | "in" | "let" | "loop" | "macro" | "match" |
"mod" | "move" | "mut" | "override" | "priv" | "pub" | "ref" |
"return" | "Self" | "self" | "static" | "struct" | "super" | "trait" |
"true" | "type" | "typeof" | "unsafe" | "unsized" | "use" | "virtual" |
"where" | "while" | "yield" => false,
_ => true,
}
}
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Ident {
fn parse(input: ParseStream) -> Result<Self> {
input.step(|cursor| {
if let Some((ident, rest)) = cursor.ident() {
if accept_as_ident(&ident) {
return Ok((ident, rest));
}
}
Err(cursor.error("expected identifier"))
})
}
}
#[cfg(feature = "parsing")]
impl Token for Ident {
fn peek(cursor: Cursor) -> bool {
if let Some((ident, _rest)) = cursor.ident() {
accept_as_ident(&ident)
} else {
false
}
}
fn display() -> &'static str {
"identifier"
}
}
@ -34,7 +86,7 @@ impl From<Token![_]> for Ident {
}
}
pub(crate) fn xid_ok(symbol: &str) -> bool {
pub fn xid_ok(symbol: &str) -> bool {
let mut chars = symbol.chars();
let first = chars.next().unwrap();
if !(first == '_' || unicode_ident::is_xid_start(first)) {
@ -47,61 +99,3 @@ pub(crate) fn xid_ok(symbol: &str) -> bool {
}
true
}
#[cfg(feature = "parsing")]
mod parsing {
use crate::buffer::Cursor;
use crate::parse::{Parse, ParseStream, Result};
use crate::token::Token;
use proc_macro2::Ident;
fn accept_as_ident(ident: &Ident) -> bool {
match ident.to_string().as_str() {
"_" |
// Based on https://doc.rust-lang.org/1.65.0/reference/keywords.html
"abstract" | "as" | "async" | "await" | "become" | "box" | "break" |
"const" | "continue" | "crate" | "do" | "dyn" | "else" | "enum" |
"extern" | "false" | "final" | "fn" | "for" | "if" | "impl" | "in" |
"let" | "loop" | "macro" | "match" | "mod" | "move" | "mut" |
"override" | "priv" | "pub" | "ref" | "return" | "Self" | "self" |
"static" | "struct" | "super" | "trait" | "true" | "try" | "type" |
"typeof" | "unsafe" | "unsized" | "use" | "virtual" | "where" |
"while" | "yield" => false,
_ => true,
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Ident {
fn parse(input: ParseStream) -> Result<Self> {
input.step(|cursor| {
if let Some((ident, rest)) = cursor.ident() {
if accept_as_ident(&ident) {
Ok((ident, rest))
} else {
Err(cursor.error(format_args!(
"expected identifier, found keyword `{}`",
ident,
)))
}
} else {
Err(cursor.error("expected identifier"))
}
})
}
}
impl Token for Ident {
fn peek(cursor: Cursor) -> bool {
if let Some((ident, _rest)) = cursor.ident() {
accept_as_ident(&ident)
} else {
false
}
}
fn display() -> &'static str {
"identifier"
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -62,7 +62,7 @@
//!
//! ```toml
//! [dependencies]
//! syn = "2.0"
//! syn = "1.0"
//! quote = "1.0"
//!
//! [lib]
@ -94,8 +94,9 @@
//! ```
//!
//! The [`heapsize`] example directory shows a complete working implementation
//! of a derive macro. The example derives a `HeapSize` trait which computes an
//! estimate of the amount of heap memory owned by a value.
//! of a derive macro. It works on any Rust compiler 1.31+. The example derives
//! a `HeapSize` trait which computes an estimate of the amount of heap memory
//! owned by a value.
//!
//! [`heapsize`]: https://github.com/dtolnay/syn/tree/master/examples/heapsize
//!
@ -249,9 +250,8 @@
//! dynamic library libproc_macro from rustc toolchain.
// Syn types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/syn/2.0.48")]
#![doc(html_root_url = "https://docs.rs/syn/1.0.107")]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![deny(unsafe_op_in_unsafe_fn)]
#![allow(non_camel_case_types)]
#![allow(
clippy::bool_to_int_with_if,
@ -260,20 +260,14 @@
clippy::cast_possible_wrap,
clippy::cast_ptr_alignment,
clippy::default_trait_access,
clippy::derivable_impls,
clippy::diverging_sub_expression,
clippy::doc_markdown,
clippy::expl_impl_clone_on_copy,
clippy::explicit_auto_deref,
clippy::if_not_else,
clippy::inherent_to_string,
clippy::into_iter_without_iter,
clippy::items_after_statements,
clippy::large_enum_variant,
clippy::let_underscore_untyped, // https://github.com/rust-lang/rust-clippy/issues/10410
clippy::manual_assert,
clippy::manual_let_else,
clippy::match_like_matches_macro,
clippy::match_on_vec_items,
clippy::match_same_arms,
clippy::match_wildcard_for_single_variants, // clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984
@ -284,7 +278,6 @@
clippy::needless_doctest_main,
clippy::needless_pass_by_value,
clippy::never_loop,
clippy::range_plus_one,
clippy::redundant_else,
clippy::return_self_not_must_use,
clippy::similar_names,
@ -292,16 +285,20 @@
clippy::too_many_arguments,
clippy::too_many_lines,
clippy::trivially_copy_pass_by_ref,
clippy::uninhabited_references,
clippy::uninlined_format_args,
clippy::unnecessary_box_returns,
clippy::unnecessary_unwrap,
clippy::used_underscore_binding,
clippy::wildcard_imports,
clippy::wildcard_imports
)]
#[cfg(feature = "proc-macro")]
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "proc-macro"
))]
extern crate proc_macro;
extern crate proc_macro2;
#[cfg(feature = "printing")]
extern crate quote;
#[macro_use]
mod macros;
@ -313,300 +310,155 @@ mod group;
#[macro_use]
pub mod token;
mod ident;
pub use crate::ident::Ident;
#[cfg(any(feature = "full", feature = "derive"))]
mod attr;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub use crate::attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue};
pub use crate::attr::{
AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
};
mod bigint;
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod buffer;
mod custom_keyword;
mod custom_punctuation;
#[cfg(any(feature = "full", feature = "derive"))]
mod data;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub use crate::data::{Field, Fields, FieldsNamed, FieldsUnnamed, Variant};
#[cfg(any(feature = "full", feature = "derive"))]
mod derive;
#[cfg(feature = "derive")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
mod drops;
mod error;
pub use crate::error::{Error, Result};
pub use crate::data::{
Field, Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic, VisRestricted,
Visibility,
};
#[cfg(any(feature = "full", feature = "derive"))]
mod expr;
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub use crate::expr::{Arm, Label, RangeLimits};
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub use crate::expr::{
Expr, ExprBinary, ExprCall, ExprCast, ExprField, ExprIndex, ExprLit, ExprMacro, ExprMethodCall,
ExprParen, ExprPath, ExprReference, ExprStruct, ExprUnary, FieldValue, Index, Member,
Arm, FieldValue, GenericMethodArgument, Label, MethodTurbofish, RangeLimits,
};
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub use crate::expr::{
ExprArray, ExprAssign, ExprAsync, ExprAwait, ExprBlock, ExprBreak, ExprClosure, ExprConst,
ExprContinue, ExprForLoop, ExprGroup, ExprIf, ExprInfer, ExprLet, ExprLoop, ExprMatch,
ExprRange, ExprRepeat, ExprReturn, ExprTry, ExprTryBlock, ExprTuple, ExprUnsafe, ExprWhile,
ExprYield,
Expr, ExprArray, ExprAssign, ExprAssignOp, ExprAsync, ExprAwait, ExprBinary, ExprBlock,
ExprBox, ExprBreak, ExprCall, ExprCast, ExprClosure, ExprContinue, ExprField, ExprForLoop,
ExprGroup, ExprIf, ExprIndex, ExprLet, ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall,
ExprParen, ExprPath, ExprRange, ExprReference, ExprRepeat, ExprReturn, ExprStruct, ExprTry,
ExprTryBlock, ExprTuple, ExprType, ExprUnary, ExprUnsafe, ExprWhile, ExprYield, Index, Member,
};
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod ext;
#[cfg(feature = "full")]
mod file;
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub use crate::file::File;
#[cfg(any(feature = "full", feature = "derive"))]
mod generics;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub use crate::generics::{
BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeParam, PredicateLifetime,
PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound, WhereClause,
WherePredicate,
BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeDef, PredicateEq,
PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound,
WhereClause, WherePredicate,
};
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
#[cfg_attr(
doc_cfg,
doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
)]
pub use crate::generics::{ImplGenerics, Turbofish, TypeGenerics};
mod ident;
#[doc(inline)]
pub use crate::ident::Ident;
#[cfg(feature = "full")]
mod item;
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub use crate::item::{
FnArg, ForeignItem, ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType,
ImplItem, ImplItemConst, ImplItemFn, ImplItemMacro, ImplItemType, ImplRestriction, Item,
ItemConst, ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMod,
ImplItem, ImplItemConst, ImplItemMacro, ImplItemMethod, ImplItemType, Item, ItemConst,
ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMacro2, ItemMod,
ItemStatic, ItemStruct, ItemTrait, ItemTraitAlias, ItemType, ItemUnion, ItemUse, Receiver,
Signature, StaticMutability, TraitItem, TraitItemConst, TraitItemFn, TraitItemMacro,
TraitItemType, UseGlob, UseGroup, UseName, UsePath, UseRename, UseTree, Variadic,
Signature, TraitItem, TraitItemConst, TraitItemMacro, TraitItemMethod, TraitItemType, UseGlob,
UseGroup, UseName, UsePath, UseRename, UseTree,
};
#[cfg(feature = "full")]
mod file;
#[cfg(feature = "full")]
pub use crate::file::File;
mod lifetime;
#[doc(inline)]
pub use crate::lifetime::Lifetime;
mod lit;
#[doc(hidden)] // https://github.com/dtolnay/syn/issues/1566
pub use crate::lit::StrStyle;
#[doc(inline)]
pub use crate::lit::{Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr};
#[cfg(feature = "parsing")]
mod lookahead;
pub use crate::lit::{
Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
};
#[cfg(any(feature = "full", feature = "derive"))]
mod mac;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub use crate::mac::{Macro, MacroDelimiter};
#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
#[cfg_attr(
doc_cfg,
doc(cfg(all(feature = "parsing", any(feature = "full", feature = "derive"))))
)]
pub mod meta;
#[cfg(any(feature = "full", feature = "derive"))]
mod derive;
#[cfg(feature = "derive")]
pub use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
#[cfg(any(feature = "full", feature = "derive"))]
mod op;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub use crate::op::{BinOp, UnOp};
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod parse;
#[cfg(feature = "full")]
mod stmt;
#[cfg(feature = "full")]
pub use crate::stmt::{Block, Local, Stmt};
#[cfg(all(feature = "parsing", feature = "proc-macro"))]
mod parse_macro_input;
#[cfg(all(feature = "parsing", feature = "printing"))]
mod parse_quote;
#[cfg(any(feature = "full", feature = "derive"))]
mod ty;
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::ty::{
Abi, BareFnArg, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup, TypeImplTrait, TypeInfer,
TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference, TypeSlice, TypeTraitObject,
TypeTuple, Variadic,
};
#[cfg(feature = "full")]
mod pat;
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub use crate::expr::{
ExprConst as PatConst, ExprLit as PatLit, ExprMacro as PatMacro, ExprPath as PatPath,
ExprRange as PatRange,
};
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub use crate::pat::{
FieldPat, Pat, PatIdent, PatOr, PatParen, PatReference, PatRest, PatSlice, PatStruct, PatTuple,
PatTupleStruct, PatType, PatWild,
FieldPat, Pat, PatBox, PatIdent, PatLit, PatMacro, PatOr, PatPath, PatRange, PatReference,
PatRest, PatSlice, PatStruct, PatTuple, PatTupleStruct, PatType, PatWild,
};
#[cfg(any(feature = "full", feature = "derive"))]
mod path;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub use crate::path::{
AngleBracketedGenericArguments, AssocConst, AssocType, Constraint, GenericArgument,
AngleBracketedGenericArguments, Binding, Constraint, GenericArgument,
ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
};
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
mod print;
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod buffer;
mod drops;
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod ext;
pub mod punctuated;
#[cfg(all(any(feature = "full", feature = "derive"), feature = "extra-traits"))]
mod tt;
#[cfg(any(feature = "full", feature = "derive"))]
mod restriction;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub use crate::restriction::{FieldMutability, VisRestricted, Visibility};
// Not public API except the `parse_quote!` macro.
#[cfg(feature = "parsing")]
#[doc(hidden)]
pub mod parse_quote;
mod sealed;
mod span;
// Not public API except the `parse_macro_input!` macro.
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "parsing",
feature = "proc-macro"
))]
#[doc(hidden)]
pub mod parse_macro_input;
#[cfg(all(feature = "parsing", feature = "printing"))]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
pub mod spanned;
#[cfg(feature = "full")]
mod stmt;
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub use crate::stmt::{Block, Local, LocalInit, Stmt, StmtMacro};
mod thread;
#[cfg(all(any(feature = "full", feature = "derive"), feature = "extra-traits"))]
mod tt;
#[cfg(any(feature = "full", feature = "derive"))]
mod ty;
#[cfg(any(feature = "full", feature = "derive"))]
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub use crate::ty::{
Abi, BareFnArg, BareVariadic, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup,
TypeImplTrait, TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference,
TypeSlice, TypeTraitObject, TypeTuple,
};
#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
mod verbatim;
#[cfg(all(feature = "parsing", feature = "full"))]
mod whitespace;
mod gen {
/// Syntax tree traversal to transform the nodes of an owned syntax tree.
///
/// Each method of the [`Fold`] trait is a hook that can be overridden to
/// customize the behavior when transforming the corresponding type of node.
/// By default, every method recursively visits the substructure of the
/// input by invoking the right visitor method of each of its fields.
///
/// [`Fold`]: fold::Fold
///
/// ```
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
/// #
/// pub trait Fold {
/// /* ... */
///
/// fn fold_expr_binary(&mut self, node: ExprBinary) -> ExprBinary {
/// fold_expr_binary(self, node)
/// }
///
/// /* ... */
/// # fn fold_attribute(&mut self, node: Attribute) -> Attribute;
/// # fn fold_expr(&mut self, node: Expr) -> Expr;
/// # fn fold_bin_op(&mut self, node: BinOp) -> BinOp;
/// }
///
/// pub fn fold_expr_binary<V>(v: &mut V, node: ExprBinary) -> ExprBinary
/// where
/// V: Fold + ?Sized,
/// {
/// ExprBinary {
/// attrs: node
/// .attrs
/// .into_iter()
/// .map(|attr| v.fold_attribute(attr))
/// .collect(),
/// left: Box::new(v.fold_expr(*node.left)),
/// op: v.fold_bin_op(node.op),
/// right: Box::new(v.fold_expr(*node.right)),
/// }
/// }
///
/// /* ... */
/// ```
///
/// <br>
///
/// # Example
///
/// This fold inserts parentheses to fully parenthesizes any expression.
///
/// ```
/// // [dependencies]
/// // quote = "1.0"
/// // syn = { version = "2.0", features = ["fold", "full"] }
///
/// use quote::quote;
/// use syn::fold::{fold_expr, Fold};
/// use syn::{token, Expr, ExprParen};
///
/// struct ParenthesizeEveryExpr;
///
/// impl Fold for ParenthesizeEveryExpr {
/// fn fold_expr(&mut self, expr: Expr) -> Expr {
/// Expr::Paren(ExprParen {
/// attrs: Vec::new(),
/// expr: Box::new(fold_expr(self, expr)),
/// paren_token: token::Paren::default(),
/// })
/// }
/// }
///
/// fn main() {
/// let code = quote! { a() + b(1) * c.d };
/// let expr: Expr = syn::parse2(code).unwrap();
/// let parenthesized = ParenthesizeEveryExpr.fold_expr(expr);
/// println!("{}", quote!(#parenthesized));
///
/// // Output: (((a)()) + (((b)((1))) * ((c).d)))
/// }
/// ```
#[cfg(feature = "fold")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "fold")))]
#[rustfmt::skip]
pub mod fold;
/// Syntax tree traversal to walk a shared borrow of a syntax tree.
///
/// Each method of the [`Visit`] trait is a hook that can be overridden to
@ -647,6 +499,8 @@ mod gen {
/// /* ... */
/// ```
///
/// *This module is available only if Syn is built with the `"visit"` feature.*
///
/// <br>
///
/// # Example
@ -657,7 +511,7 @@ mod gen {
/// ```
/// // [dependencies]
/// // quote = "1.0"
/// // syn = { version = "2.0", features = ["full", "visit"] }
/// // syn = { version = "1.0", features = ["full", "visit"] }
///
/// use quote::quote;
/// use syn::visit::{self, Visit};
@ -767,6 +621,9 @@ mod gen {
/// /* ... */
/// ```
///
/// *This module is available only if Syn is built with the `"visit-mut"`
/// feature.*
///
/// <br>
///
/// # Example
@ -777,7 +634,7 @@ mod gen {
/// ```
/// // [dependencies]
/// // quote = "1.0"
/// // syn = { version = "2.0", features = ["full", "visit-mut"] }
/// // syn = { version = "1.0", features = ["full", "visit-mut"] }
///
/// use quote::quote;
/// use syn::visit_mut::{self, VisitMut};
@ -820,14 +677,97 @@ mod gen {
#[rustfmt::skip]
pub mod visit_mut;
/// Syntax tree traversal to transform the nodes of an owned syntax tree.
///
/// Each method of the [`Fold`] trait is a hook that can be overridden to
/// customize the behavior when transforming the corresponding type of node.
/// By default, every method recursively visits the substructure of the
/// input by invoking the right visitor method of each of its fields.
///
/// [`Fold`]: fold::Fold
///
/// ```
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
/// #
/// pub trait Fold {
/// /* ... */
///
/// fn fold_expr_binary(&mut self, node: ExprBinary) -> ExprBinary {
/// fold_expr_binary(self, node)
/// }
///
/// /* ... */
/// # fn fold_attribute(&mut self, node: Attribute) -> Attribute;
/// # fn fold_expr(&mut self, node: Expr) -> Expr;
/// # fn fold_bin_op(&mut self, node: BinOp) -> BinOp;
/// }
///
/// pub fn fold_expr_binary<V>(v: &mut V, node: ExprBinary) -> ExprBinary
/// where
/// V: Fold + ?Sized,
/// {
/// ExprBinary {
/// attrs: node
/// .attrs
/// .into_iter()
/// .map(|attr| v.fold_attribute(attr))
/// .collect(),
/// left: Box::new(v.fold_expr(*node.left)),
/// op: v.fold_bin_op(node.op),
/// right: Box::new(v.fold_expr(*node.right)),
/// }
/// }
///
/// /* ... */
/// ```
///
/// *This module is available only if Syn is built with the `"fold"` feature.*
///
/// <br>
///
/// # Example
///
/// This fold inserts parentheses to fully parenthesizes any expression.
///
/// ```
/// // [dependencies]
/// // quote = "1.0"
/// // syn = { version = "1.0", features = ["fold", "full"] }
///
/// use quote::quote;
/// use syn::fold::{fold_expr, Fold};
/// use syn::{token, Expr, ExprParen};
///
/// struct ParenthesizeEveryExpr;
///
/// impl Fold for ParenthesizeEveryExpr {
/// fn fold_expr(&mut self, expr: Expr) -> Expr {
/// Expr::Paren(ExprParen {
/// attrs: Vec::new(),
/// expr: Box::new(fold_expr(self, expr)),
/// paren_token: token::Paren::default(),
/// })
/// }
/// }
///
/// fn main() {
/// let code = quote! { a() + b(1) * c.d };
/// let expr: Expr = syn::parse2(code).unwrap();
/// let parenthesized = ParenthesizeEveryExpr.fold_expr(expr);
/// println!("{}", quote!(#parenthesized));
///
/// // Output: (((a)()) + (((b)((1))) * ((c).d)))
/// }
/// ```
#[cfg(feature = "fold")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "fold")))]
#[rustfmt::skip]
pub mod fold;
#[cfg(feature = "clone-impls")]
#[rustfmt::skip]
mod clone;
#[cfg(feature = "extra-traits")]
#[rustfmt::skip]
mod debug;
#[cfg(feature = "extra-traits")]
#[rustfmt::skip]
mod eq;
@ -836,28 +776,48 @@ mod gen {
#[rustfmt::skip]
mod hash;
#[cfg(feature = "extra-traits")]
#[rustfmt::skip]
mod debug;
#[cfg(any(feature = "full", feature = "derive"))]
#[path = "../gen_helper.rs"]
mod helper;
}
#[cfg(feature = "fold")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "fold")))]
pub use crate::gen::fold;
#[cfg(feature = "visit")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "visit")))]
pub use crate::gen::visit;
#[cfg(feature = "visit-mut")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "visit-mut")))]
pub use crate::gen::visit_mut;
pub use crate::gen::*;
// Not public API.
#[doc(hidden)]
#[path = "export.rs"]
pub mod __private;
mod custom_keyword;
mod custom_punctuation;
mod sealed;
mod span;
mod thread;
#[cfg(feature = "parsing")]
mod lookahead;
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod parse;
#[cfg(feature = "full")]
mod reserved;
#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
mod verbatim;
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
mod print;
////////////////////////////////////////////////////////////////////////////////
mod error;
pub use crate::error::{Error, Result};
/// Parse tokens of source code into the chosen syntax tree node.
///
/// This is preferred over parsing a string because tokens are able to preserve
@ -871,6 +831,9 @@ pub mod __private;
///
/// [`syn::parse2`]: parse2
///
/// *This function is available only if Syn is built with both the `"parsing"` and
/// `"proc-macro"` features.*
///
/// # Examples
///
/// ```
@ -896,7 +859,11 @@ pub mod __private;
/// expanded.into()
/// }
/// ```
#[cfg(all(feature = "parsing", feature = "proc-macro"))]
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "parsing",
feature = "proc-macro"
))]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
parse::Parser::parse(T::parse, tokens)
@ -914,6 +881,8 @@ pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
/// instead.
///
/// [`syn::parse`]: parse()
///
/// *This function is available only if Syn is built with the `"parsing"` feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
@ -922,6 +891,8 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
/// Parse a string of Rust code into the chosen syntax tree node.
///
/// *This function is available only if Syn is built with the `"parsing"` feature.*
///
/// # Hygiene
///
/// Every span in the resulting syntax tree will be set to resolve at the macro
@ -958,6 +929,9 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
///
/// If present, either of these would be an error using `from_str`.
///
/// *This function is available only if Syn is built with the `"parsing"` and
/// `"full"` features.*
///
/// # Examples
///
/// ```no_run
@ -965,7 +939,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
/// use std::fs::File;
/// use std::io::Read;
///
/// fn run() -> Result<(), Box<dyn Error>> {
/// fn run() -> Result<(), Box<Error>> {
/// let mut file = File::open("path/to/code.rs")?;
/// let mut content = String::new();
/// file.read_to_string(&mut content)?;

View File

@ -113,16 +113,14 @@ impl Hash for Lifetime {
}
#[cfg(feature = "parsing")]
pub_if_not_doc! {
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime {
match marker {}
}
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime {
match marker {}
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::parse::{Parse, ParseStream, Result};

View File

@ -19,7 +19,6 @@ ast_enum_of_structs! {
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: crate::Expr#syntax-tree-enums
#[non_exhaustive]
pub enum Lit {
/// A UTF-8 string literal: `"foo"`.
Str(LitStr),
@ -143,7 +142,8 @@ impl LitStr {
/// # Example
///
/// ```
/// use syn::{Attribute, Error, Expr, Lit, Meta, Path, Result};
/// use proc_macro2::Span;
/// use syn::{Attribute, Error, Ident, Lit, Meta, MetaNameValue, Path, Result};
///
/// // Parses the path from an attribute that looks like:
/// //
@ -151,20 +151,19 @@ impl LitStr {
/// //
/// // or returns `None` if the input is some other attribute.
/// fn get_path(attr: &Attribute) -> Result<Option<Path>> {
/// if !attr.path().is_ident("path") {
/// if !attr.path.is_ident("path") {
/// return Ok(None);
/// }
///
/// if let Meta::NameValue(meta) = &attr.meta {
/// if let Expr::Lit(expr) = &meta.value {
/// if let Lit::Str(lit_str) = &expr.lit {
/// return lit_str.parse().map(Some);
/// }
/// match attr.parse_meta()? {
/// Meta::NameValue(MetaNameValue { lit: Lit::Str(lit_str), .. }) => {
/// lit_str.parse().map(Some)
/// }
/// _ => {
/// let message = "expected #[path = \"...\"]";
/// Err(Error::new_spanned(attr, message))
/// }
/// }
///
/// let message = "expected #[path = \"...\"]";
/// Err(Error::new_spanned(attr, message))
/// }
/// ```
#[cfg(feature = "parsing")]
@ -228,17 +227,7 @@ impl LitStr {
let mut tokens = TokenStream::from_str(&self.value())?;
tokens = respan_token_stream(tokens, self.span());
let result = parser.parse2(tokens)?;
let suffix = self.suffix();
if !suffix.is_empty() {
return Err(Error::new(
self.span(),
format!("unexpected suffix `{}` on string literal", suffix),
));
}
Ok(result)
parser.parse2(tokens)
}
pub fn span(&self) -> Span {
@ -370,7 +359,11 @@ impl LitInt {
None => panic!("Not an integer literal: `{}`", repr),
};
let mut token: Literal = repr.parse().unwrap();
let mut token = match value::to_literal(repr, &digits, &suffix) {
Some(token) => token,
None => panic!("Unsupported integer literal: `{}`", repr),
};
token.set_span(span);
LitInt {
repr: Box::new(LitIntRepr {
@ -464,7 +457,11 @@ impl LitFloat {
None => panic!("Not a float literal: `{}`", repr),
};
let mut token: Literal = repr.parse().unwrap();
let mut token = match value::to_literal(repr, &digits, &suffix) {
Some(token) => token,
None => panic!("Unsupported float literal: `{}`", repr),
};
token.set_span(span);
LitFloat {
repr: Box::new(LitFloatRepr {
@ -560,133 +557,70 @@ mod debug_impls {
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for LitStr {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
impl LitStr {
pub(crate) fn debug(
&self,
formatter: &mut fmt::Formatter,
name: &str,
) -> fmt::Result {
formatter
.debug_struct(name)
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
self.debug(formatter, "LitStr")
formatter
.debug_struct("LitStr")
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for LitByteStr {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
impl LitByteStr {
pub(crate) fn debug(
&self,
formatter: &mut fmt::Formatter,
name: &str,
) -> fmt::Result {
formatter
.debug_struct(name)
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
self.debug(formatter, "LitByteStr")
formatter
.debug_struct("LitByteStr")
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for LitByte {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
impl LitByte {
pub(crate) fn debug(
&self,
formatter: &mut fmt::Formatter,
name: &str,
) -> fmt::Result {
formatter
.debug_struct(name)
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
self.debug(formatter, "LitByte")
formatter
.debug_struct("LitByte")
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for LitChar {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
impl LitChar {
pub(crate) fn debug(
&self,
formatter: &mut fmt::Formatter,
name: &str,
) -> fmt::Result {
formatter
.debug_struct(name)
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
self.debug(formatter, "LitChar")
formatter
.debug_struct("LitChar")
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for LitInt {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
impl LitInt {
pub(crate) fn debug(
&self,
formatter: &mut fmt::Formatter,
name: &str,
) -> fmt::Result {
formatter
.debug_struct(name)
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
self.debug(formatter, "LitInt")
formatter
.debug_struct("LitInt")
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for LitFloat {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
impl LitFloat {
pub(crate) fn debug(
&self,
formatter: &mut fmt::Formatter,
name: &str,
) -> fmt::Result {
formatter
.debug_struct(name)
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
self.debug(formatter, "LitFloat")
formatter
.debug_struct("LitFloat")
.field("token", &format_args!("{}", self.repr.token))
.finish()
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for LitBool {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
impl LitBool {
pub(crate) fn debug(
&self,
formatter: &mut fmt::Formatter,
name: &str,
) -> fmt::Result {
formatter
.debug_struct(name)
.field("value", &self.value)
.finish()
}
}
self.debug(formatter, "LitBool")
formatter
.debug_struct("LitBool")
.field("value", &self.value)
.finish()
}
}
}
@ -758,12 +692,10 @@ macro_rules! lit_extra_traits {
}
#[cfg(feature = "parsing")]
pub_if_not_doc! {
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn $ty(marker: lookahead::TokenMarker) -> $ty {
match marker {}
}
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn $ty(marker: lookahead::TokenMarker) -> $ty {
match marker {}
}
};
}
@ -776,37 +708,34 @@ lit_extra_traits!(LitInt);
lit_extra_traits!(LitFloat);
#[cfg(feature = "parsing")]
pub_if_not_doc! {
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
match marker {}
}
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn LitBool(marker: lookahead::TokenMarker) -> LitBool {
match marker {}
}
/// The style of a string literal, either plain quoted or a raw string like
/// `r##"data"##`.
#[doc(hidden)] // https://github.com/dtolnay/syn/issues/1566
pub enum StrStyle {
/// An ordinary string like `"data"`.
Cooked,
/// A raw string like `r##"data"##`.
///
/// The unsigned integer is the number of `#` symbols used.
Raw(usize),
}
#[cfg(feature = "parsing")]
pub_if_not_doc! {
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
match marker {}
ast_enum! {
/// The style of a string literal, either plain quoted or a raw string like
/// `r##"data"##`.
pub enum StrStyle #no_visit {
/// An ordinary string like `"data"`.
Cooked,
/// A raw string like `r##"data"##`.
///
/// The unsigned integer is the number of `#` symbols used.
Raw(usize),
}
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn Lit(marker: lookahead::TokenMarker) -> Lit {
match marker {}
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use crate::buffer::Cursor;
use crate::parse::{Parse, ParseStream, Result};
@ -854,22 +783,23 @@ pub(crate) mod parsing {
repr.insert(0, '-');
if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
let mut token: Literal = repr.parse().unwrap();
token.set_span(span);
return Some((
Lit::Int(LitInt {
repr: Box::new(LitIntRepr {
token,
digits,
suffix,
if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
token.set_span(span);
return Some((
Lit::Int(LitInt {
repr: Box::new(LitIntRepr {
token,
digits,
suffix,
}),
}),
}),
rest,
));
rest,
));
}
}
let (digits, suffix) = value::parse_lit_float(&repr)?;
let mut token: Literal = repr.parse().unwrap();
let mut token = value::to_literal(&repr, &digits, &suffix)?;
token.set_span(span);
Some((
Lit::Float(LitFloat {
@ -1029,7 +959,6 @@ mod value {
let repr = token.to_string();
match byte(&repr, 0) {
// "...", r"...", r#"..."#
b'"' | b'r' => {
let (_, suffix) = parse_lit_str(&repr);
return Lit::Str(LitStr {
@ -1037,14 +966,12 @@ mod value {
});
}
b'b' => match byte(&repr, 1) {
// b"...", br"...", br#"...#"
b'"' | b'r' => {
let (_, suffix) = parse_lit_byte_str(&repr);
return Lit::ByteStr(LitByteStr {
repr: Box::new(LitRepr { token, suffix }),
});
}
// b'...'
b'\'' => {
let (_, suffix) = parse_lit_byte(&repr);
return Lit::Byte(LitByte {
@ -1053,7 +980,6 @@ mod value {
}
_ => {}
},
// '...'
b'\'' => {
let (_, suffix) = parse_lit_char(&repr);
return Lit::Char(LitChar {
@ -1061,7 +987,6 @@ mod value {
});
}
b'0'..=b'9' | b'-' => {
// 0, 123, 0xFF, 0o77, 0b11
if let Some((digits, suffix)) = parse_lit_int(&repr) {
return Lit::Int(LitInt {
repr: Box::new(LitIntRepr {
@ -1071,7 +996,6 @@ mod value {
}),
});
}
// 1.0, 1e-1, 1e+1
if let Some((digits, suffix)) = parse_lit_float(&repr) {
return Lit::Float(LitFloat {
repr: Box::new(LitFloatRepr {
@ -1082,7 +1006,6 @@ mod value {
});
}
}
// true, false
b't' | b'f' => {
if repr == "true" || repr == "false" {
return Lit::Bool(LitBool {
@ -1091,10 +1014,6 @@ mod value {
});
}
}
// c"...", cr"...", cr#"..."#
// TODO: add a Lit::CStr variant?
b'c' => return Lit::Verbatim(token),
b'(' if repr == "(/*ERROR*/)" => return Lit::Verbatim(token),
_ => {}
}
@ -1142,7 +1061,7 @@ mod value {
/// Get the byte at offset idx, or a default of `b'\0'` if we're looking
/// past the end of the input buffer.
pub(crate) fn byte<S: AsRef<[u8]> + ?Sized>(s: &S, idx: usize) -> u8 {
pub fn byte<S: AsRef<[u8]> + ?Sized>(s: &S, idx: usize) -> u8 {
let s = s.as_ref();
if idx < s.len() {
s[idx]
@ -1156,7 +1075,7 @@ mod value {
}
// Returns (content, suffix).
pub(crate) fn parse_lit_str(s: &str) -> (Box<str>, Box<str>) {
pub fn parse_lit_str(s: &str) -> (Box<str>, Box<str>) {
match byte(s, 0) {
b'"' => parse_lit_str_cooked(s),
b'r' => parse_lit_str_raw(s),
@ -1182,7 +1101,7 @@ mod value {
b'x' => {
let (byte, rest) = backslash_x(s);
s = rest;
assert!(byte <= 0x7F, "Invalid \\x byte in string literal");
assert!(byte <= 0x80, "Invalid \\x byte in string literal");
char::from_u32(u32::from(byte)).unwrap()
}
b'u' => {
@ -1198,10 +1117,11 @@ mod value {
b'\'' => '\'',
b'"' => '"',
b'\r' | b'\n' => loop {
let b = byte(s, 0);
match b {
b' ' | b'\t' | b'\n' | b'\r' => s = &s[1..],
_ => continue 'outer,
let ch = next_chr(s);
if ch.is_whitespace() {
s = &s[ch.len_utf8()..];
} else {
continue 'outer;
}
},
b => panic!("unexpected byte {:?} after \\ character in byte literal", b),
@ -1247,7 +1167,7 @@ mod value {
}
// Returns (content, suffix).
pub(crate) fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
pub fn parse_lit_byte_str(s: &str) -> (Vec<u8>, Box<str>) {
assert_eq!(byte(s, 0), b'b');
match byte(s, 1) {
b'"' => parse_lit_byte_str_cooked(s),
@ -1289,7 +1209,8 @@ mod value {
b'"' => b'"',
b'\r' | b'\n' => loop {
let byte = byte(v, 0);
if matches!(byte, b' ' | b'\t' | b'\n' | b'\r') {
let ch = char::from_u32(u32::from(byte)).unwrap();
if ch.is_whitespace() {
v = &v[1..];
} else {
continue 'outer;
@ -1323,7 +1244,7 @@ mod value {
}
// Returns (value, suffix).
pub(crate) fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
pub fn parse_lit_byte(s: &str) -> (u8, Box<str>) {
assert_eq!(byte(s, 0), b'b');
assert_eq!(byte(s, 1), b'\'');
@ -1362,7 +1283,7 @@ mod value {
}
// Returns (value, suffix).
pub(crate) fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
pub fn parse_lit_char(mut s: &str) -> (char, Box<str>) {
assert_eq!(byte(s, 0), b'\'');
s = &s[1..];
@ -1467,7 +1388,7 @@ mod value {
}
// Returns base 10 digits and suffix.
pub(crate) fn parse_lit_int(mut s: &str) -> Option<(Box<str>, Box<str>)> {
pub fn parse_lit_int(mut s: &str) -> Option<(Box<str>, Box<str>)> {
let negative = byte(s, 0) == b'-';
if negative {
s = &s[1..];
@ -1491,7 +1412,6 @@ mod value {
};
let mut value = BigInt::new();
let mut has_digit = false;
'outer: loop {
let b = byte(s, 0);
let digit = match b {
@ -1535,16 +1455,11 @@ mod value {
return None;
}
has_digit = true;
value *= base;
value += digit;
s = &s[1..];
}
if !has_digit {
return None;
}
let suffix = s;
if suffix.is_empty() || crate::ident::xid_ok(suffix) {
let mut repr = value.to_string();
@ -1558,7 +1473,7 @@ mod value {
}
// Returns base 10 digits and suffix.
pub(crate) fn parse_lit_float(input: &str) -> Option<(Box<str>, Box<str>)> {
pub fn parse_lit_float(input: &str) -> Option<(Box<str>, Box<str>)> {
// Rust's floating point literals are very similar to the ones parsed by
// the standard library, except that rust's literals can contain
// ignorable underscores. Let's remove those underscores.
@ -1648,4 +1563,38 @@ mod value {
None
}
}
#[allow(clippy::unnecessary_wraps)]
pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
#[cfg(syn_no_negative_literal_parse)]
{
// Rustc older than https://github.com/rust-lang/rust/pull/87262.
if repr.starts_with('-') {
let f64_parse_finite = || digits.parse().ok().filter(|x: &f64| x.is_finite());
let f32_parse_finite = || digits.parse().ok().filter(|x: &f32| x.is_finite());
return if suffix == "f64" {
f64_parse_finite().map(Literal::f64_suffixed)
} else if suffix == "f32" {
f32_parse_finite().map(Literal::f32_suffixed)
} else if suffix == "i64" {
digits.parse().ok().map(Literal::i64_suffixed)
} else if suffix == "i32" {
digits.parse().ok().map(Literal::i32_suffixed)
} else if suffix == "i16" {
digits.parse().ok().map(Literal::i16_suffixed)
} else if suffix == "i8" {
digits.parse().ok().map(Literal::i8_suffixed)
} else if !suffix.is_empty() {
None
} else if digits.contains('.') {
f64_parse_finite().map(Literal::f64_unsuffixed)
} else {
digits.parse().ok().map(Literal::i64_unsuffixed)
};
}
}
let _ = digits;
let _ = suffix;
Some(repr.parse::<Literal>().unwrap())
}
}

View File

@ -24,7 +24,7 @@ use std::cell::RefCell;
/// # Example
///
/// ```
/// use syn::{ConstParam, Ident, Lifetime, LifetimeParam, Result, Token, TypeParam};
/// use syn::{ConstParam, Ident, Lifetime, LifetimeDef, Result, Token, TypeParam};
/// use syn::parse::{Parse, ParseStream};
///
/// // A generic parameter, a single one of the comma-separated elements inside
@ -40,7 +40,7 @@ use std::cell::RefCell;
/// // | ^
/// enum GenericParam {
/// Type(TypeParam),
/// Lifetime(LifetimeParam),
/// Lifetime(LifetimeDef),
/// Const(ConstParam),
/// }
///
@ -65,7 +65,7 @@ pub struct Lookahead1<'a> {
comparisons: RefCell<Vec<&'static str>>,
}
pub(crate) fn new(scope: Span, cursor: Cursor) -> Lookahead1 {
pub fn new(scope: Span, cursor: Cursor) -> Lookahead1 {
Lookahead1 {
scope,
cursor,
@ -162,7 +162,7 @@ impl<S> IntoSpans<S> for TokenMarker {
}
}
pub(crate) fn is_delimiter(cursor: Cursor, delimiter: Delimiter) -> bool {
pub fn is_delimiter(cursor: Cursor, delimiter: Delimiter) -> bool {
cursor.group(delimiter).is_some()
}

View File

@ -1,17 +1,17 @@
use super::*;
use crate::token::{Brace, Bracket, Paren};
use proc_macro2::extra::DelimSpan;
#[cfg(any(feature = "parsing", feature = "printing"))]
use proc_macro2::Delimiter;
use proc_macro2::TokenStream;
#[cfg(feature = "parsing")]
use proc_macro2::TokenTree;
use proc_macro2::{Delimiter, Group, Span, TokenTree};
#[cfg(feature = "parsing")]
use crate::parse::{Parse, ParseStream, Parser, Result};
ast_struct! {
/// A macro invocation: `println!("{}", mac)`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Macro {
pub path: Path,
@ -23,6 +23,9 @@ ast_struct! {
ast_enum! {
/// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum MacroDelimiter {
Paren(Paren),
@ -31,14 +34,20 @@ ast_enum! {
}
}
impl MacroDelimiter {
pub fn span(&self) -> &DelimSpan {
match self {
MacroDelimiter::Paren(token) => &token.span,
MacroDelimiter::Brace(token) => &token.span,
MacroDelimiter::Bracket(token) => &token.span,
}
}
#[cfg(feature = "parsing")]
fn delimiter_span_close(macro_delimiter: &MacroDelimiter) -> Span {
let delimiter = match macro_delimiter {
MacroDelimiter::Paren(_) => Delimiter::Parenthesis,
MacroDelimiter::Brace(_) => Delimiter::Brace,
MacroDelimiter::Bracket(_) => Delimiter::Bracket,
};
let mut group = Group::new(delimiter, TokenStream::new());
group.set_span(match macro_delimiter {
MacroDelimiter::Paren(token) => token.span,
MacroDelimiter::Brace(token) => token.span,
MacroDelimiter::Bracket(token) => token.span,
});
group.span_close()
}
impl Macro {
@ -135,16 +144,16 @@ impl Macro {
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
let scope = self.delimiter.span().close();
let scope = delimiter_span_close(&self.delimiter);
crate::parse::parse_scoped(parser, scope, self.tokens.clone())
}
}
#[cfg(feature = "parsing")]
pub(crate) fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStream)> {
pub fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStream)> {
input.step(|cursor| {
if let Some((TokenTree::Group(g), rest)) = cursor.token_tree() {
let span = g.delim_span();
let span = g.span();
let delimiter = match g.delimiter() {
Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
@ -161,7 +170,7 @@ pub(crate) fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, Tok
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::parse::{Parse, ParseStream, Result};
@ -189,23 +198,22 @@ mod printing {
use proc_macro2::TokenStream;
use quote::ToTokens;
impl MacroDelimiter {
pub(crate) fn surround(&self, tokens: &mut TokenStream, inner: TokenStream) {
let (delim, span) = match self {
MacroDelimiter::Paren(paren) => (Delimiter::Parenthesis, paren.span),
MacroDelimiter::Brace(brace) => (Delimiter::Brace, brace.span),
MacroDelimiter::Bracket(bracket) => (Delimiter::Bracket, bracket.span),
};
token::printing::delim(delim, span.join(), tokens, inner);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Macro {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.path.to_tokens(tokens);
self.bang_token.to_tokens(tokens);
self.delimiter.surround(tokens, self.tokens.clone());
match &self.delimiter {
MacroDelimiter::Paren(paren) => {
paren.surround(tokens, |tokens| self.tokens.to_tokens(tokens));
}
MacroDelimiter::Brace(brace) => {
brace.surround(tokens, |tokens| self.tokens.to_tokens(tokens));
}
MacroDelimiter::Bracket(bracket) => {
bracket.surround(tokens, |tokens| self.tokens.to_tokens(tokens));
}
}
}
}
}

View File

@ -4,17 +4,14 @@
)]
macro_rules! ast_struct {
(
$(#[$attr:meta])*
$pub:ident $struct:ident $name:ident #full $body:tt
[$($attrs_pub:tt)*]
struct $name:ident #full $($rest:tt)*
) => {
check_keyword_matches!(pub $pub);
check_keyword_matches!(struct $struct);
#[cfg(feature = "full")]
$(#[$attr])* $pub $struct $name $body
$($attrs_pub)* struct $name $($rest)*
#[cfg(not(feature = "full"))]
$(#[$attr])* $pub $struct $name {
$($attrs_pub)* struct $name {
_noconstruct: ::std::marker::PhantomData<::proc_macro2::Span>,
}
@ -27,26 +24,35 @@ macro_rules! ast_struct {
};
(
$(#[$attr:meta])*
$pub:ident $struct:ident $name:ident $body:tt
[$($attrs_pub:tt)*]
struct $name:ident $($rest:tt)*
) => {
check_keyword_matches!(pub $pub);
check_keyword_matches!(struct $struct);
$($attrs_pub)* struct $name $($rest)*
};
$(#[$attr])* $pub $struct $name $body
($($t:tt)*) => {
strip_attrs_pub!(ast_struct!($($t)*));
};
}
#[cfg(any(feature = "full", feature = "derive"))]
macro_rules! ast_enum {
// Drop the `#no_visit` attribute, if present.
(
$(#[$enum_attr:meta])*
$pub:ident $enum:ident $name:ident $body:tt
) => {
check_keyword_matches!(pub $pub);
check_keyword_matches!(enum $enum);
[$($attrs_pub:tt)*]
enum $name:ident #no_visit $($rest:tt)*
) => (
ast_enum!([$($attrs_pub)*] enum $name $($rest)*);
);
$(#[$enum_attr])* $pub $enum $name $body
(
[$($attrs_pub:tt)*]
enum $name:ident $($rest:tt)*
) => (
$($attrs_pub)* enum $name $($rest)*
);
($($t:tt)*) => {
strip_attrs_pub!(ast_enum!($($t)*));
};
}
@ -54,32 +60,35 @@ macro_rules! ast_enum_of_structs {
(
$(#[$enum_attr:meta])*
$pub:ident $enum:ident $name:ident $body:tt
$($remaining:tt)*
) => {
check_keyword_matches!(pub $pub);
check_keyword_matches!(enum $enum);
$(#[$enum_attr])* $pub $enum $name $body
ast_enum_of_structs_impl!($name $body);
ast_enum!($(#[$enum_attr])* $pub $enum $name $body);
ast_enum_of_structs_impl!($pub $enum $name $body $($remaining)*);
};
}
macro_rules! ast_enum_of_structs_impl {
(
$name:ident {
$pub:ident $enum:ident $name:ident {
$(
$(#[cfg $cfg_attr:tt])*
$(#[doc $($doc_attr:tt)*])*
$variant:ident $( ($($member:ident)::+) )*,
)*
}
$($remaining:tt)*
) => {
check_keyword_matches!(pub $pub);
check_keyword_matches!(enum $enum);
$($(
ast_enum_from_struct!($name::$variant, $($member)::+);
)*)*
#[cfg(feature = "printing")]
generate_to_tokens! {
$($remaining)*
()
tokens
$name {
@ -107,7 +116,13 @@ macro_rules! ast_enum_from_struct {
}
#[cfg(feature = "printing")]
#[cfg_attr(
not(any(feature = "full", feature = "derive")),
allow(unknown_lints, unused_macro_rules)
)]
macro_rules! generate_to_tokens {
(do_not_generate_to_tokens $($foo:tt)*) => ();
(
($($arms:tt)*) $tokens:ident $name:ident {
$(#[cfg $cfg_attr:tt])*
@ -148,29 +163,15 @@ macro_rules! generate_to_tokens {
};
}
// Rustdoc bug: does not respect the doc(hidden) on some items.
#[cfg(all(doc, feature = "parsing"))]
macro_rules! pub_if_not_doc {
($(#[$m:meta])* $pub:ident $($item:tt)*) => {
macro_rules! strip_attrs_pub {
($mac:ident!($(#[$m:meta])* $pub:ident $($t:tt)*)) => {
check_keyword_matches!(pub $pub);
$(#[$m])*
$pub(crate) $($item)*
};
}
#[cfg(all(not(doc), feature = "parsing"))]
macro_rules! pub_if_not_doc {
($(#[$m:meta])* $pub:ident $($item:tt)*) => {
check_keyword_matches!(pub $pub);
$(#[$m])*
$pub $($item)*
$mac!([$(#[$m])* $pub] $($t)*);
};
}
macro_rules! check_keyword_matches {
(enum enum) => {};
(pub pub) => {};
(struct struct) => {};
}

View File

@ -1,426 +0,0 @@
//! Facility for interpreting structured content inside of an `Attribute`.
use crate::ext::IdentExt as _;
use crate::lit::Lit;
use crate::parse::{Error, ParseStream, Parser, Result};
use crate::path::{Path, PathSegment};
use crate::punctuated::Punctuated;
use proc_macro2::Ident;
use std::fmt::Display;
/// Make a parser that is usable with `parse_macro_input!` in a
/// `#[proc_macro_attribute]` macro.
///
/// *Warning:* When parsing attribute args **other than** the
/// `proc_macro::TokenStream` input of a `proc_macro_attribute`, you do **not**
/// need this function. In several cases your callers will get worse error
/// messages if you use this function, because the surrounding delimiter's span
/// is concealed from attribute macros by rustc. Use
/// [`Attribute::parse_nested_meta`] instead.
///
/// [`Attribute::parse_nested_meta`]: crate::Attribute::parse_nested_meta
///
/// # Example
///
/// This example implements an attribute macro whose invocations look like this:
///
/// ```
/// # const IGNORE: &str = stringify! {
/// #[tea(kind = "EarlGrey", hot)]
/// struct Picard {...}
/// # };
/// ```
///
/// The "parameters" supported by the attribute are:
///
/// - `kind = "..."`
/// - `hot`
/// - `with(sugar, milk, ...)`, a comma-separated list of ingredients
///
/// ```
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::{parse_macro_input, LitStr, Path};
///
/// # const IGNORE: &str = stringify! {
/// #[proc_macro_attribute]
/// # };
/// pub fn tea(args: TokenStream, input: TokenStream) -> TokenStream {
/// let mut kind: Option<LitStr> = None;
/// let mut hot: bool = false;
/// let mut with: Vec<Path> = Vec::new();
/// let tea_parser = syn::meta::parser(|meta| {
/// if meta.path.is_ident("kind") {
/// kind = Some(meta.value()?.parse()?);
/// Ok(())
/// } else if meta.path.is_ident("hot") {
/// hot = true;
/// Ok(())
/// } else if meta.path.is_ident("with") {
/// meta.parse_nested_meta(|meta| {
/// with.push(meta.path);
/// Ok(())
/// })
/// } else {
/// Err(meta.error("unsupported tea property"))
/// }
/// });
///
/// parse_macro_input!(args with tea_parser);
/// eprintln!("kind={kind:?} hot={hot} with={with:?}");
///
/// /* ... */
/// # TokenStream::new()
/// }
/// ```
///
/// The `syn::meta` library will take care of dealing with the commas including
/// trailing commas, and producing sensible error messages on unexpected input.
///
/// ```console
/// error: expected `,`
/// --> src/main.rs:3:37
/// |
/// 3 | #[tea(kind = "EarlGrey", with(sugar = "lol", milk))]
/// | ^
/// ```
///
/// # Example
///
/// Same as above but we factor out most of the logic into a separate function.
///
/// ```
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::meta::ParseNestedMeta;
/// use syn::parse::{Parser, Result};
/// use syn::{parse_macro_input, LitStr, Path};
///
/// # const IGNORE: &str = stringify! {
/// #[proc_macro_attribute]
/// # };
/// pub fn tea(args: TokenStream, input: TokenStream) -> TokenStream {
/// let mut attrs = TeaAttributes::default();
/// let tea_parser = syn::meta::parser(|meta| attrs.parse(meta));
/// parse_macro_input!(args with tea_parser);
///
/// /* ... */
/// # TokenStream::new()
/// }
///
/// #[derive(Default)]
/// struct TeaAttributes {
/// kind: Option<LitStr>,
/// hot: bool,
/// with: Vec<Path>,
/// }
///
/// impl TeaAttributes {
/// fn parse(&mut self, meta: ParseNestedMeta) -> Result<()> {
/// if meta.path.is_ident("kind") {
/// self.kind = Some(meta.value()?.parse()?);
/// Ok(())
/// } else /* just like in last example */
/// # { unimplemented!() }
///
/// }
/// }
/// ```
pub fn parser(logic: impl FnMut(ParseNestedMeta) -> Result<()>) -> impl Parser<Output = ()> {
    |input: ParseStream| {
        // An empty attribute argument list is accepted without invoking the
        // caller's logic at all.
        if input.is_empty() {
            return Ok(());
        }
        parse_nested_meta(input, logic)
    }
}
/// Context for parsing a single property in the conventional syntax for
/// structured attributes.
///
/// # Examples
///
/// Refer to usage examples on the following two entry-points:
///
/// - [`Attribute::parse_nested_meta`] if you have an entire `Attribute` to
/// parse. Always use this if possible. Generally this is able to produce
/// better error messages because `Attribute` holds span information for all
/// of the delimiters therein.
///
/// - [`syn::meta::parser`] if you are implementing a `proc_macro_attribute`
/// macro and parsing the arguments to the attribute macro, i.e. the ones
/// written in the same attribute that dispatched the macro invocation. Rustc
/// does not pass span information for the surrounding delimiters into the
/// attribute macro invocation in this situation, so error messages might be
/// less precise.
///
/// [`Attribute::parse_nested_meta`]: crate::Attribute::parse_nested_meta
/// [`syn::meta::parser`]: crate::meta::parser
#[non_exhaustive]
pub struct ParseNestedMeta<'a> {
    /// The path naming this nested meta item, e.g. the `kind` in
    /// `#[tea(kind = "EarlGrey")]`.
    pub path: Path,
    /// Parse stream positioned immediately after `path`, from which a
    /// `= value` or a parenthesized list may be parsed if present.
    pub input: ParseStream<'a>,
}
impl<'a> ParseNestedMeta<'a> {
/// Used when parsing `key = "value"` syntax.
///
/// All it does is advance `meta.input` past the `=` sign in the input. You
/// could accomplish the same effect by writing
/// `meta.parse::<Token![=]>()?`, so at most it is a minor convenience to
/// use `meta.value()?`.
///
/// # Example
///
/// ```
/// use syn::{parse_quote, Attribute, LitStr};
///
/// let attr: Attribute = parse_quote! {
/// #[tea(kind = "EarlGrey")]
/// };
/// // conceptually:
/// if attr.path().is_ident("tea") { // this parses the `tea`
/// attr.parse_nested_meta(|meta| { // this parses the `(`
/// if meta.path.is_ident("kind") { // this parses the `kind`
/// let value = meta.value()?; // this parses the `=`
/// let s: LitStr = value.parse()?; // this parses `"EarlGrey"`
/// if s.value() == "EarlGrey" {
/// // ...
/// }
/// Ok(())
/// } else {
/// Err(meta.error("unsupported attribute"))
/// }
/// })?;
/// }
/// # anyhow::Ok(())
/// ```
pub fn value(&self) -> Result<ParseStream<'a>> {
self.input.parse::<Token![=]>()?;
Ok(self.input)
}
/// Used when parsing `list(...)` syntax **if** the content inside the
/// nested parentheses is also expected to conform to Rust's structured
/// attribute convention.
///
/// # Example
///
/// ```
/// use syn::{parse_quote, Attribute};
///
/// let attr: Attribute = parse_quote! {
/// #[tea(with(sugar, milk))]
/// };
///
/// if attr.path().is_ident("tea") {
/// attr.parse_nested_meta(|meta| {
/// if meta.path.is_ident("with") {
/// meta.parse_nested_meta(|meta| { // <---
/// if meta.path.is_ident("sugar") {
/// // Here we can go even deeper if needed.
/// Ok(())
/// } else if meta.path.is_ident("milk") {
/// Ok(())
/// } else {
/// Err(meta.error("unsupported ingredient"))
/// }
/// })
/// } else {
/// Err(meta.error("unsupported tea property"))
/// }
/// })?;
/// }
/// # anyhow::Ok(())
/// ```
///
/// # Counterexample
///
/// If you don't need `parse_nested_meta`'s help in parsing the content
/// written within the nested parentheses, keep in mind that you can always
/// just parse it yourself from the exposed ParseStream. Rust syntax permits
/// arbitrary tokens within those parentheses so for the crazier stuff,
/// `parse_nested_meta` is not what you want.
///
/// ```
/// use syn::{parenthesized, parse_quote, Attribute, LitInt};
///
/// let attr: Attribute = parse_quote! {
/// #[repr(align(32))]
/// };
///
/// let mut align: Option<LitInt> = None;
/// if attr.path().is_ident("repr") {
/// attr.parse_nested_meta(|meta| {
/// if meta.path.is_ident("align") {
/// let content;
/// parenthesized!(content in meta.input);
/// align = Some(content.parse()?);
/// Ok(())
/// } else {
/// Err(meta.error("unsupported repr"))
/// }
/// })?;
/// }
/// # anyhow::Ok(())
/// ```
pub fn parse_nested_meta(
&self,
logic: impl FnMut(ParseNestedMeta) -> Result<()>,
) -> Result<()> {
let content;
parenthesized!(content in self.input);
parse_nested_meta(&content, logic)
}
/// Report that the attribute's content did not conform to expectations.
///
/// The span of the resulting error will cover `meta.path` *and* everything
/// that has been parsed so far since it.
///
/// There are 2 ways you might call this. First, if `meta.path` is not
/// something you recognize:
///
/// ```
/// # use syn::Attribute;
/// #
/// # fn example(attr: &Attribute) -> syn::Result<()> {
/// attr.parse_nested_meta(|meta| {
/// if meta.path.is_ident("kind") {
/// // ...
/// Ok(())
/// } else {
/// Err(meta.error("unsupported tea property"))
/// }
/// })?;
/// # Ok(())
/// # }
/// ```
///
/// In this case, it behaves exactly like
/// `syn::Error::new_spanned(&meta.path, "message...")`.
///
/// ```console
/// error: unsupported tea property
/// --> src/main.rs:3:26
/// |
/// 3 | #[tea(kind = "EarlGrey", wat = "foo")]
/// | ^^^
/// ```
///
/// More usefully, the second place is if you've already parsed a value but
/// have decided not to accept the value:
///
/// ```
/// # use syn::Attribute;
/// #
/// # fn example(attr: &Attribute) -> syn::Result<()> {
/// use syn::Expr;
///
/// attr.parse_nested_meta(|meta| {
/// if meta.path.is_ident("kind") {
/// let expr: Expr = meta.value()?.parse()?;
/// match expr {
/// Expr::Lit(expr) => /* ... */
/// # unimplemented!(),
/// Expr::Path(expr) => /* ... */
/// # unimplemented!(),
/// Expr::Macro(expr) => /* ... */
/// # unimplemented!(),
/// _ => Err(meta.error("tea kind must be a string literal, path, or macro")),
/// }
/// } else /* as above */
/// # { unimplemented!() }
///
/// })?;
/// # Ok(())
/// # }
/// ```
///
/// ```console
/// error: tea kind must be a string literal, path, or macro
/// --> src/main.rs:3:7
/// |
/// 3 | #[tea(kind = async { replicator.await })]
/// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
/// ```
///
/// Often you may want to use `syn::Error::new_spanned` even in this
/// situation. In the above code, that would be:
///
/// ```
/// # use syn::{Error, Expr};
/// #
/// # fn example(expr: Expr) -> syn::Result<()> {
/// match expr {
/// Expr::Lit(expr) => /* ... */
/// # unimplemented!(),
/// Expr::Path(expr) => /* ... */
/// # unimplemented!(),
/// Expr::Macro(expr) => /* ... */
/// # unimplemented!(),
/// _ => Err(Error::new_spanned(expr, "unsupported expression type for `kind`")),
/// }
/// # }
/// ```
///
/// ```console
/// error: unsupported expression type for `kind`
/// --> src/main.rs:3:14
/// |
/// 3 | #[tea(kind = async { replicator.await })]
/// | ^^^^^^^^^^^^^^^^^^^^^^^^^^
/// ```
pub fn error(&self, msg: impl Display) -> Error {
let start_span = self.path.segments[0].ident.span();
let end_span = self.input.cursor().prev_span();
crate::error::new2(start_span, end_span, msg)
}
}
// Parses one or more comma-separated nested meta items, invoking `logic` for
// each. At least one item is required; a trailing comma after the final item
// is accepted.
pub(crate) fn parse_nested_meta(
    input: ParseStream,
    mut logic: impl FnMut(ParseNestedMeta) -> Result<()>,
) -> Result<()> {
    loop {
        let path = input.call(parse_meta_path)?;
        logic(ParseNestedMeta { path, input })?;
        // Items must be comma-separated; the comma is optional only when the
        // input ends right after an item.
        if !input.is_empty() {
            input.parse::<Token![,]>()?;
        }
        if input.is_empty() {
            return Ok(());
        }
    }
}
// Like Path::parse_mod_style, but accepts keywords in the path.
fn parse_meta_path(input: ParseStream) -> Result<Path> {
    let leading_colon = input.parse()?;
    let mut segments = Punctuated::new();

    // The first segment must be an ident (keywords permitted); otherwise
    // produce a targeted error message for whatever is there instead.
    if input.peek(Ident::peek_any) {
        segments.push_value(PathSegment::from(Ident::parse_any(input)?));
    } else if input.is_empty() {
        return Err(input.error("expected nested attribute"));
    } else if input.peek(Lit) {
        return Err(input.error("unexpected literal in nested attribute, expected ident"));
    } else {
        return Err(input.error("unexpected token in nested attribute, expected ident"));
    }

    // Any number of further `::`-separated segments, also allowing keywords.
    while input.peek(Token![::]) {
        segments.push_punct(input.parse()?);
        segments.push_value(PathSegment::from(Ident::parse_any(input)?));
    }

    Ok(Path {
        leading_colon,
        segments,
    })
}

156
src/op.rs
View File

@ -1,7 +1,9 @@
ast_enum! {
/// A binary operator: `+`, `+=`, `&`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
#[non_exhaustive]
pub enum BinOp {
/// The `+` operator (addition)
Add(Token![+]),
@ -40,32 +42,34 @@ ast_enum! {
/// The `>` operator (greater than)
Gt(Token![>]),
/// The `+=` operator
AddAssign(Token![+=]),
AddEq(Token![+=]),
/// The `-=` operator
SubAssign(Token![-=]),
SubEq(Token![-=]),
/// The `*=` operator
MulAssign(Token![*=]),
MulEq(Token![*=]),
/// The `/=` operator
DivAssign(Token![/=]),
DivEq(Token![/=]),
/// The `%=` operator
RemAssign(Token![%=]),
RemEq(Token![%=]),
/// The `^=` operator
BitXorAssign(Token![^=]),
BitXorEq(Token![^=]),
/// The `&=` operator
BitAndAssign(Token![&=]),
BitAndEq(Token![&=]),
/// The `|=` operator
BitOrAssign(Token![|=]),
BitOrEq(Token![|=]),
/// The `<<=` operator
ShlAssign(Token![<<=]),
ShlEq(Token![<<=]),
/// The `>>=` operator
ShrAssign(Token![>>=]),
ShrEq(Token![>>=]),
}
}
ast_enum! {
/// A unary operator: `*`, `!`, `-`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
#[non_exhaustive]
pub enum UnOp {
/// The `*` operator for dereferencing
Deref(Token![*]),
@ -77,71 +81,83 @@ ast_enum! {
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::parse::{Parse, ParseStream, Result};
fn parse_binop(input: ParseStream) -> Result<BinOp> {
if input.peek(Token![&&]) {
input.parse().map(BinOp::And)
} else if input.peek(Token![||]) {
input.parse().map(BinOp::Or)
} else if input.peek(Token![<<]) {
input.parse().map(BinOp::Shl)
} else if input.peek(Token![>>]) {
input.parse().map(BinOp::Shr)
} else if input.peek(Token![==]) {
input.parse().map(BinOp::Eq)
} else if input.peek(Token![<=]) {
input.parse().map(BinOp::Le)
} else if input.peek(Token![!=]) {
input.parse().map(BinOp::Ne)
} else if input.peek(Token![>=]) {
input.parse().map(BinOp::Ge)
} else if input.peek(Token![+]) {
input.parse().map(BinOp::Add)
} else if input.peek(Token![-]) {
input.parse().map(BinOp::Sub)
} else if input.peek(Token![*]) {
input.parse().map(BinOp::Mul)
} else if input.peek(Token![/]) {
input.parse().map(BinOp::Div)
} else if input.peek(Token![%]) {
input.parse().map(BinOp::Rem)
} else if input.peek(Token![^]) {
input.parse().map(BinOp::BitXor)
} else if input.peek(Token![&]) {
input.parse().map(BinOp::BitAnd)
} else if input.peek(Token![|]) {
input.parse().map(BinOp::BitOr)
} else if input.peek(Token![<]) {
input.parse().map(BinOp::Lt)
} else if input.peek(Token![>]) {
input.parse().map(BinOp::Gt)
} else {
Err(input.error("expected binary operator"))
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for BinOp {
#[cfg(not(feature = "full"))]
fn parse(input: ParseStream) -> Result<Self> {
parse_binop(input)
}
#[cfg(feature = "full")]
fn parse(input: ParseStream) -> Result<Self> {
if input.peek(Token![+=]) {
input.parse().map(BinOp::AddAssign)
input.parse().map(BinOp::AddEq)
} else if input.peek(Token![-=]) {
input.parse().map(BinOp::SubAssign)
input.parse().map(BinOp::SubEq)
} else if input.peek(Token![*=]) {
input.parse().map(BinOp::MulAssign)
input.parse().map(BinOp::MulEq)
} else if input.peek(Token![/=]) {
input.parse().map(BinOp::DivAssign)
input.parse().map(BinOp::DivEq)
} else if input.peek(Token![%=]) {
input.parse().map(BinOp::RemAssign)
input.parse().map(BinOp::RemEq)
} else if input.peek(Token![^=]) {
input.parse().map(BinOp::BitXorAssign)
input.parse().map(BinOp::BitXorEq)
} else if input.peek(Token![&=]) {
input.parse().map(BinOp::BitAndAssign)
input.parse().map(BinOp::BitAndEq)
} else if input.peek(Token![|=]) {
input.parse().map(BinOp::BitOrAssign)
input.parse().map(BinOp::BitOrEq)
} else if input.peek(Token![<<=]) {
input.parse().map(BinOp::ShlAssign)
input.parse().map(BinOp::ShlEq)
} else if input.peek(Token![>>=]) {
input.parse().map(BinOp::ShrAssign)
} else if input.peek(Token![&&]) {
input.parse().map(BinOp::And)
} else if input.peek(Token![||]) {
input.parse().map(BinOp::Or)
} else if input.peek(Token![<<]) {
input.parse().map(BinOp::Shl)
} else if input.peek(Token![>>]) {
input.parse().map(BinOp::Shr)
} else if input.peek(Token![==]) {
input.parse().map(BinOp::Eq)
} else if input.peek(Token![<=]) {
input.parse().map(BinOp::Le)
} else if input.peek(Token![!=]) {
input.parse().map(BinOp::Ne)
} else if input.peek(Token![>=]) {
input.parse().map(BinOp::Ge)
} else if input.peek(Token![+]) {
input.parse().map(BinOp::Add)
} else if input.peek(Token![-]) {
input.parse().map(BinOp::Sub)
} else if input.peek(Token![*]) {
input.parse().map(BinOp::Mul)
} else if input.peek(Token![/]) {
input.parse().map(BinOp::Div)
} else if input.peek(Token![%]) {
input.parse().map(BinOp::Rem)
} else if input.peek(Token![^]) {
input.parse().map(BinOp::BitXor)
} else if input.peek(Token![&]) {
input.parse().map(BinOp::BitAnd)
} else if input.peek(Token![|]) {
input.parse().map(BinOp::BitOr)
} else if input.peek(Token![<]) {
input.parse().map(BinOp::Lt)
} else if input.peek(Token![>]) {
input.parse().map(BinOp::Gt)
input.parse().map(BinOp::ShrEq)
} else {
Err(input.error("expected binary operator"))
parse_binop(input)
}
}
}
@ -191,16 +207,16 @@ mod printing {
BinOp::Ne(t) => t.to_tokens(tokens),
BinOp::Ge(t) => t.to_tokens(tokens),
BinOp::Gt(t) => t.to_tokens(tokens),
BinOp::AddAssign(t) => t.to_tokens(tokens),
BinOp::SubAssign(t) => t.to_tokens(tokens),
BinOp::MulAssign(t) => t.to_tokens(tokens),
BinOp::DivAssign(t) => t.to_tokens(tokens),
BinOp::RemAssign(t) => t.to_tokens(tokens),
BinOp::BitXorAssign(t) => t.to_tokens(tokens),
BinOp::BitAndAssign(t) => t.to_tokens(tokens),
BinOp::BitOrAssign(t) => t.to_tokens(tokens),
BinOp::ShlAssign(t) => t.to_tokens(tokens),
BinOp::ShrAssign(t) => t.to_tokens(tokens),
BinOp::AddEq(t) => t.to_tokens(tokens),
BinOp::SubEq(t) => t.to_tokens(tokens),
BinOp::MulEq(t) => t.to_tokens(tokens),
BinOp::DivEq(t) => t.to_tokens(tokens),
BinOp::RemEq(t) => t.to_tokens(tokens),
BinOp::BitXorEq(t) => t.to_tokens(tokens),
BinOp::BitAndEq(t) => t.to_tokens(tokens),
BinOp::BitOrEq(t) => t.to_tokens(tokens),
BinOp::ShlEq(t) => t.to_tokens(tokens),
BinOp::ShrEq(t) => t.to_tokens(tokens),
}
}
}

View File

@ -66,7 +66,7 @@
//! struct_token: input.parse()?,
//! ident: input.parse()?,
//! brace_token: braced!(content in input),
//! fields: content.parse_terminated(Field::parse_named, Token![,])?,
//! fields: content.parse_terminated(Field::parse_named)?,
//! })
//! }
//! }
@ -84,7 +84,7 @@
//! let input = parse_macro_input!(tokens as Item);
//!
//! /* ... */
//! # TokenStream::new()
//! # "".parse().unwrap()
//! }
//! ```
//!
@ -178,6 +178,10 @@
//! Ok(())
//! }
//! ```
//!
//! ---
//!
//! *This module is available only if Syn is built with the `"parsing"` feature.*
#[path = "discouraged.rs"]
pub mod discouraged;
@ -185,7 +189,10 @@ pub mod discouraged;
use crate::buffer::{Cursor, TokenBuffer};
use crate::error;
use crate::lookahead;
#[cfg(feature = "proc-macro")]
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "proc-macro"
))]
use crate::proc_macro;
use crate::punctuated::Punctuated;
use crate::token::Token;
@ -513,8 +520,8 @@ impl<'a> ParseBuffer<'a> {
///
/// - `input.peek(Token![struct])`
/// - `input.peek(Token![==])`
/// - `input.peek(syn::Ident)`&emsp;*(does not accept keywords)*
/// - `input.peek(syn::Ident::peek_any)`
/// - `input.peek(Ident)`&emsp;*(does not accept keywords)*
/// - `input.peek(Ident::peek_any)`
/// - `input.peek(Lifetime)`
/// - `input.peek(token::Brace)`
///
@ -678,7 +685,7 @@ impl<'a> ParseBuffer<'a> {
/// struct_token: input.parse()?,
/// ident: input.parse()?,
/// paren_token: parenthesized!(content in input),
/// fields: content.parse_terminated(Type::parse, Token![,])?,
/// fields: content.parse_terminated(Type::parse)?,
/// semi_token: input.parse()?,
/// })
/// }
@ -689,63 +696,10 @@ impl<'a> ParseBuffer<'a> {
/// # };
/// # syn::parse2::<TupleStruct>(input).unwrap();
/// ```
///
/// # See also
///
/// If your separator is anything more complicated than an invocation of the
/// `Token!` macro, this method won't be applicable and you can instead
/// directly use `Punctuated`'s parser functions: [`parse_terminated`],
/// [`parse_separated_nonempty`] etc.
///
/// [`parse_terminated`]: Punctuated::parse_terminated
/// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
///
/// ```
/// use syn::{custom_keyword, Expr, Result, Token};
/// use syn::parse::{Parse, ParseStream};
/// use syn::punctuated::Punctuated;
///
/// mod kw {
/// syn::custom_keyword!(fin);
/// }
///
/// struct Fin(kw::fin, Token![;]);
///
/// impl Parse for Fin {
/// fn parse(input: ParseStream) -> Result<Self> {
/// Ok(Self(input.parse()?, input.parse()?))
/// }
/// }
///
/// struct Thing {
/// steps: Punctuated<Expr, Fin>,
/// }
///
/// impl Parse for Thing {
/// fn parse(input: ParseStream) -> Result<Self> {
/// # if true {
/// Ok(Thing {
/// steps: Punctuated::parse_terminated(input)?,
/// })
/// # } else {
/// // or equivalently, this means the same thing:
/// # Ok(Thing {
/// steps: input.call(Punctuated::parse_terminated)?,
/// # })
/// # }
/// }
/// }
/// ```
pub fn parse_terminated<T, P>(
pub fn parse_terminated<T, P: Parse>(
&self,
parser: fn(ParseStream) -> Result<T>,
separator: P,
) -> Result<Punctuated<T, P::Token>>
where
P: Peek,
P::Token: Parse,
{
let _ = separator;
) -> Result<Punctuated<T, P>> {
Punctuated::parse_terminated_with(self, parser)
}
@ -796,7 +750,7 @@ impl<'a> ParseBuffer<'a> {
/// # Example
///
/// ```
/// use syn::{ConstParam, Ident, Lifetime, LifetimeParam, Result, Token, TypeParam};
/// use syn::{ConstParam, Ident, Lifetime, LifetimeDef, Result, Token, TypeParam};
/// use syn::parse::{Parse, ParseStream};
///
/// // A generic parameter, a single one of the comma-separated elements inside
@ -812,7 +766,7 @@ impl<'a> ParseBuffer<'a> {
/// // | ^
/// enum GenericParam {
/// Type(TypeParam),
/// Lifetime(LifetimeParam),
/// Lifetime(LifetimeDef),
/// Const(ConstParam),
/// }
///
@ -1096,58 +1050,6 @@ impl<'a> ParseBuffer<'a> {
///
/// Cursors are immutable so no operations you perform against the cursor
/// will affect the state of this parse stream.
///
/// # Example
///
/// ```
/// use proc_macro2::TokenStream;
/// use syn::buffer::Cursor;
/// use syn::parse::{ParseStream, Result};
///
/// // Run a parser that returns T, but get its output as TokenStream instead of T.
/// // This works without T needing to implement ToTokens.
/// fn recognize_token_stream<T>(
/// recognizer: fn(ParseStream) -> Result<T>,
/// ) -> impl Fn(ParseStream) -> Result<TokenStream> {
/// move |input| {
/// let begin = input.cursor();
/// recognizer(input)?;
/// let end = input.cursor();
/// Ok(tokens_between(begin, end))
/// }
/// }
///
/// // Collect tokens between two cursors as a TokenStream.
/// fn tokens_between(begin: Cursor, end: Cursor) -> TokenStream {
/// assert!(begin <= end);
///
/// let mut cursor = begin;
/// let mut tokens = TokenStream::new();
/// while cursor < end {
/// let (token, next) = cursor.token_tree().unwrap();
/// tokens.extend(std::iter::once(token));
/// cursor = next;
/// }
/// tokens
/// }
///
/// fn main() {
/// use quote::quote;
/// use syn::parse::{Parse, Parser};
/// use syn::Token;
///
/// // Parse syn::Type as a TokenStream, surrounded by angle brackets.
/// fn example(input: ParseStream) -> Result<TokenStream> {
/// let _langle: Token![<] = input.parse()?;
/// let ty = recognize_token_stream(syn::Type::parse)(input)?;
/// let _rangle: Token![>] = input.parse()?;
/// Ok(ty)
/// }
///
/// let tokens = quote! { <fn() -> u8> };
/// println!("{}", example.parse2(tokens).unwrap());
/// }
/// ```
pub fn cursor(&self) -> Cursor<'a> {
self.cell.get()
}
@ -1199,8 +1101,10 @@ impl Parse for TokenTree {
impl Parse for Group {
fn parse(input: ParseStream) -> Result<Self> {
input.step(|cursor| {
if let Some((group, rest)) = cursor.any_group_token() {
if group.delimiter() != Delimiter::None {
for delim in &[Delimiter::Parenthesis, Delimiter::Brace, Delimiter::Bracket] {
if let Some((inside, span, rest)) = cursor.group(*delim) {
let mut group = Group::new(*delim, inside.token_stream());
group.set_span(span);
return Ok((group, rest));
}
}
@ -1234,6 +1138,8 @@ impl Parse for Literal {
/// Refer to the [module documentation] for details about parsing in Syn.
///
/// [module documentation]: self
///
/// *This trait is available only if Syn is built with the `"parsing"` feature.*
pub trait Parser: Sized {
type Output;
@ -1247,8 +1153,13 @@ pub trait Parser: Sized {
///
/// This function will check that the input is fully parsed. If there are
/// any unparsed tokens at the end of the stream, an error is returned.
#[cfg(feature = "proc-macro")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))]
///
/// *This method is available only if Syn is built with both the `"parsing"` and
/// `"proc-macro"` features.*
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "proc-macro"
))]
fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output> {
self.parse2(proc_macro2::TokenStream::from(tokens))
}
@ -1273,6 +1184,13 @@ pub trait Parser: Sized {
let _ = scope;
self.parse2(tokens)
}
// Not public API.
#[doc(hidden)]
#[cfg(any(feature = "full", feature = "derive"))]
fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
input.parse().and_then(|tokens| self.parse2(tokens))
}
}
fn tokens_to_parse_buffer(tokens: &TokenBuffer) -> ParseBuffer {
@ -1314,6 +1232,11 @@ where
Ok(node)
}
}
#[cfg(any(feature = "full", feature = "derive"))]
fn __parse_stream(self, input: ParseStream) -> Result<Self::Output> {
self(input)
}
}
#[cfg(any(feature = "full", feature = "derive"))]
@ -1321,6 +1244,11 @@ pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) ->
f.__parse_scoped(scope, tokens)
}
#[cfg(any(feature = "full", feature = "derive"))]
pub(crate) fn parse_stream<F: Parser>(f: F, input: ParseStream) -> Result<F::Output> {
f.__parse_stream(input)
}
/// An empty syntax tree node that consumes no tokens when parsed.
///
/// This is useful for attribute macros that want to ensure they are not
@ -1340,7 +1268,7 @@ pub(crate) fn parse_scoped<F: Parser>(f: F, scope: Span, tokens: TokenStream) ->
/// parse_macro_input!(args as Nothing);
///
/// /* ... */
/// # TokenStream::new()
/// # "".parse().unwrap()
/// }
/// ```
///

View File

@ -40,7 +40,7 @@
/// let input = parse_macro_input!(tokens as MyMacroInput);
///
/// /* ... */
/// # TokenStream::new()
/// # "".parse().unwrap()
/// }
/// ```
///
@ -76,7 +76,7 @@
/// let input = parse_macro_input!(tokens with MyMacroInput::parse_alternate);
///
/// /* ... */
/// # TokenStream::new()
/// # "".parse().unwrap()
/// }
/// ```
///
@ -107,7 +107,7 @@
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
macro_rules! parse_macro_input {
($tokenstream:ident as $ty:ty) => {
match $crate::parse::<$ty>($tokenstream) {
match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
$crate::__private::Ok(data) => data,
$crate::__private::Err(err) => {
return $crate::__private::TokenStream::from(err.to_compile_error());
@ -126,3 +126,54 @@ macro_rules! parse_macro_input {
$crate::parse_macro_input!($tokenstream as _)
};
}
////////////////////////////////////////////////////////////////////////////////
// Can parse any type that implements Parse.
use crate::parse::{Parse, ParseStream, Parser, Result};
use proc_macro::TokenStream;
// Not public API.
#[doc(hidden)]
pub fn parse<T: ParseMacroInput>(token_stream: TokenStream) -> Result<T> {
T::parse.parse(token_stream)
}
// Not public API.
#[doc(hidden)]
pub trait ParseMacroInput: Sized {
fn parse(input: ParseStream) -> Result<Self>;
}
impl<T: Parse> ParseMacroInput for T {
fn parse(input: ParseStream) -> Result<Self> {
<T as Parse>::parse(input)
}
}
////////////////////////////////////////////////////////////////////////////////
// Any other types that we want `parse_macro_input!` to be able to parse.
#[cfg(any(feature = "full", feature = "derive"))]
use crate::AttributeArgs;
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseMacroInput for AttributeArgs {
fn parse(input: ParseStream) -> Result<Self> {
let mut metas = Vec::new();
loop {
if input.is_empty() {
break;
}
let value = input.parse()?;
metas.push(value);
if input.is_empty() {
break;
}
input.parse::<Token![,]>()?;
}
Ok(metas)
}
}

View File

@ -24,8 +24,9 @@
/// }
/// ```
///
/// *This macro is available only if Syn is built with both the `"parsing"` and
/// `"printing"` features.*
/// *This macro is available only if Syn is built with the `"parsing"` feature,
/// although interpolation of syntax tree nodes into the quoted tokens is only
/// supported if Syn is built with the `"printing"` feature as well.*
///
/// # Example
///
@ -68,7 +69,7 @@
#[macro_export]
macro_rules! parse_quote {
($($tt:tt)*) => {
$crate::__private::parse_quote($crate::__private::quote::quote!($($tt)*))
$crate::parse_quote::parse($crate::__private::quote::quote!($($tt)*))
};
}
@ -100,7 +101,7 @@ macro_rules! parse_quote {
#[macro_export]
macro_rules! parse_quote_spanned {
($span:expr=> $($tt:tt)*) => {
$crate::__private::parse_quote($crate::__private::quote::quote_spanned!($span=> $($tt)*))
$crate::parse_quote::parse($crate::__private::quote::quote_spanned!($span=> $($tt)*))
};
}
@ -120,6 +121,7 @@ pub fn parse<T: ParseQuote>(token_stream: TokenStream) -> T {
}
}
// Not public API.
#[doc(hidden)]
pub trait ParseQuote: Sized {
fn parse(input: ParseStream) -> Result<Self>;
@ -136,9 +138,9 @@ impl<T: Parse> ParseQuote for T {
use crate::punctuated::Punctuated;
#[cfg(any(feature = "full", feature = "derive"))]
use crate::{attr, Attribute, Field, FieldMutability, Ident, Type, Visibility};
use crate::{attr, Attribute};
#[cfg(feature = "full")]
use crate::{Block, Pat, Stmt};
use crate::{Block, Stmt};
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Attribute {
@ -151,50 +153,6 @@ impl ParseQuote for Attribute {
}
}
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Field {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let vis: Visibility = input.parse()?;
let ident: Option<Ident>;
let colon_token: Option<Token![:]>;
let is_named = input.peek(Ident) && input.peek2(Token![:]) && !input.peek2(Token![::]);
if is_named {
ident = Some(input.parse()?);
colon_token = Some(input.parse()?);
} else {
ident = None;
colon_token = None;
}
let ty: Type = input.parse()?;
Ok(Field {
attrs,
vis,
mutability: FieldMutability::None,
ident,
colon_token,
ty,
})
}
}
#[cfg(feature = "full")]
impl ParseQuote for Pat {
fn parse(input: ParseStream) -> Result<Self> {
Pat::parse_multi_with_leading_vert(input)
}
}
#[cfg(feature = "full")]
impl ParseQuote for Box<Pat> {
fn parse(input: ParseStream) -> Result<Self> {
<Pat as ParseQuote>::parse(input).map(Box::new)
}
}
impl<T: Parse, P: Parse> ParseQuote for Punctuated<T, P> {
fn parse(input: ParseStream) -> Result<Self> {
Self::parse_terminated(input)

File diff suppressed because it is too large Load Diff

View File

@ -3,6 +3,9 @@ use crate::punctuated::Punctuated;
ast_struct! {
/// A path at which a named item is exported (e.g. `std::collections::HashMap`).
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Path {
pub leading_colon: Option<Token![::]>,
@ -24,81 +27,11 @@ where
}
}
impl Path {
/// Determines whether this is a path of length 1 equal to the given
/// ident.
///
/// For them to compare equal, it must be the case that:
///
/// - the path has no leading colon,
/// - the number of path segments is 1,
/// - the first path segment has no angle bracketed or parenthesized
/// path arguments, and
/// - the ident of the first path segment is equal to the given one.
///
/// # Example
///
/// ```
/// use proc_macro2::TokenStream;
/// use syn::{Attribute, Error, Meta, Result};
///
/// fn get_serde_meta_item(attr: &Attribute) -> Result<Option<&TokenStream>> {
/// if attr.path().is_ident("serde") {
/// match &attr.meta {
/// Meta::List(meta) => Ok(Some(&meta.tokens)),
/// bad => Err(Error::new_spanned(bad, "unrecognized attribute")),
/// }
/// } else {
/// Ok(None)
/// }
/// }
/// ```
pub fn is_ident<I>(&self, ident: &I) -> bool
where
I: ?Sized,
Ident: PartialEq<I>,
{
match self.get_ident() {
Some(id) => id == ident,
None => false,
}
}
/// If this path consists of a single ident, returns the ident.
///
/// A path is considered an ident if:
///
/// - the path has no leading colon,
/// - the number of path segments is 1, and
/// - the first path segment has no angle bracketed or parenthesized
/// path arguments.
pub fn get_ident(&self) -> Option<&Ident> {
if self.leading_colon.is_none()
&& self.segments.len() == 1
&& self.segments[0].arguments.is_none()
{
Some(&self.segments[0].ident)
} else {
None
}
}
/// An error if this path is not a single ident, as defined in `get_ident`.
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn require_ident(&self) -> Result<&Ident> {
self.get_ident().ok_or_else(|| {
crate::error::new2(
self.segments.first().unwrap().ident.span(),
self.segments.last().unwrap().ident.span(),
"expected this path to be an identifier",
)
})
}
}
ast_struct! {
/// A segment of a path together with any path arguments on that segment.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct PathSegment {
pub ident: Ident,
@ -121,6 +54,9 @@ where
ast_enum! {
/// Angle bracketed or parenthesized arguments of a path segment.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// ## Angle bracketed
///
/// The `<'a, T>` in `std::slice::iter<'a, T>`.
@ -163,8 +99,10 @@ impl PathArguments {
ast_enum! {
/// An individual generic argument, like `'a`, `T`, or `Item = T`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
#[non_exhaustive]
pub enum GenericArgument {
/// A lifetime argument.
Lifetime(Lifetime),
@ -177,10 +115,7 @@ ast_enum! {
Const(Expr),
/// A binding (equality constraint) on an associated type: the `Item =
/// u8` in `Iterator<Item = u8>`.
AssocType(AssocType),
/// An equality constraint on an associated constant: the `PANIC =
/// false` in `Trait<PANIC = false>`.
AssocConst(AssocConst),
Binding(Binding),
/// An associated type bound: `Iterator<Item: Display>`.
Constraint(Constraint),
}
@ -189,6 +124,9 @@ ast_enum! {
ast_struct! {
/// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
/// V>`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct AngleBracketedGenericArguments {
pub colon2_token: Option<Token![::]>,
@ -199,35 +137,26 @@ ast_struct! {
}
ast_struct! {
/// A binding (equality constraint) on an associated type: the `Item = u8`
/// in `Iterator<Item = u8>`.
/// A binding (equality constraint) on an associated type: `Item = u8`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct AssocType {
pub struct Binding {
pub ident: Ident,
pub generics: Option<AngleBracketedGenericArguments>,
pub eq_token: Token![=],
pub ty: Type,
}
}
ast_struct! {
/// An equality constraint on an associated constant: the `PANIC = false` in
/// `Trait<PANIC = false>`.
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct AssocConst {
pub ident: Ident,
pub generics: Option<AngleBracketedGenericArguments>,
pub eq_token: Token![=],
pub value: Expr,
}
}
ast_struct! {
/// An associated type bound: `Iterator<Item: Display>`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Constraint {
pub ident: Ident,
pub generics: Option<AngleBracketedGenericArguments>,
pub colon_token: Token![:],
pub bounds: Punctuated<TypeParamBound, Token![+]>,
}
@ -236,6 +165,9 @@ ast_struct! {
ast_struct! {
/// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
/// C`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct ParenthesizedGenericArguments {
pub paren_token: token::Paren,
@ -263,6 +195,9 @@ ast_struct! {
/// ^~~~~~ ^
/// ty position = 0
/// ```
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct QSelf {
pub lt_token: Token![<],
@ -274,10 +209,10 @@ ast_struct! {
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::ext::IdentExt as _;
use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
@ -294,87 +229,92 @@ pub(crate) mod parsing {
return Ok(GenericArgument::Lifetime(input.parse()?));
}
if input.peek(Ident) && input.peek2(Token![=]) {
let ident: Ident = input.parse()?;
let eq_token: Token![=] = input.parse()?;
let ty = if input.peek(Lit) {
let begin = input.fork();
input.parse::<Lit>()?;
Type::Verbatim(verbatim::between(begin, input))
} else if input.peek(token::Brace) {
let begin = input.fork();
#[cfg(feature = "full")]
{
input.parse::<ExprBlock>()?;
}
#[cfg(not(feature = "full"))]
{
let content;
braced!(content in input);
content.parse::<Expr>()?;
}
Type::Verbatim(verbatim::between(begin, input))
} else {
input.parse()?
};
return Ok(GenericArgument::Binding(Binding {
ident,
eq_token,
ty,
}));
}
#[cfg(feature = "full")]
{
if input.peek(Ident) && input.peek2(Token![:]) && !input.peek2(Token![::]) {
return Ok(GenericArgument::Constraint(input.parse()?));
}
}
if input.peek(Lit) || input.peek(token::Brace) {
return const_argument(input).map(GenericArgument::Const);
}
let mut argument: Type = input.parse()?;
#[cfg(feature = "full")]
let begin = input.fork();
match argument {
Type::Path(mut ty)
if ty.qself.is_none()
&& ty.path.leading_colon.is_none()
&& ty.path.segments.len() == 1
&& match &ty.path.segments[0].arguments {
PathArguments::None | PathArguments::AngleBracketed(_) => true,
PathArguments::Parenthesized(_) => false,
} =>
{
if let Some(eq_token) = input.parse::<Option<Token![=]>>()? {
let segment = ty.path.segments.pop().unwrap().into_value();
let ident = segment.ident;
let generics = match segment.arguments {
PathArguments::None => None,
PathArguments::AngleBracketed(arguments) => Some(arguments),
PathArguments::Parenthesized(_) => unreachable!(),
};
return if input.peek(Lit) || input.peek(token::Brace) {
Ok(GenericArgument::AssocConst(AssocConst {
ident,
generics,
eq_token,
value: const_argument(input)?,
}))
} else {
Ok(GenericArgument::AssocType(AssocType {
ident,
generics,
eq_token,
ty: input.parse()?,
}))
};
let argument: Type = input.parse()?;
#[cfg(feature = "full")]
{
if match &argument {
Type::Path(argument)
if argument.qself.is_none()
&& argument.path.leading_colon.is_none()
&& argument.path.segments.len() == 1 =>
{
match argument.path.segments[0].arguments {
PathArguments::AngleBracketed(_) => true,
_ => false,
}
}
#[cfg(feature = "full")]
if let Some(colon_token) = input.parse::<Option<Token![:]>>()? {
let segment = ty.path.segments.pop().unwrap().into_value();
return Ok(GenericArgument::Constraint(Constraint {
ident: segment.ident,
generics: match segment.arguments {
PathArguments::None => None,
PathArguments::AngleBracketed(arguments) => Some(arguments),
PathArguments::Parenthesized(_) => unreachable!(),
},
colon_token,
bounds: {
let mut bounds = Punctuated::new();
loop {
if input.peek(Token![,]) || input.peek(Token![>]) {
break;
}
let value: TypeParamBound = input.parse()?;
bounds.push_value(value);
if !input.peek(Token![+]) {
break;
}
let punct: Token![+] = input.parse()?;
bounds.push_punct(punct);
}
bounds
},
}));
}
argument = Type::Path(ty);
_ => false,
} && if input.peek(Token![=]) {
input.parse::<Token![=]>()?;
input.parse::<Type>()?;
true
} else if input.peek(Token![:]) {
input.parse::<Token![:]>()?;
input.call(constraint_bounds)?;
true
} else {
false
} {
let verbatim = verbatim::between(begin, input);
return Ok(GenericArgument::Type(Type::Verbatim(verbatim)));
}
_ => {}
}
Ok(GenericArgument::Type(argument))
}
}
pub(crate) fn const_argument(input: ParseStream) -> Result<Expr> {
pub fn const_argument(input: ParseStream) -> Result<Expr> {
let lookahead = input.lookahead1();
if input.peek(Lit) {
@ -382,13 +322,16 @@ pub(crate) mod parsing {
return Ok(Expr::Lit(lit));
}
if input.peek(Ident) {
let ident: Ident = input.parse()?;
return Ok(Expr::Path(ExprPath {
attrs: Vec::new(),
qself: None,
path: Path::from(ident),
}));
#[cfg(feature = "full")]
{
if input.peek(Ident) {
let ident: Ident = input.parse()?;
return Ok(Expr::Path(ExprPath {
attrs: Vec::new(),
qself: None,
path: Path::from(ident),
}));
}
}
if input.peek(token::Brace) {
@ -404,7 +347,7 @@ pub(crate) mod parsing {
let content;
braced!(content in input);
content.parse::<Expr>()?;
let verbatim = verbatim::between(&begin, input);
let verbatim = verbatim::between(begin, input);
return Ok(Expr::Verbatim(verbatim));
}
}
@ -412,24 +355,11 @@ pub(crate) mod parsing {
Err(lookahead.error())
}
impl AngleBracketedGenericArguments {
/// Parse `::<…>` with mandatory leading `::`.
///
/// The ordinary [`Parse`] impl for `AngleBracketedGenericArguments`
/// parses optional leading `::`.
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "full"))))]
pub fn parse_turbofish(input: ParseStream) -> Result<Self> {
let colon2_token: Token![::] = input.parse()?;
Self::do_parse(Some(colon2_token), input)
}
pub(crate) fn do_parse(
colon2_token: Option<Token![::]>,
input: ParseStream,
) -> Result<Self> {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for AngleBracketedGenericArguments {
fn parse(input: ParseStream) -> Result<Self> {
Ok(AngleBracketedGenericArguments {
colon2_token,
colon2_token: input.parse()?,
lt_token: input.parse()?,
args: {
let mut args = Punctuated::new();
@ -437,12 +367,12 @@ pub(crate) mod parsing {
if input.peek(Token![>]) {
break;
}
let value: GenericArgument = input.parse()?;
let value = input.parse()?;
args.push_value(value);
if input.peek(Token![>]) {
break;
}
let punct: Token![,] = input.parse()?;
let punct = input.parse()?;
args.push_punct(punct);
}
args
@ -452,21 +382,13 @@ pub(crate) mod parsing {
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for AngleBracketedGenericArguments {
fn parse(input: ParseStream) -> Result<Self> {
let colon2_token: Option<Token![::]> = input.parse()?;
Self::do_parse(colon2_token, input)
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for ParenthesizedGenericArguments {
fn parse(input: ParseStream) -> Result<Self> {
let content;
Ok(ParenthesizedGenericArguments {
paren_token: parenthesized!(content in input),
inputs: content.parse_terminated(Type::parse, Token![,])?,
inputs: content.parse_terminated(Type::parse)?,
output: input.call(ReturnType::without_plus)?,
})
}
@ -481,11 +403,7 @@ pub(crate) mod parsing {
impl PathSegment {
fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
if input.peek(Token![super])
|| input.peek(Token![self])
|| input.peek(Token![crate])
|| cfg!(feature = "full") && input.peek(Token![try])
{
if input.peek(Token![super]) || input.peek(Token![self]) || input.peek(Token![crate]) {
let ident = input.call(Ident::parse_any)?;
return Ok(PathSegment::from(ident));
}
@ -509,9 +427,53 @@ pub(crate) mod parsing {
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Binding {
fn parse(input: ParseStream) -> Result<Self> {
Ok(Binding {
ident: input.parse()?,
eq_token: input.parse()?,
ty: input.parse()?,
})
}
}
#[cfg(feature = "full")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Constraint {
fn parse(input: ParseStream) -> Result<Self> {
Ok(Constraint {
ident: input.parse()?,
colon_token: input.parse()?,
bounds: constraint_bounds(input)?,
})
}
}
#[cfg(feature = "full")]
fn constraint_bounds(input: ParseStream) -> Result<Punctuated<TypeParamBound, Token![+]>> {
let mut bounds = Punctuated::new();
loop {
if input.peek(Token![,]) || input.peek(Token![>]) {
break;
}
let value = input.parse()?;
bounds.push_value(value);
if !input.peek(Token![+]) {
break;
}
let punct = input.parse()?;
bounds.push_punct(punct);
}
Ok(bounds)
}
impl Path {
/// Parse a `Path` containing no path arguments on any of its segments.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
///
/// # Example
///
/// ```
@ -564,15 +526,80 @@ pub(crate) mod parsing {
segments.push_punct(punct);
}
if segments.is_empty() {
return Err(input.parse::<Ident>().unwrap_err());
return Err(input.error("expected path"));
} else if segments.trailing_punct() {
return Err(input.error("expected path segment after `::`"));
return Err(input.error("expected path segment"));
}
segments
},
})
}
/// Determines whether this is a path of length 1 equal to the given
/// ident.
///
/// For them to compare equal, it must be the case that:
///
/// - the path has no leading colon,
/// - the number of path segments is 1,
/// - the first path segment has no angle bracketed or parenthesized
/// path arguments, and
/// - the ident of the first path segment is equal to the given one.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
///
/// # Example
///
/// ```
/// use syn::{Attribute, Error, Meta, NestedMeta, Result};
/// # use std::iter::FromIterator;
///
/// fn get_serde_meta_items(attr: &Attribute) -> Result<Vec<NestedMeta>> {
/// if attr.path.is_ident("serde") {
/// match attr.parse_meta()? {
/// Meta::List(meta) => Ok(Vec::from_iter(meta.nested)),
/// bad => Err(Error::new_spanned(bad, "unrecognized attribute")),
/// }
/// } else {
/// Ok(Vec::new())
/// }
/// }
/// ```
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn is_ident<I: ?Sized>(&self, ident: &I) -> bool
where
Ident: PartialEq<I>,
{
match self.get_ident() {
Some(id) => id == ident,
None => false,
}
}
/// If this path consists of a single ident, returns the ident.
///
/// A path is considered an ident if:
///
/// - the path has no leading colon,
/// - the number of path segments is 1, and
/// - the first path segment has no angle bracketed or parenthesized
/// path arguments.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn get_ident(&self) -> Option<&Ident> {
if self.leading_colon.is_none()
&& self.segments.len() == 1
&& self.segments[0].arguments.is_none()
{
Some(&self.segments[0].ident)
} else {
None
}
}
pub(crate) fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
let mut path = Path {
leading_colon: input.parse()?,
@ -600,15 +627,9 @@ pub(crate) mod parsing {
}
Ok(())
}
pub(crate) fn is_mod_style(&self) -> bool {
self.segments
.iter()
.all(|segment| segment.arguments.is_none())
}
}
pub(crate) fn qpath(input: ParseStream, expr_style: bool) -> Result<(Option<QSelf>, Path)> {
pub fn qpath(input: ParseStream, expr_style: bool) -> Result<(Option<QSelf>, Path)> {
if input.peek(Token![<]) {
let lt_token: Token![<] = input.parse()?;
let this: Type = input.parse()?;
@ -665,10 +686,6 @@ pub(crate) mod parsing {
pub(crate) mod printing {
use super::*;
use crate::print::TokensOrDefault;
#[cfg(feature = "parsing")]
use crate::spanned::Spanned;
#[cfg(feature = "parsing")]
use proc_macro2::Span;
use proc_macro2::TokenStream;
use quote::ToTokens;
use std::cmp;
@ -711,32 +728,23 @@ pub(crate) mod printing {
match self {
GenericArgument::Lifetime(lt) => lt.to_tokens(tokens),
GenericArgument::Type(ty) => ty.to_tokens(tokens),
GenericArgument::Const(expr) => match expr {
Expr::Lit(expr) => expr.to_tokens(tokens),
Expr::Path(expr)
if expr.attrs.is_empty()
&& expr.qself.is_none()
&& expr.path.get_ident().is_some() =>
{
expr.to_tokens(tokens);
}
GenericArgument::Const(e) => match *e {
Expr::Lit(_) => e.to_tokens(tokens),
// NOTE: We should probably support parsing blocks with only
// expressions in them without the full feature for const
// generics.
#[cfg(feature = "full")]
Expr::Block(expr) => expr.to_tokens(tokens),
#[cfg(not(feature = "full"))]
Expr::Verbatim(expr) => expr.to_tokens(tokens),
Expr::Block(_) => e.to_tokens(tokens),
// ERROR CORRECTION: Add braces to make sure that the
// generated code is valid.
_ => token::Brace::default().surround(tokens, |tokens| {
expr.to_tokens(tokens);
e.to_tokens(tokens);
}),
},
GenericArgument::AssocType(assoc) => assoc.to_tokens(tokens),
GenericArgument::AssocConst(assoc) => assoc.to_tokens(tokens),
GenericArgument::Constraint(constraint) => constraint.to_tokens(tokens),
GenericArgument::Binding(tb) => tb.to_tokens(tokens),
GenericArgument::Constraint(tc) => tc.to_tokens(tokens),
}
}
}
@ -751,24 +759,22 @@ pub(crate) mod printing {
// order in self.args.
let mut trailing_or_empty = true;
for param in self.args.pairs() {
match param.value() {
match **param.value() {
GenericArgument::Lifetime(_) => {
param.to_tokens(tokens);
trailing_or_empty = param.punct().is_some();
}
GenericArgument::Type(_)
| GenericArgument::Const(_)
| GenericArgument::AssocType(_)
| GenericArgument::AssocConst(_)
| GenericArgument::Binding(_)
| GenericArgument::Constraint(_) => {}
}
}
for param in self.args.pairs() {
match param.value() {
match **param.value() {
GenericArgument::Type(_)
| GenericArgument::Const(_)
| GenericArgument::AssocType(_)
| GenericArgument::AssocConst(_)
| GenericArgument::Binding(_)
| GenericArgument::Constraint(_) => {
if !trailing_or_empty {
<Token![,]>::default().to_tokens(tokens);
@ -785,30 +791,18 @@ pub(crate) mod printing {
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for AssocType {
impl ToTokens for Binding {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.ty.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for AssocConst {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
self.eq_token.to_tokens(tokens);
self.value.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Constraint {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
self.bounds.to_tokens(tokens);
}
@ -857,21 +851,4 @@ pub(crate) mod printing {
segment.to_tokens(tokens);
}
}
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
impl Spanned for QSelf {
fn span(&self) -> Span {
struct QSelfDelimiters<'a>(&'a QSelf);
impl<'a> ToTokens for QSelfDelimiters<'a> {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.0.lt_token.to_tokens(tokens);
self.0.gt_token.to_tokens(tokens);
}
}
QSelfDelimiters(self).span()
}
}
}

View File

@ -1,7 +1,7 @@
use proc_macro2::TokenStream;
use quote::ToTokens;
pub(crate) struct TokensOrDefault<'a, T: 'a>(pub &'a Option<T>);
pub struct TokensOrDefault<'a, T: 'a>(pub &'a Option<T>);
impl<'a, T> ToTokens for TokensOrDefault<'a, T>
where

View File

@ -26,6 +26,7 @@ use std::fmt::{self, Debug};
use std::hash::{Hash, Hasher};
#[cfg(any(feature = "full", feature = "derive"))]
use std::iter;
use std::iter::FromIterator;
use std::ops::{Index, IndexMut};
use std::option;
use std::slice;
@ -37,8 +38,8 @@ use crate::parse::{Parse, ParseStream, Result};
#[cfg(feature = "parsing")]
use crate::token::Token;
/// **A punctuated sequence of syntax tree nodes of type `T` separated by
/// punctuation of type `P`.**
/// A punctuated sequence of syntax tree nodes of type `T` separated by
/// punctuation of type `P`.
///
/// Refer to the [module documentation] for details about punctuated sequences.
///
@ -50,6 +51,7 @@ pub struct Punctuated<T, P> {
impl<T, P> Punctuated<T, P> {
/// Creates an empty punctuated sequence.
#[cfg(not(syn_no_const_vec_new))]
pub const fn new() -> Self {
Punctuated {
inner: Vec::new(),
@ -57,6 +59,15 @@ impl<T, P> Punctuated<T, P> {
}
}
/// Creates an empty punctuated sequence.
#[cfg(syn_no_const_vec_new)]
pub fn new() -> Self {
Punctuated {
inner: Vec::new(),
last: None,
}
}
/// Determines whether this punctuated sequence is empty, meaning it
/// contains no syntax tree nodes or punctuation.
pub fn is_empty(&self) -> bool {
@ -140,7 +151,7 @@ impl<T, P> Punctuated<T, P> {
}
/// Appends a syntax tree node onto the end of this punctuated sequence. The
/// sequence must already have a trailing punctuation, or be empty.
/// sequence must previously have a trailing punctuation.
///
/// Use [`push`] instead if the punctuated sequence may or may not already
/// have trailing punctuation.
@ -149,8 +160,8 @@ impl<T, P> Punctuated<T, P> {
///
/// # Panics
///
/// Panics if the sequence is nonempty and does not already have a trailing
/// punctuation.
/// Panics if the sequence does not already have a trailing punctuation when
/// this method is called.
pub fn push_value(&mut self, value: T) {
assert!(
self.empty_or_trailing(),
@ -187,18 +198,6 @@ impl<T, P> Punctuated<T, P> {
}
}
/// Removes the trailing punctuation from this punctuated sequence, or
/// `None` if there isn't any.
pub fn pop_punct(&mut self) -> Option<P> {
if self.last.is_some() {
None
} else {
let (t, p) = self.inner.pop()?;
self.last = Some(Box::new(t));
Some(p)
}
}
/// Determines whether this punctuated sequence ends with a trailing
/// punctuation.
pub fn trailing_punct(&self) -> bool {
@ -261,6 +260,9 @@ impl<T, P> Punctuated<T, P> {
///
/// Parsing continues until the end of this parse stream. The entire content
/// of this parse stream must consist of `T` and `P`.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_terminated(input: ParseStream) -> Result<Self>
@ -279,6 +281,9 @@ impl<T, P> Punctuated<T, P> {
/// to be parsed.
///
/// [`parse_terminated`]: Punctuated::parse_terminated
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_terminated_with(
@ -313,6 +318,9 @@ impl<T, P> Punctuated<T, P> {
/// the stream. This method returns upon parsing a `T` and observing that it
/// is not followed by a `P`, even if there are remaining tokens in the
/// stream.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
@ -331,6 +339,9 @@ impl<T, P> Punctuated<T, P> {
/// the entire content of this stream.
///
/// [`parse_separated_nonempty`]: Punctuated::parse_separated_nonempty
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_separated_nonempty_with(
@ -369,11 +380,6 @@ where
last: self.last.clone(),
}
}
fn clone_from(&mut self, other: &Self) {
self.inner.clone_from(&other.inner);
self.last.clone_from(&other.last);
}
}
#[cfg(feature = "extra-traits")]
@ -453,37 +459,29 @@ where
impl<T, P> FromIterator<Pair<T, P>> for Punctuated<T, P> {
fn from_iter<I: IntoIterator<Item = Pair<T, P>>>(i: I) -> Self {
let mut ret = Punctuated::new();
do_extend(&mut ret, i.into_iter());
ret.extend(i);
ret
}
}
impl<T, P> Extend<Pair<T, P>> for Punctuated<T, P>
where
P: Default,
{
impl<T, P> Extend<Pair<T, P>> for Punctuated<T, P> {
fn extend<I: IntoIterator<Item = Pair<T, P>>>(&mut self, i: I) {
if !self.empty_or_trailing() {
self.push_punct(P::default());
}
do_extend(self, i.into_iter());
}
}
assert!(
self.empty_or_trailing(),
"Punctuated::extend: Punctuated is not empty or does not have a trailing punctuation",
);
fn do_extend<T, P, I>(punctuated: &mut Punctuated<T, P>, i: I)
where
I: Iterator<Item = Pair<T, P>>,
{
let mut nomore = false;
for pair in i {
if nomore {
panic!("Punctuated extended with items after a Pair::End");
}
match pair {
Pair::Punctuated(a, b) => punctuated.inner.push((a, b)),
Pair::End(a) => {
punctuated.last = Some(Box::new(a));
nomore = true;
let mut nomore = false;
for pair in i {
if nomore {
panic!("Punctuated extended with items after a Pair::End");
}
match pair {
Pair::Punctuated(a, b) => self.inner.push((a, b)),
Pair::End(a) => {
self.last = Some(Box::new(a));
nomore = true;
}
}
}
}
@ -721,11 +719,16 @@ where
///
/// [module documentation]: self
pub struct Iter<'a, T: 'a> {
inner: Box<NoDrop<dyn IterTrait<'a, T> + 'a>>,
// The `Item = &'a T` needs to be specified to support rustc 1.31 and older.
// On modern compilers we would be able to write just IterTrait<'a, T> where
// Item can be inferred unambiguously from the supertrait.
inner: Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>>,
}
trait IterTrait<'a, T: 'a>: Iterator<Item = &'a T> + DoubleEndedIterator + ExactSizeIterator {
fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T> + 'a>>;
trait IterTrait<'a, T: 'a>:
DoubleEndedIterator<Item = &'a T> + ExactSizeIterator<Item = &'a T>
{
fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>>;
}
struct PrivateIter<'a, T: 'a, P: 'a> {
@ -824,7 +827,7 @@ where
+ TrivialDrop
+ 'a,
{
fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T> + 'a>> {
fn clone_box(&self) -> Box<NoDrop<dyn IterTrait<'a, T, Item = &'a T> + 'a>> {
Box::new(NoDrop::new(self.clone()))
}
}
@ -1005,21 +1008,6 @@ impl<T, P> Pair<T, P> {
}
}
#[cfg(feature = "clone-impls")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl<T, P> Pair<&T, &P> {
pub fn cloned(self) -> Pair<T, P>
where
T: Clone,
P: Clone,
{
match self {
Pair::Punctuated(t, p) => Pair::Punctuated(t.clone(), p.clone()),
Pair::End(t) => Pair::End(t.clone()),
}
}
}
#[cfg(feature = "clone-impls")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl<T, P> Clone for Pair<T, P>
@ -1035,15 +1023,6 @@ where
}
}
#[cfg(feature = "clone-impls")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl<T, P> Copy for Pair<T, P>
where
T: Copy,
P: Copy,
{
}
impl<T, P> Index<usize> for Punctuated<T, P> {
type Output = T;

44
src/reserved.rs Normal file
View File

@ -0,0 +1,44 @@
// Type for a syntax tree node that is reserved for future use.
//
// For example ExprReference contains a field `raw` of type Reserved. If `&raw
// place` syntax becomes a thing as per https://github.com/rust-lang/rfcs/pull/2582,
// we can backward compatibly change `raw`'s type to Option<Token![raw]> without
// the possibility of breaking any code.
use proc_macro2::Span;
use std::marker::PhantomData;
#[cfg(feature = "extra-traits")]
use std::fmt::{self, Debug};
ast_struct! {
pub struct Reserved {
_private: PhantomData<Span>,
}
}
impl Default for Reserved {
fn default() -> Self {
Reserved {
_private: PhantomData,
}
}
}
#[cfg(feature = "clone-impls")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for Reserved {
fn clone(&self) -> Self {
Reserved {
_private: self._private,
}
}
}
#[cfg(feature = "extra-traits")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for Reserved {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.debug_struct("Reserved").finish()
}
}

View File

@ -1,171 +0,0 @@
use super::*;
ast_enum! {
/// The visibility level of an item: inherited or `pub` or
/// `pub(restricted)`.
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum Visibility {
/// A public visibility level: `pub`.
Public(Token![pub]),
/// A visibility level restricted to some path: `pub(self)` or
/// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
Restricted(VisRestricted),
/// An inherited visibility, which usually means private.
Inherited,
}
}
ast_struct! {
/// A visibility level restricted to some path: `pub(self)` or
/// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct VisRestricted {
pub pub_token: Token![pub],
pub paren_token: token::Paren,
pub in_token: Option<Token![in]>,
pub path: Box<Path>,
}
}
ast_enum! {
/// Unused, but reserved for RFC 3323 restrictions.
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
#[non_exhaustive]
pub enum FieldMutability {
None,
// TODO: https://rust-lang.github.io/rfcs/3323-restrictions.html
//
// FieldMutability::Restricted(MutRestricted)
//
// pub struct MutRestricted {
// pub mut_token: Token![mut],
// pub paren_token: token::Paren,
// pub in_token: Option<Token![in]>,
// pub path: Box<Path>,
// }
}
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
use super::*;
use crate::ext::IdentExt as _;
use crate::parse::discouraged::Speculative as _;
use crate::parse::{Parse, ParseStream, Result};
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Visibility {
fn parse(input: ParseStream) -> Result<Self> {
// Recognize an empty None-delimited group, as produced by a $:vis
// matcher that matched no tokens.
if input.peek(token::Group) {
let ahead = input.fork();
let group = crate::group::parse_group(&ahead)?;
if group.content.is_empty() {
input.advance_to(&ahead);
return Ok(Visibility::Inherited);
}
}
if input.peek(Token![pub]) {
Self::parse_pub(input)
} else {
Ok(Visibility::Inherited)
}
}
}
impl Visibility {
fn parse_pub(input: ParseStream) -> Result<Self> {
let pub_token = input.parse::<Token![pub]>()?;
if input.peek(token::Paren) {
let ahead = input.fork();
let content;
let paren_token = parenthesized!(content in ahead);
if content.peek(Token![crate])
|| content.peek(Token![self])
|| content.peek(Token![super])
{
let path = content.call(Ident::parse_any)?;
// Ensure there are no additional tokens within `content`.
// Without explicitly checking, we may misinterpret a tuple
// field as a restricted visibility, causing a parse error.
// e.g. `pub (crate::A, crate::B)` (Issue #720).
if content.is_empty() {
input.advance_to(&ahead);
return Ok(Visibility::Restricted(VisRestricted {
pub_token,
paren_token,
in_token: None,
path: Box::new(Path::from(path)),
}));
}
} else if content.peek(Token![in]) {
let in_token: Token![in] = content.parse()?;
let path = content.call(Path::parse_mod_style)?;
input.advance_to(&ahead);
return Ok(Visibility::Restricted(VisRestricted {
pub_token,
paren_token,
in_token: Some(in_token),
path: Box::new(path),
}));
}
}
Ok(Visibility::Public(pub_token))
}
#[cfg(feature = "full")]
pub(crate) fn is_some(&self) -> bool {
match self {
Visibility::Inherited => false,
_ => true,
}
}
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use proc_macro2::TokenStream;
use quote::ToTokens;
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Visibility {
fn to_tokens(&self, tokens: &mut TokenStream) {
match self {
Visibility::Public(pub_token) => pub_token.to_tokens(tokens),
Visibility::Restricted(vis_restricted) => vis_restricted.to_tokens(tokens),
Visibility::Inherited => {}
}
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for VisRestricted {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.pub_token.to_tokens(tokens);
self.paren_token.surround(tokens, |tokens| {
// TODO: If we have a path which is not "self" or "super" or
// "crate", automatically add the "in" token.
self.in_token.to_tokens(tokens);
self.path.to_tokens(tokens);
});
}
}
}

View File

@ -1,4 +1,4 @@
#[cfg(feature = "parsing")]
pub(crate) mod lookahead {
pub mod lookahead {
pub trait Sealed: Copy {}
}

View File

@ -1,17 +1,9 @@
use proc_macro2::extra::DelimSpan;
use proc_macro2::{Delimiter, Group, Span, TokenStream};
use proc_macro2::Span;
#[doc(hidden)]
pub trait IntoSpans<S> {
fn into_spans(self) -> S;
}
impl IntoSpans<Span> for Span {
fn into_spans(self) -> Span {
self
}
}
impl IntoSpans<[Span; 1]> for Span {
fn into_spans(self) -> [Span; 1] {
[self]
@ -48,16 +40,28 @@ impl IntoSpans<[Span; 3]> for [Span; 3] {
}
}
impl IntoSpans<DelimSpan> for Span {
fn into_spans(self) -> DelimSpan {
let mut group = Group::new(Delimiter::None, TokenStream::new());
group.set_span(self);
group.delim_span()
#[cfg(feature = "parsing")]
pub trait FromSpans: Sized {
fn from_spans(spans: &[Span]) -> Self;
}
#[cfg(feature = "parsing")]
impl FromSpans for [Span; 1] {
fn from_spans(spans: &[Span]) -> Self {
[spans[0]]
}
}
impl IntoSpans<DelimSpan> for DelimSpan {
fn into_spans(self) -> DelimSpan {
self
#[cfg(feature = "parsing")]
impl FromSpans for [Span; 2] {
fn from_spans(spans: &[Span]) -> Self {
[spans[0], spans[1]]
}
}
#[cfg(feature = "parsing")]
impl FromSpans for [Span; 3] {
fn from_spans(spans: &[Span]) -> Self {
[spans[0], spans[1], spans[2]]
}
}

View File

@ -1,6 +1,9 @@
//! A trait that can provide the `Span` of the complete contents of a syntax
//! tree node.
//!
//! *This module is available only if Syn is built with both the `"parsing"` and
//! `"printing"` features.*
//!
//! <br>
//!
//! # Example
@ -93,7 +96,10 @@ use quote::spanned::Spanned as ToTokens;
/// See the [module documentation] for an example.
///
/// [module documentation]: self
pub trait Spanned: private::Sealed {
///
/// *This trait is available only if Syn is built with both the `"parsing"` and
/// `"printing"` features.*
pub trait Spanned {
/// Returns a `Span` covering the complete contents of this syntax tree
/// node, or [`Span::call_site()`] if this node is empty.
///
@ -106,13 +112,3 @@ impl<T: ?Sized + ToTokens> Spanned for T {
self.__span()
}
}
mod private {
use super::*;
pub trait Sealed {}
impl<T: ?Sized + ToTokens> Sealed for T {}
#[cfg(any(feature = "full", feature = "derive"))]
impl Sealed for crate::QSelf {}
}

View File

@ -2,6 +2,8 @@ use super::*;
ast_struct! {
/// A braced block containing Rust statements.
///
/// *This type is available only if Syn is built with the `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub struct Block {
pub brace_token: token::Brace,
@ -12,6 +14,8 @@ ast_struct! {
ast_enum! {
/// A statement, usually ending in a semicolon.
///
/// *This type is available only if Syn is built with the `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub enum Stmt {
/// A local (let) binding.
@ -20,71 +24,42 @@ ast_enum! {
/// An item definition.
Item(Item),
/// Expression, with or without trailing semicolon.
Expr(Expr, Option<Token![;]>),
/// Expr without trailing semicolon.
Expr(Expr),
/// A macro invocation in statement position.
///
/// Syntactically it's ambiguous which other kind of statement this
/// macro would expand to. It can be any of local variable (`let`),
/// item, or expression.
Macro(StmtMacro),
/// Expression with trailing semicolon.
Semi(Expr, Token![;]),
}
}
ast_struct! {
/// A local `let` binding: `let x: u64 = s.parse()?`.
///
/// *This type is available only if Syn is built with the `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub struct Local {
pub attrs: Vec<Attribute>,
pub let_token: Token![let],
pub pat: Pat,
pub init: Option<LocalInit>,
pub init: Option<(Token![=], Box<Expr>)>,
pub semi_token: Token![;],
}
}
ast_struct! {
/// The expression assigned in a local `let` binding, including optional
/// diverging `else` block.
///
/// `LocalInit` represents `= s.parse()?` in `let x: u64 = s.parse()?` and
/// `= r else { return }` in `let Ok(x) = r else { return }`.
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub struct LocalInit {
pub eq_token: Token![=],
pub expr: Box<Expr>,
pub diverge: Option<(Token![else], Box<Expr>)>,
}
}
ast_struct! {
/// A macro invocation in statement position.
///
/// Syntactically it's ambiguous which other kind of statement this macro
/// would expand to. It can be any of local variable (`let`), item, or
/// expression.
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub struct StmtMacro {
pub attrs: Vec<Attribute>,
pub mac: Macro,
pub semi_token: Option<Token![;]>,
}
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::parse::discouraged::Speculative as _;
use crate::parse::{Parse, ParseStream, Result};
use crate::parse::discouraged::Speculative;
use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
use proc_macro2::TokenStream;
struct AllowNoSemi(bool);
impl Block {
/// Parse the body of a block as zero or more statements, possibly
/// including one trailing expression.
///
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
///
/// # Example
///
/// ```
@ -135,25 +110,23 @@ pub(crate) mod parsing {
pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
let mut stmts = Vec::new();
loop {
while let semi @ Some(_) = input.parse()? {
stmts.push(Stmt::Expr(Expr::Verbatim(TokenStream::new()), semi));
while let Some(semi) = input.parse::<Option<Token![;]>>()? {
stmts.push(Stmt::Semi(Expr::Verbatim(TokenStream::new()), semi));
}
if input.is_empty() {
break;
}
let stmt = parse_stmt(input, AllowNoSemi(true))?;
let requires_semicolon = match &stmt {
Stmt::Expr(stmt, None) => expr::requires_terminator(stmt),
Stmt::Macro(stmt) => {
stmt.semi_token.is_none() && !stmt.mac.delimiter.is_brace()
}
Stmt::Local(_) | Stmt::Item(_) | Stmt::Expr(_, Some(_)) => false,
let s = parse_stmt(input, true)?;
let requires_semicolon = if let Stmt::Expr(s) = &s {
expr::requires_terminator(s)
} else {
false
};
stmts.push(stmt);
stmts.push(s);
if input.is_empty() {
break;
} else if requires_semicolon {
return Err(input.error("unexpected token, expected `;`"));
return Err(input.error("unexpected token"));
}
}
Ok(stmts)
@ -174,34 +147,30 @@ pub(crate) mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Stmt {
fn parse(input: ParseStream) -> Result<Self> {
let allow_nosemi = AllowNoSemi(false);
parse_stmt(input, allow_nosemi)
parse_stmt(input, false)
}
}
fn parse_stmt(input: ParseStream, allow_nosemi: AllowNoSemi) -> Result<Stmt> {
fn parse_stmt(input: ParseStream, allow_nosemi: bool) -> Result<Stmt> {
let begin = input.fork();
let attrs = input.call(Attribute::parse_outer)?;
let mut attrs = input.call(Attribute::parse_outer)?;
// brace-style macros; paren and bracket macros get parsed as
// expression statements.
let ahead = input.fork();
let mut is_item_macro = false;
if let Ok(path) = ahead.call(Path::parse_mod_style) {
if ahead.peek(Token![!]) {
if ahead.peek2(Ident) || ahead.peek2(Token![try]) {
is_item_macro = true;
} else if ahead.peek2(token::Brace)
if ahead.peek(Token![!])
&& (ahead.peek2(token::Brace)
&& !(ahead.peek3(Token![.]) || ahead.peek3(Token![?]))
{
input.advance_to(&ahead);
return stmt_mac(input, attrs, path).map(Stmt::Macro);
}
|| ahead.peek2(Ident))
{
input.advance_to(&ahead);
return stmt_mac(input, attrs, path);
}
}
if input.peek(Token![let]) && !input.peek(token::Group) {
stmt_local(input, attrs).map(Stmt::Local)
if input.peek(Token![let]) {
stmt_local(input, attrs, begin)
} else if input.peek(Token![pub])
|| input.peek(Token![crate]) && !input.peek2(Token![::])
|| input.peek(Token![extern])
@ -211,15 +180,7 @@ pub(crate) mod parsing {
|| input.peek2(Ident)
&& !(input.peek2(Token![async])
&& (input.peek3(Token![move]) || input.peek3(Token![|]))))
|| input.peek(Token![const])
&& !(input.peek2(token::Brace)
|| input.peek2(Token![static])
|| input.peek2(Token![async])
&& !(input.peek3(Token![unsafe])
|| input.peek3(Token![extern])
|| input.peek3(Token![fn]))
|| input.peek2(Token![move])
|| input.peek2(Token![|]))
|| input.peek(Token![const]) && !input.peek2(token::Brace)
|| input.peek(Token![unsafe]) && !input.peek2(token::Brace)
|| input.peek(Token![async])
&& (input.peek2(Token![unsafe])
@ -237,22 +198,25 @@ pub(crate) mod parsing {
&& (input.peek2(Token![unsafe]) || input.peek2(Token![impl]))
|| input.peek(Token![impl])
|| input.peek(Token![macro])
|| is_item_macro
{
let item = item::parsing::parse_rest_of_item(begin, attrs, input)?;
let mut item: Item = input.parse()?;
attrs.extend(item.replace_attrs(Vec::new()));
item.replace_attrs(attrs);
Ok(Stmt::Item(item))
} else {
stmt_expr(input, allow_nosemi, attrs)
}
}
fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<StmtMacro> {
fn stmt_mac(input: ParseStream, attrs: Vec<Attribute>, path: Path) -> Result<Stmt> {
let bang_token: Token![!] = input.parse()?;
let ident: Option<Ident> = input.parse()?;
let (delimiter, tokens) = mac::parse_delimiter(input)?;
let semi_token: Option<Token![;]> = input.parse()?;
Ok(StmtMacro {
Ok(Stmt::Item(Item::Macro(ItemMacro {
attrs,
ident,
mac: Macro {
path,
bang_token,
@ -260,13 +224,13 @@ pub(crate) mod parsing {
tokens,
},
semi_token,
})
})))
}
fn stmt_local(input: ParseStream, attrs: Vec<Attribute>) -> Result<Local> {
fn stmt_local(input: ParseStream, attrs: Vec<Attribute>, begin: ParseBuffer) -> Result<Stmt> {
let let_token: Token![let] = input.parse()?;
let mut pat = Pat::parse_single(input)?;
let mut pat: Pat = pat::parsing::multi_pat_with_leading_vert(input)?;
if input.peek(Token![:]) {
let colon_token: Token![:] = input.parse()?;
let ty: Type = input.parse()?;
@ -278,45 +242,39 @@ pub(crate) mod parsing {
});
}
let init = if let Some(eq_token) = input.parse()? {
let eq_token: Token![=] = eq_token;
let expr: Expr = input.parse()?;
let init = if input.peek(Token![=]) {
let eq_token: Token![=] = input.parse()?;
let init: Expr = input.parse()?;
let diverge = if let Some(else_token) = input.parse()? {
let else_token: Token![else] = else_token;
let diverge = ExprBlock {
attrs: Vec::new(),
label: None,
block: input.parse()?,
};
Some((else_token, Box::new(Expr::Block(diverge))))
} else {
None
};
if input.peek(Token![else]) {
input.parse::<Token![else]>()?;
let content;
braced!(content in input);
content.call(Block::parse_within)?;
let verbatim = Expr::Verbatim(verbatim::between(begin, input));
let semi_token: Token![;] = input.parse()?;
return Ok(Stmt::Semi(verbatim, semi_token));
}
Some(LocalInit {
eq_token,
expr: Box::new(expr),
diverge,
})
Some((eq_token, Box::new(init)))
} else {
None
};
let semi_token: Token![;] = input.parse()?;
Ok(Local {
Ok(Stmt::Local(Local {
attrs,
let_token,
pat,
init,
semi_token,
})
}))
}
fn stmt_expr(
input: ParseStream,
allow_nosemi: AllowNoSemi,
allow_nosemi: bool,
mut attrs: Vec<Attribute>,
) -> Result<Stmt> {
let mut e = expr::parsing::expr_early(input)?;
@ -325,68 +283,20 @@ pub(crate) mod parsing {
loop {
attr_target = match attr_target {
Expr::Assign(e) => &mut e.left,
Expr::AssignOp(e) => &mut e.left,
Expr::Binary(e) => &mut e.left,
Expr::Cast(e) => &mut e.expr,
Expr::Array(_)
| Expr::Async(_)
| Expr::Await(_)
| Expr::Block(_)
| Expr::Break(_)
| Expr::Call(_)
| Expr::Closure(_)
| Expr::Const(_)
| Expr::Continue(_)
| Expr::Field(_)
| Expr::ForLoop(_)
| Expr::Group(_)
| Expr::If(_)
| Expr::Index(_)
| Expr::Infer(_)
| Expr::Let(_)
| Expr::Lit(_)
| Expr::Loop(_)
| Expr::Macro(_)
| Expr::Match(_)
| Expr::MethodCall(_)
| Expr::Paren(_)
| Expr::Path(_)
| Expr::Range(_)
| Expr::Reference(_)
| Expr::Repeat(_)
| Expr::Return(_)
| Expr::Struct(_)
| Expr::Try(_)
| Expr::TryBlock(_)
| Expr::Tuple(_)
| Expr::Unary(_)
| Expr::Unsafe(_)
| Expr::While(_)
| Expr::Yield(_)
| Expr::Verbatim(_) => break,
_ => break,
};
}
attrs.extend(attr_target.replace_attrs(Vec::new()));
attr_target.replace_attrs(attrs);
let semi_token: Option<Token![;]> = input.parse()?;
match e {
Expr::Macro(ExprMacro { attrs, mac })
if semi_token.is_some() || mac.delimiter.is_brace() =>
{
return Ok(Stmt::Macro(StmtMacro {
attrs,
mac,
semi_token,
}));
}
_ => {}
if input.peek(Token![;]) {
return Ok(Stmt::Semi(e, input.parse()?));
}
if semi_token.is_some() {
Ok(Stmt::Expr(e, semi_token))
} else if allow_nosemi.0 || !expr::requires_terminator(&e) {
Ok(Stmt::Expr(e, None))
if allow_nosemi || !expr::requires_terminator(&e) {
Ok(Stmt::Expr(e))
} else {
Err(input.error("expected semicolon"))
}
@ -414,11 +324,11 @@ mod printing {
match self {
Stmt::Local(local) => local.to_tokens(tokens),
Stmt::Item(item) => item.to_tokens(tokens),
Stmt::Expr(expr, semi) => {
Stmt::Expr(expr) => expr.to_tokens(tokens),
Stmt::Semi(expr, semi) => {
expr.to_tokens(tokens);
semi.to_tokens(tokens);
}
Stmt::Macro(mac) => mac.to_tokens(tokens),
}
}
}
@ -429,24 +339,11 @@ mod printing {
expr::printing::outer_attrs_to_tokens(&self.attrs, tokens);
self.let_token.to_tokens(tokens);
self.pat.to_tokens(tokens);
if let Some(init) = &self.init {
init.eq_token.to_tokens(tokens);
init.expr.to_tokens(tokens);
if let Some((else_token, diverge)) = &init.diverge {
else_token.to_tokens(tokens);
diverge.to_tokens(tokens);
}
if let Some((eq_token, init)) = &self.init {
eq_token.to_tokens(tokens);
init.to_tokens(tokens);
}
self.semi_token.to_tokens(tokens);
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for StmtMacro {
fn to_tokens(&self, tokens: &mut TokenStream) {
expr::printing::outer_attrs_to_tokens(&self.attrs, tokens);
self.mac.to_tokens(tokens);
self.semi_token.to_tokens(tokens);
}
}
}

View File

@ -4,7 +4,7 @@ use std::thread::{self, ThreadId};
/// ThreadBound is a Sync-maker and Send-maker that allows accessing a value
/// of type T only from the original thread on which the ThreadBound was
/// constructed.
pub(crate) struct ThreadBound<T> {
pub struct ThreadBound<T> {
value: T,
thread_id: ThreadId,
}
@ -12,20 +12,17 @@ pub(crate) struct ThreadBound<T> {
unsafe impl<T> Sync for ThreadBound<T> {}
// Send bound requires Copy, as otherwise Drop could run in the wrong place.
//
// Today Copy and Drop are mutually exclusive so `T: Copy` implies `T: !Drop`.
// This impl needs to be revisited if that restriction is relaxed in the future.
unsafe impl<T: Copy> Send for ThreadBound<T> {}
impl<T> ThreadBound<T> {
pub(crate) fn new(value: T) -> Self {
pub fn new(value: T) -> Self {
ThreadBound {
value,
thread_id: thread::current().id(),
}
}
pub(crate) fn get(&self) -> Option<&T> {
pub fn get(&self) -> Option<&T> {
if thread::current().id() == self.thread_id {
Some(&self.value)
} else {
@ -42,19 +39,3 @@ impl<T: Debug> Debug for ThreadBound<T> {
}
}
}
// Copy the bytes of T, even if the currently running thread is the "wrong"
// thread. This is fine as long as the original thread is not simultaneously
// mutating this value via interior mutability, which would be a data race.
//
// Currently `T: Copy` is sufficient to guarantee that T contains no interior
// mutability, because _all_ interior mutability in Rust is built on
// std::cell::UnsafeCell, which has no Copy impl. This impl needs to be
// revisited if that restriction is relaxed in the future.
impl<T: Copy> Copy for ThreadBound<T> {}
impl<T: Copy> Clone for ThreadBound<T> {
fn clone(&self) -> Self {
*self
}
}

View File

@ -88,8 +88,6 @@
//! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
//! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
#[cfg(feature = "parsing")]
pub(crate) use self::private::CustomToken;
use self::private::WithSpan;
#[cfg(feature = "parsing")]
use crate::buffer::Cursor;
@ -104,14 +102,13 @@ use crate::lookahead;
#[cfg(feature = "parsing")]
use crate::parse::{Parse, ParseStream};
use crate::span::IntoSpans;
use proc_macro2::extra::DelimSpan;
#[cfg(any(feature = "parsing", feature = "printing"))]
use proc_macro2::Ident;
use proc_macro2::Span;
#[cfg(feature = "printing")]
use proc_macro2::TokenStream;
#[cfg(any(feature = "parsing", feature = "printing"))]
use proc_macro2::{Delimiter, Ident};
#[cfg(feature = "parsing")]
use proc_macro2::{Literal, Punct, TokenTree};
use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
#[cfg(feature = "printing")]
use quote::{ToTokens, TokenStreamExt};
#[cfg(feature = "extra-traits")]
@ -136,9 +133,7 @@ pub trait Token: private::Sealed {
fn display() -> &'static str;
}
pub(crate) mod private {
#[cfg(feature = "parsing")]
use crate::buffer::Cursor;
mod private {
use proc_macro2::Span;
#[cfg(feature = "parsing")]
@ -146,19 +141,10 @@ pub(crate) mod private {
/// Support writing `token.span` rather than `token.spans[0]` on tokens that
/// hold a single span.
#[repr(transparent)]
#[allow(unknown_lints, repr_transparent_external_private_fields)] // False positive: https://github.com/rust-lang/rust/issues/78586#issuecomment-1722680482
#[repr(C)]
pub struct WithSpan {
pub span: Span,
}
// Not public API.
#[doc(hidden)]
#[cfg(feature = "parsing")]
pub trait CustomToken {
fn peek(cursor: Cursor) -> bool;
fn display() -> &'static str;
}
}
#[cfg(feature = "parsing")]
@ -177,7 +163,7 @@ fn peek_impl(cursor: Cursor, peek: fn(ParseStream) -> bool) -> bool {
}
macro_rules! impl_token {
($display:literal $name:ty) => {
($display:tt $name:ty) => {
#[cfg(feature = "parsing")]
impl Token for $name {
fn peek(cursor: Cursor) -> bool {
@ -209,7 +195,7 @@ impl_token!("boolean literal" LitBool);
impl_token!("group token" proc_macro2::Group);
macro_rules! impl_low_level_token {
($display:literal $ty:ident $get:ident) => {
($display:tt $ty:ident $get:ident) => {
#[cfg(feature = "parsing")]
impl Token for $ty {
fn peek(cursor: Cursor) -> bool {
@ -230,6 +216,14 @@ impl_low_level_token!("punctuation token" Punct punct);
impl_low_level_token!("literal" Literal literal);
impl_low_level_token!("token" TokenTree token_tree);
// Not public API.
#[doc(hidden)]
#[cfg(feature = "parsing")]
pub trait CustomToken {
fn peek(cursor: Cursor) -> bool;
fn display() -> &'static str;
}
#[cfg(feature = "parsing")]
impl<T: CustomToken> private::Sealed for T {}
@ -245,9 +239,9 @@ impl<T: CustomToken> Token for T {
}
macro_rules! define_keywords {
($($token:literal pub struct $name:ident)*) => {
($($token:tt pub struct $name:ident #[$doc:meta])*) => {
$(
#[doc = concat!('`', $token, '`')]
#[$doc]
///
/// Don't try to remember the name of this type &mdash; use the
/// [`Token!`] macro instead.
@ -259,9 +253,9 @@ macro_rules! define_keywords {
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn $name<S: IntoSpans<Span>>(span: S) -> $name {
pub fn $name<S: IntoSpans<[Span; 1]>>(span: S) -> $name {
$name {
span: span.into_spans(),
span: span.into_spans()[0],
}
}
@ -352,29 +346,25 @@ macro_rules! impl_deref_if_len_is_1 {
type Target = WithSpan;
fn deref(&self) -> &Self::Target {
unsafe { &*(self as *const Self).cast::<WithSpan>() }
unsafe { &*(self as *const Self as *const WithSpan) }
}
}
impl DerefMut for $name {
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { &mut *(self as *mut Self).cast::<WithSpan>() }
unsafe { &mut *(self as *mut Self as *mut WithSpan) }
}
}
};
($name:ident/$len:literal) => {};
($name:ident/$len:tt) => {};
}
macro_rules! define_punctuation_structs {
($($token:literal pub struct $name:ident/$len:tt #[doc = $usage:literal])*) => {
($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
$(
#[cfg_attr(not(doc), repr(transparent))]
#[allow(unknown_lints, repr_transparent_external_private_fields)] // False positive: https://github.com/rust-lang/rust/issues/78586#issuecomment-1722680482
#[doc = concat!('`', $token, '`')]
///
/// Usage:
#[doc = concat!($usage, '.')]
#[repr(C)]
#[$doc]
///
/// Don't try to remember the name of this type &mdash; use the
/// [`Token!`] macro instead.
@ -444,10 +434,10 @@ macro_rules! define_punctuation_structs {
}
macro_rules! define_punctuation {
($($token:literal pub struct $name:ident/$len:tt #[doc = $usage:literal])*) => {
($($token:tt pub struct $name:ident/$len:tt #[$doc:meta])*) => {
$(
define_punctuation_structs! {
$token pub struct $name/$len #[doc = $usage]
$token pub struct $name/$len #[$doc]
}
#[cfg(feature = "printing")]
@ -486,24 +476,26 @@ macro_rules! define_punctuation {
}
macro_rules! define_delimiters {
($($delim:ident pub struct $name:ident #[$doc:meta])*) => {
($($token:tt pub struct $name:ident #[$doc:meta])*) => {
$(
#[$doc]
pub struct $name {
pub span: DelimSpan,
pub span: Span,
}
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn $name<S: IntoSpans<DelimSpan>>(span: S) -> $name {
pub fn $name<S: IntoSpans<[Span; 1]>>(span: S) -> $name {
$name {
span: span.into_spans(),
span: span.into_spans()[0],
}
}
impl std::default::Default for $name {
fn default() -> Self {
$name(Span::call_site())
$name {
span: Span::call_site(),
}
}
}
@ -551,9 +543,7 @@ macro_rules! define_delimiters {
where
F: FnOnce(&mut TokenStream),
{
let mut inner = TokenStream::new();
f(&mut inner);
printing::delim(Delimiter::$delim, self.span.join(), tokens, inner);
printing::delim($token, self.span, tokens, f);
}
}
@ -564,7 +554,7 @@ macro_rules! define_delimiters {
}
define_punctuation_structs! {
"_" pub struct Underscore/1 /// wildcard patterns, inferred types, unnamed items in constants, extern crates, use declarations, and destructuring assignment
"_" pub struct Underscore/1 /// `_`
}
#[cfg(feature = "printing")]
@ -615,80 +605,6 @@ impl Token for Underscore {
#[cfg(feature = "parsing")]
impl private::Sealed for Underscore {}
/// None-delimited group
pub struct Group {
pub span: Span,
}
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn Group<S: IntoSpans<Span>>(span: S) -> Group {
Group {
span: span.into_spans(),
}
}
impl std::default::Default for Group {
fn default() -> Self {
Group {
span: Span::call_site(),
}
}
}
#[cfg(feature = "clone-impls")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Copy for Group {}
#[cfg(feature = "clone-impls")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
impl Clone for Group {
fn clone(&self) -> Self {
*self
}
}
#[cfg(feature = "extra-traits")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Debug for Group {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("Group")
}
}
#[cfg(feature = "extra-traits")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl cmp::Eq for Group {}
#[cfg(feature = "extra-traits")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl PartialEq for Group {
fn eq(&self, _other: &Group) -> bool {
true
}
}
#[cfg(feature = "extra-traits")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
impl Hash for Group {
fn hash<H: Hasher>(&self, _state: &mut H) {}
}
impl Group {
#[cfg(feature = "printing")]
pub fn surround<F>(&self, tokens: &mut TokenStream, f: F)
where
F: FnOnce(&mut TokenStream),
{
let mut inner = TokenStream::new();
f(&mut inner);
printing::delim(Delimiter::None, self.span, tokens, inner);
}
}
#[cfg(feature = "parsing")]
impl private::Sealed for Group {}
#[cfg(feature = "parsing")]
impl Token for Paren {
fn peek(cursor: Cursor) -> bool {
@ -734,295 +650,250 @@ impl Token for Group {
}
define_keywords! {
"abstract" pub struct Abstract
"as" pub struct As
"async" pub struct Async
"auto" pub struct Auto
"await" pub struct Await
"become" pub struct Become
"box" pub struct Box
"break" pub struct Break
"const" pub struct Const
"continue" pub struct Continue
"crate" pub struct Crate
"default" pub struct Default
"do" pub struct Do
"dyn" pub struct Dyn
"else" pub struct Else
"enum" pub struct Enum
"extern" pub struct Extern
"final" pub struct Final
"fn" pub struct Fn
"for" pub struct For
"if" pub struct If
"impl" pub struct Impl
"in" pub struct In
"let" pub struct Let
"loop" pub struct Loop
"macro" pub struct Macro
"match" pub struct Match
"mod" pub struct Mod
"move" pub struct Move
"mut" pub struct Mut
"override" pub struct Override
"priv" pub struct Priv
"pub" pub struct Pub
"ref" pub struct Ref
"return" pub struct Return
"Self" pub struct SelfType
"self" pub struct SelfValue
"static" pub struct Static
"struct" pub struct Struct
"super" pub struct Super
"trait" pub struct Trait
"try" pub struct Try
"type" pub struct Type
"typeof" pub struct Typeof
"union" pub struct Union
"unsafe" pub struct Unsafe
"unsized" pub struct Unsized
"use" pub struct Use
"virtual" pub struct Virtual
"where" pub struct Where
"while" pub struct While
"yield" pub struct Yield
"abstract" pub struct Abstract /// `abstract`
"as" pub struct As /// `as`
"async" pub struct Async /// `async`
"auto" pub struct Auto /// `auto`
"await" pub struct Await /// `await`
"become" pub struct Become /// `become`
"box" pub struct Box /// `box`
"break" pub struct Break /// `break`
"const" pub struct Const /// `const`
"continue" pub struct Continue /// `continue`
"crate" pub struct Crate /// `crate`
"default" pub struct Default /// `default`
"do" pub struct Do /// `do`
"dyn" pub struct Dyn /// `dyn`
"else" pub struct Else /// `else`
"enum" pub struct Enum /// `enum`
"extern" pub struct Extern /// `extern`
"final" pub struct Final /// `final`
"fn" pub struct Fn /// `fn`
"for" pub struct For /// `for`
"if" pub struct If /// `if`
"impl" pub struct Impl /// `impl`
"in" pub struct In /// `in`
"let" pub struct Let /// `let`
"loop" pub struct Loop /// `loop`
"macro" pub struct Macro /// `macro`
"match" pub struct Match /// `match`
"mod" pub struct Mod /// `mod`
"move" pub struct Move /// `move`
"mut" pub struct Mut /// `mut`
"override" pub struct Override /// `override`
"priv" pub struct Priv /// `priv`
"pub" pub struct Pub /// `pub`
"ref" pub struct Ref /// `ref`
"return" pub struct Return /// `return`
"Self" pub struct SelfType /// `Self`
"self" pub struct SelfValue /// `self`
"static" pub struct Static /// `static`
"struct" pub struct Struct /// `struct`
"super" pub struct Super /// `super`
"trait" pub struct Trait /// `trait`
"try" pub struct Try /// `try`
"type" pub struct Type /// `type`
"typeof" pub struct Typeof /// `typeof`
"union" pub struct Union /// `union`
"unsafe" pub struct Unsafe /// `unsafe`
"unsized" pub struct Unsized /// `unsized`
"use" pub struct Use /// `use`
"virtual" pub struct Virtual /// `virtual`
"where" pub struct Where /// `where`
"while" pub struct While /// `while`
"yield" pub struct Yield /// `yield`
}
define_punctuation! {
"&" pub struct And/1 /// bitwise and logical AND, borrow, references, reference patterns
"&&" pub struct AndAnd/2 /// lazy AND, borrow, references, reference patterns
"&=" pub struct AndEq/2 /// bitwise AND assignment
"@" pub struct At/1 /// subpattern binding
"^" pub struct Caret/1 /// bitwise and logical XOR
"^=" pub struct CaretEq/2 /// bitwise XOR assignment
":" pub struct Colon/1 /// various separators
"," pub struct Comma/1 /// various separators
"$" pub struct Dollar/1 /// macros
"." pub struct Dot/1 /// field access, tuple index
".." pub struct DotDot/2 /// range, struct expressions, patterns, range patterns
"..." pub struct DotDotDot/3 /// variadic functions, range patterns
"..=" pub struct DotDotEq/3 /// inclusive range, range patterns
"=" pub struct Eq/1 /// assignment, attributes, various type definitions
"==" pub struct EqEq/2 /// equal
"=>" pub struct FatArrow/2 /// match arms, macros
">=" pub struct Ge/2 /// greater than or equal to, generics
">" pub struct Gt/1 /// greater than, generics, paths
"<-" pub struct LArrow/2 /// unused
"<=" pub struct Le/2 /// less than or equal to
"<" pub struct Lt/1 /// less than, generics, paths
"-" pub struct Minus/1 /// subtraction, negation
"-=" pub struct MinusEq/2 /// subtraction assignment
"!=" pub struct Ne/2 /// not equal
"!" pub struct Not/1 /// bitwise and logical NOT, macro calls, inner attributes, never type, negative impls
"|" pub struct Or/1 /// bitwise and logical OR, closures, patterns in match, if let, and while let
"|=" pub struct OrEq/2 /// bitwise OR assignment
"||" pub struct OrOr/2 /// lazy OR, closures
"::" pub struct PathSep/2 /// path separator
"%" pub struct Percent/1 /// remainder
"%=" pub struct PercentEq/2 /// remainder assignment
"+" pub struct Plus/1 /// addition, trait bounds, macro Kleene matcher
"+=" pub struct PlusEq/2 /// addition assignment
"#" pub struct Pound/1 /// attributes
"?" pub struct Question/1 /// question mark operator, questionably sized, macro Kleene matcher
"->" pub struct RArrow/2 /// function return type, closure return type, function pointer type
";" pub struct Semi/1 /// terminator for various items and statements, array types
"<<" pub struct Shl/2 /// shift left, nested generics
"<<=" pub struct ShlEq/3 /// shift left assignment
">>" pub struct Shr/2 /// shift right, nested generics
">>=" pub struct ShrEq/3 /// shift right assignment, nested generics
"/" pub struct Slash/1 /// division
"/=" pub struct SlashEq/2 /// division assignment
"*" pub struct Star/1 /// multiplication, dereference, raw pointers, macro Kleene matcher, use wildcards
"*=" pub struct StarEq/2 /// multiplication assignment
"~" pub struct Tilde/1 /// unused since before Rust 1.0
"+" pub struct Add/1 /// `+`
"+=" pub struct AddEq/2 /// `+=`
"&" pub struct And/1 /// `&`
"&&" pub struct AndAnd/2 /// `&&`
"&=" pub struct AndEq/2 /// `&=`
"@" pub struct At/1 /// `@`
"!" pub struct Bang/1 /// `!`
"^" pub struct Caret/1 /// `^`
"^=" pub struct CaretEq/2 /// `^=`
":" pub struct Colon/1 /// `:`
"::" pub struct Colon2/2 /// `::`
"," pub struct Comma/1 /// `,`
"/" pub struct Div/1 /// `/`
"/=" pub struct DivEq/2 /// `/=`
"$" pub struct Dollar/1 /// `$`
"." pub struct Dot/1 /// `.`
".." pub struct Dot2/2 /// `..`
"..." pub struct Dot3/3 /// `...`
"..=" pub struct DotDotEq/3 /// `..=`
"=" pub struct Eq/1 /// `=`
"==" pub struct EqEq/2 /// `==`
">=" pub struct Ge/2 /// `>=`
">" pub struct Gt/1 /// `>`
"<=" pub struct Le/2 /// `<=`
"<" pub struct Lt/1 /// `<`
"*=" pub struct MulEq/2 /// `*=`
"!=" pub struct Ne/2 /// `!=`
"|" pub struct Or/1 /// `|`
"|=" pub struct OrEq/2 /// `|=`
"||" pub struct OrOr/2 /// `||`
"#" pub struct Pound/1 /// `#`
"?" pub struct Question/1 /// `?`
"->" pub struct RArrow/2 /// `->`
"<-" pub struct LArrow/2 /// `<-`
"%" pub struct Rem/1 /// `%`
"%=" pub struct RemEq/2 /// `%=`
"=>" pub struct FatArrow/2 /// `=>`
";" pub struct Semi/1 /// `;`
"<<" pub struct Shl/2 /// `<<`
"<<=" pub struct ShlEq/3 /// `<<=`
">>" pub struct Shr/2 /// `>>`
">>=" pub struct ShrEq/3 /// `>>=`
"*" pub struct Star/1 /// `*`
"-" pub struct Sub/1 /// `-`
"-=" pub struct SubEq/2 /// `-=`
"~" pub struct Tilde/1 /// `~`
}
define_delimiters! {
Brace pub struct Brace /// `{`&hellip;`}`
Bracket pub struct Bracket /// `[`&hellip;`]`
Parenthesis pub struct Paren /// `(`&hellip;`)`
"{" pub struct Brace /// `{...}`
"[" pub struct Bracket /// `[...]`
"(" pub struct Paren /// `(...)`
" " pub struct Group /// None-delimited group
}
/// A type-macro that expands to the name of the Rust type representation of a
/// given token.
///
/// As a type, `Token!` is commonly used in the type of struct fields, the type
/// of a `let` statement, or in turbofish for a `parse` function.
///
/// ```
/// use syn::{Ident, Token};
/// use syn::parse::{Parse, ParseStream, Result};
///
/// // `struct Foo;`
/// pub struct UnitStruct {
/// struct_token: Token![struct],
/// ident: Ident,
/// semi_token: Token![;],
/// }
///
/// impl Parse for UnitStruct {
/// fn parse(input: ParseStream) -> Result<Self> {
/// let struct_token: Token![struct] = input.parse()?;
/// let ident: Ident = input.parse()?;
/// let semi_token = input.parse::<Token![;]>()?;
/// Ok(UnitStruct { struct_token, ident, semi_token })
/// }
/// }
/// ```
///
/// As an expression, `Token!` is used for peeking tokens or instantiating
/// tokens from a span.
///
/// ```
/// # use syn::{Ident, Token};
/// # use syn::parse::{Parse, ParseStream, Result};
/// #
/// # struct UnitStruct {
/// # struct_token: Token![struct],
/// # ident: Ident,
/// # semi_token: Token![;],
/// # }
/// #
/// # impl Parse for UnitStruct {
/// # fn parse(input: ParseStream) -> Result<Self> {
/// # unimplemented!()
/// # }
/// # }
/// #
/// fn make_unit_struct(name: Ident) -> UnitStruct {
/// let span = name.span();
/// UnitStruct {
/// struct_token: Token![struct](span),
/// ident: name,
/// semi_token: Token![;](span),
/// }
/// }
///
/// # fn parse(input: ParseStream) -> Result<()> {
/// if input.peek(Token![struct]) {
/// let unit_struct: UnitStruct = input.parse()?;
/// /* ... */
/// }
/// # Ok(())
/// # }
/// ```
///
/// See the [token module] documentation for details and examples.
///
/// [token module]: crate::token
#[macro_export]
macro_rules! Token {
[abstract] => { $crate::token::Abstract };
[as] => { $crate::token::As };
[async] => { $crate::token::Async };
[auto] => { $crate::token::Auto };
[await] => { $crate::token::Await };
[become] => { $crate::token::Become };
[box] => { $crate::token::Box };
[break] => { $crate::token::Break };
[const] => { $crate::token::Const };
[continue] => { $crate::token::Continue };
[crate] => { $crate::token::Crate };
[default] => { $crate::token::Default };
[do] => { $crate::token::Do };
[dyn] => { $crate::token::Dyn };
[else] => { $crate::token::Else };
[enum] => { $crate::token::Enum };
[extern] => { $crate::token::Extern };
[final] => { $crate::token::Final };
[fn] => { $crate::token::Fn };
[for] => { $crate::token::For };
[if] => { $crate::token::If };
[impl] => { $crate::token::Impl };
[in] => { $crate::token::In };
[let] => { $crate::token::Let };
[loop] => { $crate::token::Loop };
[macro] => { $crate::token::Macro };
[match] => { $crate::token::Match };
[mod] => { $crate::token::Mod };
[move] => { $crate::token::Move };
[mut] => { $crate::token::Mut };
[override] => { $crate::token::Override };
[priv] => { $crate::token::Priv };
[pub] => { $crate::token::Pub };
[ref] => { $crate::token::Ref };
[return] => { $crate::token::Return };
[Self] => { $crate::token::SelfType };
[self] => { $crate::token::SelfValue };
[static] => { $crate::token::Static };
[struct] => { $crate::token::Struct };
[super] => { $crate::token::Super };
[trait] => { $crate::token::Trait };
[try] => { $crate::token::Try };
[type] => { $crate::token::Type };
[typeof] => { $crate::token::Typeof };
[union] => { $crate::token::Union };
[unsafe] => { $crate::token::Unsafe };
[unsized] => { $crate::token::Unsized };
[use] => { $crate::token::Use };
[virtual] => { $crate::token::Virtual };
[where] => { $crate::token::Where };
[while] => { $crate::token::While };
[yield] => { $crate::token::Yield };
[&] => { $crate::token::And };
[&&] => { $crate::token::AndAnd };
[&=] => { $crate::token::AndEq };
[@] => { $crate::token::At };
[^] => { $crate::token::Caret };
[^=] => { $crate::token::CaretEq };
[:] => { $crate::token::Colon };
[,] => { $crate::token::Comma };
[$] => { $crate::token::Dollar };
[.] => { $crate::token::Dot };
[..] => { $crate::token::DotDot };
[...] => { $crate::token::DotDotDot };
[..=] => { $crate::token::DotDotEq };
[=] => { $crate::token::Eq };
[==] => { $crate::token::EqEq };
[=>] => { $crate::token::FatArrow };
[>=] => { $crate::token::Ge };
[>] => { $crate::token::Gt };
[<-] => { $crate::token::LArrow };
[<=] => { $crate::token::Le };
[<] => { $crate::token::Lt };
[-] => { $crate::token::Minus };
[-=] => { $crate::token::MinusEq };
[!=] => { $crate::token::Ne };
[!] => { $crate::token::Not };
[|] => { $crate::token::Or };
[|=] => { $crate::token::OrEq };
[||] => { $crate::token::OrOr };
[::] => { $crate::token::PathSep };
[%] => { $crate::token::Percent };
[%=] => { $crate::token::PercentEq };
[+] => { $crate::token::Plus };
[+=] => { $crate::token::PlusEq };
[#] => { $crate::token::Pound };
[?] => { $crate::token::Question };
[->] => { $crate::token::RArrow };
[;] => { $crate::token::Semi };
[<<] => { $crate::token::Shl };
[<<=] => { $crate::token::ShlEq };
[>>] => { $crate::token::Shr };
[>>=] => { $crate::token::ShrEq };
[/] => { $crate::token::Slash };
[/=] => { $crate::token::SlashEq };
[*] => { $crate::token::Star };
[*=] => { $crate::token::StarEq };
[~] => { $crate::token::Tilde };
[_] => { $crate::token::Underscore };
macro_rules! export_token_macro {
($($await_rule:tt)*) => {
/// A type-macro that expands to the name of the Rust type representation of a
/// given token.
///
/// See the [token module] documentation for details and examples.
///
/// [token module]: crate::token
// Unfortunate duplication due to a rustdoc bug.
// https://github.com/rust-lang/rust/issues/45939
#[macro_export]
macro_rules! Token {
[abstract] => { $crate::token::Abstract };
[as] => { $crate::token::As };
[async] => { $crate::token::Async };
[auto] => { $crate::token::Auto };
$($await_rule => { $crate::token::Await };)*
[become] => { $crate::token::Become };
[box] => { $crate::token::Box };
[break] => { $crate::token::Break };
[const] => { $crate::token::Const };
[continue] => { $crate::token::Continue };
[crate] => { $crate::token::Crate };
[default] => { $crate::token::Default };
[do] => { $crate::token::Do };
[dyn] => { $crate::token::Dyn };
[else] => { $crate::token::Else };
[enum] => { $crate::token::Enum };
[extern] => { $crate::token::Extern };
[final] => { $crate::token::Final };
[fn] => { $crate::token::Fn };
[for] => { $crate::token::For };
[if] => { $crate::token::If };
[impl] => { $crate::token::Impl };
[in] => { $crate::token::In };
[let] => { $crate::token::Let };
[loop] => { $crate::token::Loop };
[macro] => { $crate::token::Macro };
[match] => { $crate::token::Match };
[mod] => { $crate::token::Mod };
[move] => { $crate::token::Move };
[mut] => { $crate::token::Mut };
[override] => { $crate::token::Override };
[priv] => { $crate::token::Priv };
[pub] => { $crate::token::Pub };
[ref] => { $crate::token::Ref };
[return] => { $crate::token::Return };
[Self] => { $crate::token::SelfType };
[self] => { $crate::token::SelfValue };
[static] => { $crate::token::Static };
[struct] => { $crate::token::Struct };
[super] => { $crate::token::Super };
[trait] => { $crate::token::Trait };
[try] => { $crate::token::Try };
[type] => { $crate::token::Type };
[typeof] => { $crate::token::Typeof };
[union] => { $crate::token::Union };
[unsafe] => { $crate::token::Unsafe };
[unsized] => { $crate::token::Unsized };
[use] => { $crate::token::Use };
[virtual] => { $crate::token::Virtual };
[where] => { $crate::token::Where };
[while] => { $crate::token::While };
[yield] => { $crate::token::Yield };
[+] => { $crate::token::Add };
[+=] => { $crate::token::AddEq };
[&] => { $crate::token::And };
[&&] => { $crate::token::AndAnd };
[&=] => { $crate::token::AndEq };
[@] => { $crate::token::At };
[!] => { $crate::token::Bang };
[^] => { $crate::token::Caret };
[^=] => { $crate::token::CaretEq };
[:] => { $crate::token::Colon };
[::] => { $crate::token::Colon2 };
[,] => { $crate::token::Comma };
[/] => { $crate::token::Div };
[/=] => { $crate::token::DivEq };
[$] => { $crate::token::Dollar };
[.] => { $crate::token::Dot };
[..] => { $crate::token::Dot2 };
[...] => { $crate::token::Dot3 };
[..=] => { $crate::token::DotDotEq };
[=] => { $crate::token::Eq };
[==] => { $crate::token::EqEq };
[>=] => { $crate::token::Ge };
[>] => { $crate::token::Gt };
[<=] => { $crate::token::Le };
[<] => { $crate::token::Lt };
[*=] => { $crate::token::MulEq };
[!=] => { $crate::token::Ne };
[|] => { $crate::token::Or };
[|=] => { $crate::token::OrEq };
[||] => { $crate::token::OrOr };
[#] => { $crate::token::Pound };
[?] => { $crate::token::Question };
[->] => { $crate::token::RArrow };
[<-] => { $crate::token::LArrow };
[%] => { $crate::token::Rem };
[%=] => { $crate::token::RemEq };
[=>] => { $crate::token::FatArrow };
[;] => { $crate::token::Semi };
[<<] => { $crate::token::Shl };
[<<=] => { $crate::token::ShlEq };
[>>] => { $crate::token::Shr };
[>>=] => { $crate::token::ShrEq };
[*] => { $crate::token::Star };
[-] => { $crate::token::Sub };
[-=] => { $crate::token::SubEq };
[~] => { $crate::token::Tilde };
[_] => { $crate::token::Underscore };
}
};
}
// Old rustc does not permit `await` appearing anywhere in the source file.
// https://github.com/rust-lang/rust/issues/57919
// We put the Token![await] rule in a place that is not lexed by old rustc.
#[cfg(not(syn_omit_await_from_token_macro))]
include!("await.rs"); // export_token_macro! {[await]}
#[cfg(syn_omit_await_from_token_macro)]
export_token_macro! {}
// Not public API.
#[doc(hidden)]
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use crate::buffer::Cursor;
use crate::error::{Error, Result};
use crate::parse::ParseStream;
use crate::span::FromSpans;
use proc_macro2::{Spacing, Span};
pub(crate) fn keyword(input: ParseStream, token: &str) -> Result<Span> {
pub fn keyword(input: ParseStream, token: &str) -> Result<Span> {
input.step(|cursor| {
if let Some((ident, rest)) = cursor.ident() {
if ident == token {
@ -1033,7 +904,7 @@ pub(crate) mod parsing {
})
}
pub(crate) fn peek_keyword(cursor: Cursor, token: &str) -> bool {
pub fn peek_keyword(cursor: Cursor, token: &str) -> bool {
if let Some((ident, _rest)) = cursor.ident() {
ident == token
} else {
@ -1041,17 +912,16 @@ pub(crate) mod parsing {
}
}
#[doc(hidden)]
pub fn punct<const N: usize>(input: ParseStream, token: &str) -> Result<[Span; N]> {
let mut spans = [input.span(); N];
pub fn punct<S: FromSpans>(input: ParseStream, token: &str) -> Result<S> {
let mut spans = [input.span(); 3];
punct_helper(input, token, &mut spans)?;
Ok(spans)
Ok(S::from_spans(&spans))
}
fn punct_helper(input: ParseStream, token: &str, spans: &mut [Span]) -> Result<()> {
fn punct_helper(input: ParseStream, token: &str, spans: &mut [Span; 3]) -> Result<()> {
input.step(|cursor| {
let mut cursor = *cursor;
assert_eq!(token.len(), spans.len());
assert!(token.len() <= spans.len());
for (i, ch) in token.chars().enumerate() {
match cursor.punct() {
@ -1074,7 +944,6 @@ pub(crate) mod parsing {
})
}
#[doc(hidden)]
pub fn peek_punct(mut cursor: Cursor, token: &str) -> bool {
for (i, ch) in token.chars().enumerate() {
match cursor.punct() {
@ -1098,11 +967,10 @@ pub(crate) mod parsing {
// Not public API.
#[doc(hidden)]
#[cfg(feature = "printing")]
pub(crate) mod printing {
pub mod printing {
use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream};
use quote::TokenStreamExt;
#[doc(hidden)]
pub fn punct(s: &str, spans: &[Span], tokens: &mut TokenStream) {
assert_eq!(s.len(), spans.len());
@ -1121,16 +989,23 @@ pub(crate) mod printing {
tokens.append(op);
}
pub(crate) fn keyword(s: &str, span: Span, tokens: &mut TokenStream) {
pub fn keyword(s: &str, span: Span, tokens: &mut TokenStream) {
tokens.append(Ident::new(s, span));
}
pub(crate) fn delim(
delim: Delimiter,
span: Span,
tokens: &mut TokenStream,
inner: TokenStream,
) {
pub fn delim<F>(s: &str, span: Span, tokens: &mut TokenStream, f: F)
where
F: FnOnce(&mut TokenStream),
{
let delim = match s {
"(" => Delimiter::Parenthesis,
"[" => Delimiter::Bracket,
"{" => Delimiter::Brace,
" " => Delimiter::None,
_ => panic!("unknown delimiter: {}", s),
};
let mut inner = TokenStream::new();
f(&mut inner);
let mut g = Group::new(delim, inner);
g.set_span(span);
tokens.append(g);

View File

@ -1,7 +1,7 @@
use proc_macro2::{Delimiter, TokenStream, TokenTree};
use std::hash::{Hash, Hasher};
pub(crate) struct TokenTreeHelper<'a>(pub &'a TokenTree);
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);
impl<'a> PartialEq for TokenTreeHelper<'a> {
fn eq(&self, other: &Self) -> bool {
@ -78,7 +78,7 @@ impl<'a> Hash for TokenTreeHelper<'a> {
}
}
pub(crate) struct TokenStreamHelper<'a>(pub &'a TokenStream);
pub struct TokenStreamHelper<'a>(pub &'a TokenStream);
impl<'a> PartialEq for TokenStreamHelper<'a> {
fn eq(&self, other: &Self) -> bool {

419
src/ty.rs
View File

@ -5,13 +5,16 @@ use proc_macro2::TokenStream;
ast_enum_of_structs! {
/// The possible types that a Rust value could have.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
///
/// # Syntax tree enum
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
#[non_exhaustive]
#[cfg_attr(not(syn_no_non_exhaustive), non_exhaustive)]
pub enum Type {
/// A fixed size array type: `[T; n]`.
Array(TypeArray),
@ -61,16 +64,17 @@ ast_enum_of_structs! {
/// Tokens in type position not interpreted by Syn.
Verbatim(TokenStream),
// Not public API.
//
// For testing exhaustiveness in downstream code, use the following idiom:
//
// match ty {
// #![cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
//
// Type::Array(ty) => {...}
// Type::BareFn(ty) => {...}
// ...
// Type::Verbatim(ty) => {...}
//
// #[cfg_attr(test, deny(non_exhaustive_omitted_patterns))]
// _ => { /* some sane fallback */ }
// }
//
@ -78,11 +82,17 @@ ast_enum_of_structs! {
// a variant. You will be notified by a test failure when a variant is
// added, so that you can add code to handle it, but your library will
// continue to compile and work for downstream users in the interim.
#[cfg(syn_no_non_exhaustive)]
#[doc(hidden)]
__NonExhaustive,
}
}
ast_struct! {
/// A fixed size array type: `[T; n]`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeArray {
pub bracket_token: token::Bracket,
@ -94,6 +104,9 @@ ast_struct! {
ast_struct! {
/// A bare function type: `fn(usize) -> bool`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeBareFn {
pub lifetimes: Option<BoundLifetimes>,
@ -102,13 +115,16 @@ ast_struct! {
pub fn_token: Token![fn],
pub paren_token: token::Paren,
pub inputs: Punctuated<BareFnArg, Token![,]>,
pub variadic: Option<BareVariadic>,
pub variadic: Option<Variadic>,
pub output: ReturnType,
}
}
ast_struct! {
/// A type contained within invisible delimiters.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeGroup {
pub group_token: token::Group,
@ -119,6 +135,9 @@ ast_struct! {
ast_struct! {
/// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
/// a lifetime.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeImplTrait {
pub impl_token: Token![impl],
@ -128,6 +147,9 @@ ast_struct! {
ast_struct! {
/// Indication that a type should be inferred by the compiler: `_`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeInfer {
pub underscore_token: Token![_],
@ -136,6 +158,9 @@ ast_struct! {
ast_struct! {
/// A macro in the type position.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeMacro {
pub mac: Macro,
@ -144,6 +169,9 @@ ast_struct! {
ast_struct! {
/// The never type: `!`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeNever {
pub bang_token: Token![!],
@ -152,6 +180,9 @@ ast_struct! {
ast_struct! {
/// A parenthesized type equivalent to the inner type.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeParen {
pub paren_token: token::Paren,
@ -162,6 +193,9 @@ ast_struct! {
ast_struct! {
/// A path like `std::slice::Iter`, optionally qualified with a
/// self-type as in `<Vec<T> as SomeTrait>::Associated`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypePath {
pub qself: Option<QSelf>,
@ -171,6 +205,9 @@ ast_struct! {
ast_struct! {
/// A raw pointer type: `*const T` or `*mut T`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypePtr {
pub star_token: Token![*],
@ -182,6 +219,9 @@ ast_struct! {
ast_struct! {
/// A reference type: `&'a T` or `&'a mut T`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeReference {
pub and_token: Token![&],
@ -193,6 +233,9 @@ ast_struct! {
ast_struct! {
/// A dynamically sized slice type: `[T]`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeSlice {
pub bracket_token: token::Bracket,
@ -203,6 +246,9 @@ ast_struct! {
ast_struct! {
/// A trait object type `dyn Bound1 + Bound2 + Bound3` where `Bound` is a
/// trait or a lifetime.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeTraitObject {
pub dyn_token: Option<Token![dyn]>,
@ -212,6 +258,9 @@ ast_struct! {
ast_struct! {
/// A tuple type: `(A, B, C, String)`.
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeTuple {
pub paren_token: token::Paren,
@ -221,6 +270,9 @@ ast_struct! {
ast_struct! {
/// The binary interface of a function: `extern "C"`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Abi {
pub extern_token: Token![extern],
@ -230,6 +282,9 @@ ast_struct! {
ast_struct! {
/// An argument in a function type: the `usize` in `fn(usize) -> bool`.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct BareFnArg {
pub attrs: Vec<Attribute>,
@ -239,18 +294,32 @@ ast_struct! {
}
ast_struct! {
/// The variadic argument of a function pointer like `fn(usize, ...)`.
/// The variadic argument of a foreign function.
///
/// ```rust
/// # struct c_char;
/// # struct c_int;
/// #
/// extern "C" {
/// fn printf(format: *const c_char, ...) -> c_int;
/// // ^^^
/// }
/// ```
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct BareVariadic {
pub struct Variadic {
pub attrs: Vec<Attribute>,
pub name: Option<(Ident, Token![:])>,
pub dots: Token![...],
pub comma: Option<Token![,]>,
}
}
ast_enum! {
/// Return type of a function signature.
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum ReturnType {
/// Return type is not specified.
@ -263,12 +332,12 @@ ast_enum! {
}
#[cfg(feature = "parsing")]
pub(crate) mod parsing {
pub mod parsing {
use super::*;
use crate::ext::IdentExt as _;
use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
use crate::path;
use proc_macro2::Span;
use proc_macro2::{Punct, Spacing, Span, TokenTree};
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Type {
@ -323,7 +392,7 @@ pub(crate) mod parsing {
{
if let Type::Path(mut ty) = *group.elem {
let arguments = &mut ty.path.segments.last_mut().unwrap().arguments;
if arguments.is_none() {
if let PathArguments::None = arguments {
*arguments = PathArguments::AngleBracketed(input.parse()?);
Path::parse_rest(input, &mut ty.path, false)?;
return Ok(Type::Path(ty));
@ -434,8 +503,7 @@ pub(crate) mod parsing {
..trait_bound
})
}
other @ (TypeParamBound::Lifetime(_)
| TypeParamBound::Verbatim(_)) => other,
other @ TypeParamBound::Lifetime(_) => other,
}
}
_ => break,
@ -462,9 +530,13 @@ pub(crate) mod parsing {
|| lookahead.peek(Token![unsafe])
|| lookahead.peek(Token![extern])
{
let mut bare_fn: TypeBareFn = input.parse()?;
bare_fn.lifetimes = lifetimes;
Ok(Type::BareFn(bare_fn))
let allow_mut_self = true;
if let Some(mut bare_fn) = parse_bare_fn(input, allow_mut_self)? {
bare_fn.lifetimes = lifetimes;
Ok(Type::BareFn(bare_fn))
} else {
Ok(Type::Verbatim(verbatim::between(begin, input)))
}
} else if lookahead.peek(Ident)
|| input.peek(Token![super])
|| input.peek(Token![self])
@ -473,22 +545,49 @@ pub(crate) mod parsing {
|| lookahead.peek(Token![::])
|| lookahead.peek(Token![<])
{
let dyn_token: Option<Token![dyn]> = input.parse()?;
if let Some(dyn_token) = dyn_token {
let dyn_span = dyn_token.span;
let star_token: Option<Token![*]> = input.parse()?;
let bounds = TypeTraitObject::parse_bounds(dyn_span, input, allow_plus)?;
return Ok(if star_token.is_some() {
Type::Verbatim(verbatim::between(begin, input))
} else {
Type::TraitObject(TypeTraitObject {
dyn_token: Some(dyn_token),
bounds,
})
});
}
let ty: TypePath = input.parse()?;
if ty.qself.is_some() {
return Ok(Type::Path(ty));
}
if input.peek(Token![!]) && !input.peek(Token![!=]) && ty.path.is_mod_style() {
let bang_token: Token![!] = input.parse()?;
let (delimiter, tokens) = mac::parse_delimiter(input)?;
return Ok(Type::Macro(TypeMacro {
mac: Macro {
path: ty.path,
bang_token,
delimiter,
tokens,
},
}));
if input.peek(Token![!]) && !input.peek(Token![!=]) {
let mut contains_arguments = false;
for segment in &ty.path.segments {
match segment.arguments {
PathArguments::None => {}
PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => {
contains_arguments = true;
}
}
}
if !contains_arguments {
let bang_token: Token![!] = input.parse()?;
let (delimiter, tokens) = mac::parse_delimiter(input)?;
return Ok(Type::Macro(TypeMacro {
mac: Macro {
path: ty.path,
bang_token,
delimiter,
tokens,
},
}));
}
}
if lifetimes.is_some() || allow_plus && input.peek(Token![+]) {
@ -520,19 +619,6 @@ pub(crate) mod parsing {
}
Ok(Type::Path(ty))
} else if lookahead.peek(Token![dyn]) {
let dyn_token: Token![dyn] = input.parse()?;
let dyn_span = dyn_token.span;
let star_token: Option<Token![*]> = input.parse()?;
let bounds = TypeTraitObject::parse_bounds(dyn_span, input, allow_plus)?;
return Ok(if star_token.is_some() {
Type::Verbatim(verbatim::between(&begin, input))
} else {
Type::TraitObject(TypeTraitObject {
dyn_token: Some(dyn_token),
bounds,
})
});
} else if lookahead.peek(token::Bracket) {
let content;
let bracket_token = bracketed!(content in input);
@ -630,47 +716,61 @@ pub(crate) mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for TypeBareFn {
fn parse(input: ParseStream) -> Result<Self> {
let args;
let mut variadic = None;
let allow_mut_self = false;
parse_bare_fn(input, allow_mut_self).map(Option::unwrap)
}
}
Ok(TypeBareFn {
lifetimes: input.parse()?,
unsafety: input.parse()?,
abi: input.parse()?,
fn_token: input.parse()?,
paren_token: parenthesized!(args in input),
inputs: {
let mut inputs = Punctuated::new();
fn parse_bare_fn(input: ParseStream, allow_mut_self: bool) -> Result<Option<TypeBareFn>> {
let args;
let mut variadic = None;
let mut has_mut_self = false;
while !args.is_empty() {
let attrs = args.call(Attribute::parse_outer)?;
let bare_fn = TypeBareFn {
lifetimes: input.parse()?,
unsafety: input.parse()?,
abi: input.parse()?,
fn_token: input.parse()?,
paren_token: parenthesized!(args in input),
inputs: {
let mut inputs = Punctuated::new();
if inputs.empty_or_trailing()
&& (args.peek(Token![...])
|| args.peek(Ident)
&& args.peek2(Token![:])
&& args.peek3(Token![...]))
{
variadic = Some(parse_bare_variadic(&args, attrs)?);
break;
}
while !args.is_empty() {
let attrs = args.call(Attribute::parse_outer)?;
let allow_self = inputs.is_empty();
let arg = parse_bare_fn_arg(&args, allow_self)?;
inputs.push_value(BareFnArg { attrs, ..arg });
if args.is_empty() {
break;
}
let comma = args.parse()?;
inputs.push_punct(comma);
if inputs.empty_or_trailing() && args.peek(Token![...]) {
variadic = Some(Variadic {
attrs,
dots: args.parse()?,
});
break;
}
inputs
},
variadic,
output: input.call(ReturnType::without_plus)?,
})
if let Some(arg) = parse_bare_fn_arg(&args, allow_mut_self)? {
inputs.push_value(BareFnArg { attrs, ..arg });
} else {
has_mut_self = true;
}
if args.is_empty() {
break;
}
let comma = args.parse()?;
if !has_mut_self {
inputs.push_punct(comma);
}
}
inputs
},
variadic,
output: input.call(ReturnType::without_plus)?,
};
if has_mut_self {
Ok(None)
} else {
Ok(Some(bare_fn))
}
}
@ -738,7 +838,29 @@ pub(crate) mod parsing {
impl Parse for TypePath {
fn parse(input: ParseStream) -> Result<Self> {
let expr_style = false;
let (qself, path) = path::parsing::qpath(input, expr_style)?;
let (qself, mut path) = path::parsing::qpath(input, expr_style)?;
while path.segments.last().unwrap().arguments.is_empty()
&& (input.peek(token::Paren) || input.peek(Token![::]) && input.peek3(token::Paren))
{
input.parse::<Option<Token![::]>>()?;
let args: ParenthesizedGenericArguments = input.parse()?;
let allow_associated_type = cfg!(feature = "full")
&& match &args.output {
ReturnType::Default => true,
ReturnType::Type(_, ty) => match **ty {
// TODO: probably some of the other kinds allow this too.
Type::Paren(_) => true,
_ => false,
},
};
let parenthesized = PathArguments::Parenthesized(args);
path.segments.last_mut().unwrap().arguments = parenthesized;
if allow_associated_type {
Path::parse_rest(input, &mut path, expr_style)?;
}
}
Ok(TypePath { qself, path })
}
}
@ -806,7 +928,7 @@ pub(crate) mod parsing {
let mut at_least_one_trait = false;
for bound in &bounds {
match bound {
TypeParamBound::Trait(_) | TypeParamBound::Verbatim(_) => {
TypeParamBound::Trait(_) => {
at_least_one_trait = true;
break;
}
@ -846,7 +968,7 @@ pub(crate) mod parsing {
let mut at_least_one_trait = false;
for bound in &bounds {
match bound {
TypeParamBound::Trait(_) | TypeParamBound::Verbatim(_) => {
TypeParamBound::Trait(_) => {
at_least_one_trait = true;
break;
}
@ -902,72 +1024,75 @@ pub(crate) mod parsing {
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for BareFnArg {
fn parse(input: ParseStream) -> Result<Self> {
let allow_self = false;
parse_bare_fn_arg(input, allow_self)
let allow_mut_self = false;
parse_bare_fn_arg(input, allow_mut_self).map(Option::unwrap)
}
}
fn parse_bare_fn_arg(input: ParseStream, allow_self: bool) -> Result<BareFnArg> {
let attrs = input.call(Attribute::parse_outer)?;
let begin = input.fork();
let has_mut_self = allow_self && input.peek(Token![mut]) && input.peek2(Token![self]);
if has_mut_self {
input.parse::<Token![mut]>()?;
}
let mut has_self = false;
let mut name = if (input.peek(Ident) || input.peek(Token![_]) || {
has_self = allow_self && input.peek(Token![self]);
has_self
}) && input.peek2(Token![:])
&& !input.peek2(Token![::])
{
let name = input.call(Ident::parse_any)?;
let colon: Token![:] = input.parse()?;
Some((name, colon))
} else {
has_self = false;
None
};
let ty = if allow_self && !has_self && input.peek(Token![mut]) && input.peek2(Token![self])
{
input.parse::<Token![mut]>()?;
input.parse::<Token![self]>()?;
None
} else if has_mut_self && name.is_none() {
input.parse::<Token![self]>()?;
None
} else {
Some(input.parse()?)
};
let ty = match ty {
Some(ty) if !has_mut_self => ty,
_ => {
name = None;
Type::Verbatim(verbatim::between(&begin, input))
}
};
Ok(BareFnArg { attrs, name, ty })
}
fn parse_bare_variadic(input: ParseStream, attrs: Vec<Attribute>) -> Result<BareVariadic> {
Ok(BareVariadic {
attrs,
name: if input.peek(Ident) || input.peek(Token![_]) {
let name = input.call(Ident::parse_any)?;
let colon: Token![:] = input.parse()?;
Some((name, colon))
} else {
None
fn parse_bare_fn_arg(
input: ParseStream,
mut allow_mut_self: bool,
) -> Result<Option<BareFnArg>> {
let mut has_mut_self = false;
let arg = BareFnArg {
attrs: input.call(Attribute::parse_outer)?,
name: {
if (input.peek(Ident) || input.peek(Token![_]) || input.peek(Token![self]))
&& input.peek2(Token![:])
&& !input.peek2(Token![::])
{
let name = input.call(Ident::parse_any)?;
let colon: Token![:] = input.parse()?;
Some((name, colon))
} else if allow_mut_self
&& input.peek(Token![mut])
&& input.peek2(Token![self])
&& input.peek3(Token![:])
&& !input.peek3(Token![::])
{
has_mut_self = true;
allow_mut_self = false;
input.parse::<Token![mut]>()?;
input.parse::<Token![self]>()?;
input.parse::<Token![:]>()?;
None
} else {
None
}
},
dots: input.parse()?,
comma: input.parse()?,
})
ty: if !has_mut_self && input.peek(Token![...]) {
let dot3 = input.parse::<Token![...]>()?;
let args = vec![
TokenTree::Punct(Punct::new('.', Spacing::Joint)),
TokenTree::Punct(Punct::new('.', Spacing::Joint)),
TokenTree::Punct(Punct::new('.', Spacing::Alone)),
];
let tokens: TokenStream = args
.into_iter()
.zip(&dot3.spans)
.map(|(mut arg, span)| {
arg.set_span(*span);
arg
})
.collect();
Type::Verbatim(tokens)
} else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
has_mut_self = true;
input.parse::<Token![mut]>()?;
Type::Path(TypePath {
qself: None,
path: input.parse::<Token![self]>()?.into(),
})
} else {
input.parse()?
},
};
if has_mut_self {
Ok(None)
} else {
Ok(Some(arg))
}
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
@ -1077,11 +1202,6 @@ mod printing {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.paren_token.surround(tokens, |tokens| {
self.elems.to_tokens(tokens);
// If we only have one argument, we need a trailing comma to
// distinguish TypeTuple from TypeParen.
if self.elems.len() == 1 && !self.elems.trailing_punct() {
<Token![,]>::default().to_tokens(tokens);
}
});
}
}
@ -1167,15 +1287,10 @@ mod printing {
}
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for BareVariadic {
impl ToTokens for Variadic {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
if let Some((name, colon)) = &self.name {
name.to_tokens(tokens);
colon.to_tokens(tokens);
}
self.dots.to_tokens(tokens);
self.comma.to_tokens(tokens);
}
}

View File

@ -1,9 +1,9 @@
use crate::parse::ParseStream;
use crate::parse::{ParseBuffer, ParseStream};
use proc_macro2::{Delimiter, TokenStream};
use std::cmp::Ordering;
use std::iter;
pub(crate) fn between<'a>(begin: ParseStream<'a>, end: ParseStream<'a>) -> TokenStream {
pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream {
let end = end.cursor();
let mut cursor = begin.cursor();
assert!(crate::buffer::same_buffer(end, cursor));

View File

@ -1,4 +1,4 @@
pub(crate) fn skip(mut s: &str) -> &str {
pub fn skip(mut s: &str) -> &str {
'skip: while !s.is_empty() {
let byte = s.as_bytes()[0];
if byte == b'/' {

1058
syn.json generated

File diff suppressed because it is too large Load Diff

View File

@ -2,7 +2,6 @@
extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_driver;
extern crate rustc_span;
extern crate thin_vec;
@ -13,6 +12,7 @@ use rustc_ast::ast::Arm;
use rustc_ast::ast::AssocConstraint;
use rustc_ast::ast::AssocConstraintKind;
use rustc_ast::ast::AssocItemKind;
use rustc_ast::ast::Async;
use rustc_ast::ast::AttrArgs;
use rustc_ast::ast::AttrArgsEq;
use rustc_ast::ast::AttrId;
@ -26,15 +26,11 @@ use rustc_ast::ast::BindingAnnotation;
use rustc_ast::ast::Block;
use rustc_ast::ast::BlockCheckMode;
use rustc_ast::ast::BorrowKind;
use rustc_ast::ast::BoundConstness;
use rustc_ast::ast::BoundPolarity;
use rustc_ast::ast::ByRef;
use rustc_ast::ast::CaptureBy;
use rustc_ast::ast::Closure;
use rustc_ast::ast::ClosureBinder;
use rustc_ast::ast::Const;
use rustc_ast::ast::ConstItem;
use rustc_ast::ast::CoroutineKind;
use rustc_ast::ast::Crate;
use rustc_ast::ast::Defaultness;
use rustc_ast::ast::DelimArgs;
@ -50,24 +46,8 @@ use rustc_ast::ast::FnDecl;
use rustc_ast::ast::FnHeader;
use rustc_ast::ast::FnRetTy;
use rustc_ast::ast::FnSig;
use rustc_ast::ast::ForLoopKind;
use rustc_ast::ast::ForeignItemKind;
use rustc_ast::ast::ForeignMod;
use rustc_ast::ast::FormatAlignment;
use rustc_ast::ast::FormatArgPosition;
use rustc_ast::ast::FormatArgPositionKind;
use rustc_ast::ast::FormatArgs;
use rustc_ast::ast::FormatArgsPiece;
use rustc_ast::ast::FormatArgument;
use rustc_ast::ast::FormatArgumentKind;
use rustc_ast::ast::FormatArguments;
use rustc_ast::ast::FormatCount;
use rustc_ast::ast::FormatDebugHex;
use rustc_ast::ast::FormatOptions;
use rustc_ast::ast::FormatPlaceholder;
use rustc_ast::ast::FormatSign;
use rustc_ast::ast::FormatTrait;
use rustc_ast::ast::GenBlockKind;
use rustc_ast::ast::GenericArg;
use rustc_ast::ast::GenericArgs;
use rustc_ast::ast::GenericBound;
@ -96,6 +76,7 @@ use rustc_ast::ast::Local;
use rustc_ast::ast::LocalKind;
use rustc_ast::ast::MacCall;
use rustc_ast::ast::MacCallStmt;
use rustc_ast::ast::MacDelimiter;
use rustc_ast::ast::MacStmtStyle;
use rustc_ast::ast::MacroDef;
use rustc_ast::ast::MetaItemLit;
@ -111,7 +92,6 @@ use rustc_ast::ast::Param;
use rustc_ast::ast::ParenthesizedArgs;
use rustc_ast::ast::Pat;
use rustc_ast::ast::PatField;
use rustc_ast::ast::PatFieldsRest;
use rustc_ast::ast::PatKind;
use rustc_ast::ast::Path;
use rustc_ast::ast::PathSegment;
@ -120,7 +100,6 @@ use rustc_ast::ast::QSelf;
use rustc_ast::ast::RangeEnd;
use rustc_ast::ast::RangeLimits;
use rustc_ast::ast::RangeSyntax;
use rustc_ast::ast::StaticItem;
use rustc_ast::ast::Stmt;
use rustc_ast::ast::StmtKind;
use rustc_ast::ast::StrLit;
@ -129,7 +108,7 @@ use rustc_ast::ast::StructExpr;
use rustc_ast::ast::StructRest;
use rustc_ast::ast::Term;
use rustc_ast::ast::Trait;
use rustc_ast::ast::TraitBoundModifiers;
use rustc_ast::ast::TraitBoundModifier;
use rustc_ast::ast::TraitObjectSyntax;
use rustc_ast::ast::TraitRef;
use rustc_ast::ast::Ty;
@ -154,15 +133,13 @@ use rustc_ast::ast::WhereRegionPredicate;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, CommentKind, Delimiter, Lit, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{
AttrTokenStream, AttrTokenTree, AttributesData, DelimSpacing, DelimSpan, LazyAttrTokenStream,
Spacing, TokenStream, TokenTree,
AttrTokenStream, AttrTokenTree, AttributesData, DelimSpan, LazyAttrTokenStream, Spacing,
TokenStream, TokenTree,
};
use rustc_data_structures::sync::Lrc;
use rustc_span::source_map::Spanned;
use rustc_span::symbol::{sym, Ident};
use rustc_span::{ErrorGuaranteed, Span, Symbol, SyntaxContext, DUMMY_SP};
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};
use rustc_span::{Span, Symbol, SyntaxContext, DUMMY_SP};
use thin_vec::ThinVec;
pub trait SpanlessEq {
@ -197,16 +174,6 @@ impl<T: SpanlessEq> SpanlessEq for Option<T> {
}
}
impl<T: SpanlessEq, E: SpanlessEq> SpanlessEq for Result<T, E> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Ok(this), Ok(other)) => SpanlessEq::eq(this, other),
(Err(this), Err(other)) => SpanlessEq::eq(this, other),
_ => false,
}
}
}
impl<T: SpanlessEq> SpanlessEq for [T] {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len() && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b))
@ -229,17 +196,6 @@ impl<T: SpanlessEq> SpanlessEq for ThinVec<T> {
}
}
impl<K: Eq + Hash, V: SpanlessEq, S: BuildHasher> SpanlessEq for HashMap<K, V, S> {
fn eq(&self, other: &Self) -> bool {
self.len() == other.len()
&& self.iter().all(|(key, this_v)| {
other
.get(key)
.map_or(false, |other_v| SpanlessEq::eq(this_v, other_v))
})
}
}
impl<T: SpanlessEq> SpanlessEq for Spanned<T> {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(&self.node, &other.node)
@ -290,7 +246,6 @@ macro_rules! spanless_eq_partial_eq {
spanless_eq_partial_eq!(bool);
spanless_eq_partial_eq!(u8);
spanless_eq_partial_eq!(u16);
spanless_eq_partial_eq!(u32);
spanless_eq_partial_eq!(u128);
spanless_eq_partial_eq!(usize);
spanless_eq_partial_eq!(char);
@ -300,7 +255,6 @@ spanless_eq_partial_eq!(CommentKind);
spanless_eq_partial_eq!(Delimiter);
spanless_eq_partial_eq!(InlineAsmOptions);
spanless_eq_partial_eq!(token::LitKind);
spanless_eq_partial_eq!(ErrorGuaranteed);
macro_rules! spanless_eq_struct {
{
@ -460,25 +414,18 @@ spanless_eq_struct!(AttributesData; attrs tokens);
spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl decl_span);
spanless_eq_struct!(BindingAnnotation; 0 1);
spanless_eq_struct!(Block; stmts id rules span tokens could_be_bare_literal);
spanless_eq_struct!(Closure; binder capture_clause constness coroutine_kind movability fn_decl body !fn_decl_span !fn_arg_span);
spanless_eq_struct!(ConstItem; defaultness generics ty expr);
spanless_eq_struct!(Closure; binder capture_clause asyncness movability fn_decl body !fn_decl_span);
spanless_eq_struct!(Crate; attrs items spans id is_placeholder);
spanless_eq_struct!(DelimArgs; dspan delim tokens);
spanless_eq_struct!(DelimSpacing; open close);
spanless_eq_struct!(EnumDef; variants);
spanless_eq_struct!(Expr; id kind span attrs !tokens);
spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder);
spanless_eq_struct!(FieldDef; attrs id span vis ident ty is_placeholder);
spanless_eq_struct!(Fn; defaultness generics sig body);
spanless_eq_struct!(FnDecl; inputs output);
spanless_eq_struct!(FnHeader; constness coroutine_kind unsafety ext);
spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
spanless_eq_struct!(Fn; defaultness generics sig body);
spanless_eq_struct!(FnSig; header decl span);
spanless_eq_struct!(ForeignMod; unsafety abi items);
spanless_eq_struct!(FormatArgPosition; index kind span);
spanless_eq_struct!(FormatArgs; span template arguments);
spanless_eq_struct!(FormatArgument; kind expr);
spanless_eq_struct!(FormatOptions; width precision alignment fill sign alternate zero_pad debug_hex);
spanless_eq_struct!(FormatPlaceholder; argument span format_trait format_options);
spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind !colon_span);
spanless_eq_struct!(Generics; params where_clause span);
spanless_eq_struct!(Impl; defaultness unsafety generics constness polarity of_trait self_ty items);
@ -489,10 +436,10 @@ spanless_eq_struct!(Label; ident);
spanless_eq_struct!(Lifetime; id ident);
spanless_eq_struct!(Lit; kind symbol suffix);
spanless_eq_struct!(Local; pat ty kind id span attrs !tokens);
spanless_eq_struct!(MacCall; path args);
spanless_eq_struct!(MacCall; path args prior_type_ascription);
spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
spanless_eq_struct!(MacroDef; body macro_rules);
spanless_eq_struct!(MetaItemLit; symbol suffix kind span);
spanless_eq_struct!(MetaItemLit; token_lit kind span);
spanless_eq_struct!(MethodCall; seg receiver args !span);
spanless_eq_struct!(ModSpans; !inner_span !inject_use_span);
spanless_eq_struct!(MutTy; ty mutbl);
@ -504,13 +451,11 @@ spanless_eq_struct!(Path; span segments tokens);
spanless_eq_struct!(PathSegment; ident id args);
spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
spanless_eq_struct!(QSelf; ty path_span position);
spanless_eq_struct!(StaticItem; ty mutability expr);
spanless_eq_struct!(Stmt; id kind span);
spanless_eq_struct!(StrLit; symbol suffix symbol_unescaped style span);
spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
spanless_eq_struct!(StructExpr; qself path fields rest);
spanless_eq_struct!(Token; kind span);
spanless_eq_struct!(Trait; unsafety is_auto generics bounds items);
spanless_eq_struct!(TraitBoundModifiers; constness polarity);
spanless_eq_struct!(TraitRef; path ref_id);
spanless_eq_struct!(Ty; id kind span tokens);
spanless_eq_struct!(TyAlias; defaultness generics where_clauses !where_predicates_split bounds ty);
@ -523,36 +468,25 @@ spanless_eq_struct!(WhereClause; has_where_token predicates span);
spanless_eq_struct!(WhereEqPredicate; span lhs_ty rhs_ty);
spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0) Type(0) MacCall(0));
spanless_eq_enum!(AssocConstraintKind; Equality(term) Bound(bounds));
spanless_eq_enum!(AssocItemKind; Const(0) Fn(0) Type(0) MacCall(0));
spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
spanless_eq_enum!(AttrArgs; Empty Delimited(0) Eq(0 1));
spanless_eq_enum!(AttrArgsEq; Ast(0) Hir(0));
spanless_eq_enum!(AttrStyle; Outer Inner);
spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2 3) Attributes(0));
spanless_eq_enum!(AttrTokenTree; Token(0 1) Delimited(0 1 2) Attributes(0));
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
spanless_eq_enum!(BlockCheckMode; Default Unsafe(0));
spanless_eq_enum!(BorrowKind; Ref Raw);
spanless_eq_enum!(BoundConstness; Never Always(0) Maybe(0));
spanless_eq_enum!(BoundPolarity; Positive Negative(0) Maybe(0));
spanless_eq_enum!(ByRef; Yes No);
spanless_eq_enum!(CaptureBy; Value(move_kw) Ref);
spanless_eq_enum!(CaptureBy; Value Ref);
spanless_eq_enum!(ClosureBinder; NotPresent For(span generic_params));
spanless_eq_enum!(Const; Yes(0) No);
spanless_eq_enum!(Defaultness; Default(0) Final);
spanless_eq_enum!(Extern; None Implicit(0) Explicit(0 1));
spanless_eq_enum!(FloatTy; F32 F64);
spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
spanless_eq_enum!(ForLoopKind; For ForAwait);
spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0));
spanless_eq_enum!(FormatAlignment; Left Right Center);
spanless_eq_enum!(FormatArgPositionKind; Implicit Number Named);
spanless_eq_enum!(FormatArgsPiece; Literal(0) Placeholder(0));
spanless_eq_enum!(FormatArgumentKind; Normal Named(0) Captured(0));
spanless_eq_enum!(FormatCount; Literal(0) Argument(0));
spanless_eq_enum!(FormatDebugHex; Lower Upper);
spanless_eq_enum!(FormatSign; Plus Minus);
spanless_eq_enum!(FormatTrait; Display Debug LowerExp UpperExp Octal Pointer Binary LowerHex UpperHex);
spanless_eq_enum!(GenBlockKind; Async Gen AsyncGen);
spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
@ -566,52 +500,50 @@ spanless_eq_enum!(IsAuto; Yes No);
spanless_eq_enum!(LitFloatType; Suffixed(0) Unsuffixed);
spanless_eq_enum!(LitIntType; Signed(0) Unsigned(0) Unsuffixed);
spanless_eq_enum!(LocalKind; Decl Init(0) InitElse(0 1));
spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
spanless_eq_enum!(Movability; Static Movable);
spanless_eq_enum!(Mutability; Mut Not);
spanless_eq_enum!(PatFieldsRest; Rest None);
spanless_eq_enum!(RangeEnd; Included(0) Excluded);
spanless_eq_enum!(RangeLimits; HalfOpen Closed);
spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
spanless_eq_enum!(StrStyle; Cooked Raw(0));
spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
spanless_eq_enum!(Term; Ty(0) Const(0));
spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2 3));
spanless_eq_enum!(TokenTree; Token(0 1) Delimited(0 1 2));
spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
spanless_eq_enum!(TraitObjectSyntax; Dyn DynStar None);
spanless_eq_enum!(UintTy; Usize U8 U16 U32 U64 U128);
spanless_eq_enum!(UnOp; Deref Not Neg);
spanless_eq_enum!(Unsafe; Yes(0) No);
spanless_eq_enum!(UnsafeSource; CompilerGenerated UserProvided);
spanless_eq_enum!(UseTreeKind; Simple(0) Nested(0) Glob);
spanless_eq_enum!(VariantData; Struct(fields recovered) Tuple(0 1) Unit(0));
spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
spanless_eq_enum!(VisibilityKind; Public Restricted(path id shorthand) Inherited);
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
spanless_eq_enum!(CoroutineKind; Async(span closure_id return_impl_trait_id)
Gen(span closure_id return_impl_trait_id)
AsyncGen(span closure_id return_impl_trait_id));
spanless_eq_enum!(ExprKind; Array(0) ConstBlock(0) Call(0 1) MethodCall(0)
Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1 2 3)
If(0 1 2) While(0 1 2) ForLoop(pat iter body label kind) Loop(0 1 2)
Match(0 1) Closure(0) Block(0 1) Gen(0 1 2) Await(0 1) TryBlock(0)
Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1 2) Underscore
Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0)
InlineAsm(0) OffsetOf(0 1) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0)
Yield(0) Yeet(0) Become(0) IncludedBytes(0) FormatArgs(0) Err);
spanless_eq_enum!(ExprKind; Box(0) Array(0) ConstBlock(0) Call(0 1)
MethodCall(0) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1)
Let(0 1 2) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1 2) Match(0 1)
Closure(0) Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2)
AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore Range(0 1 2) Path(0 1)
AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0) MacCall(0)
Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0) Yeet(0) IncludedBytes(0)
Err);
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
Sym(sym));
spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0) Const(0) Fn(0)
Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
Fn(0) Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0));
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0 1) CStr(0 1) Byte(0) Char(0)
Int(0 1) Float(0 1) Bool(0) Err);
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
Float(0 1) Bool(0) Err);
spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2 3) TupleStruct(0 1 2)
Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
Never Paren(0) MacCall(0));
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Ref(0 1) BareFn(0) Never
Tup(0) AnonStruct(0) AnonUnion(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1)
Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) Err CVarArgs);
Paren(0) MacCall(0));
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
ImplicitSelf MacCall(0) Err CVarArgs);
impl SpanlessEq for Ident {
fn eq(&self, other: &Self) -> bool {
@ -659,20 +591,17 @@ impl SpanlessEq for TokenKind {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(TokenKind::Literal(this), TokenKind::Literal(other)) => SpanlessEq::eq(this, other),
(TokenKind::DotDotEq | TokenKind::DotDotDot, _) => match other {
(TokenKind::DotDotEq, _) | (TokenKind::DotDotDot, _) => match other {
TokenKind::DotDotEq | TokenKind::DotDotDot => true,
_ => false,
},
(TokenKind::Interpolated(this), TokenKind::Interpolated(other)) => {
let (this, this_span) = this.as_ref();
let (other, other_span) = other.as_ref();
SpanlessEq::eq(this_span, other_span)
&& match (this, other) {
(Nonterminal::NtExpr(this), Nonterminal::NtExpr(other)) => {
SpanlessEq::eq(this, other)
}
_ => this == other,
match (this.as_ref(), other.as_ref()) {
(Nonterminal::NtExpr(this), Nonterminal::NtExpr(other)) => {
SpanlessEq::eq(this, other)
}
_ => this == other,
}
}
_ => self == other,
}
@ -737,7 +666,7 @@ fn doc_comment<'a>(
}
}
let stream = match trees.next() {
Some(TokenTree::Delimited(_span, _spacing, Delimiter::Bracket, stream)) => stream,
Some(TokenTree::Delimited(_span, Delimiter::Bracket, stream)) => stream,
_ => return false,
};
let mut trees = stream.trees();
@ -781,7 +710,7 @@ fn is_escaped_literal_token(token: &Token, unescaped: Symbol) -> bool {
Token {
kind: TokenKind::Interpolated(nonterminal),
span: _,
} => match &nonterminal.0 {
} => match nonterminal.as_ref() {
Nonterminal::NtExpr(expr) => match &expr.kind {
ExprKind::Lit(lit) => is_escaped_lit(lit, unescaped),
_ => false,
@ -805,8 +734,12 @@ fn is_escaped_literal_attr_args(value: &AttrArgsEq, unescaped: Symbol) -> bool {
fn is_escaped_literal_meta_item_lit(lit: &MetaItemLit, unescaped: Symbol) -> bool {
match lit {
MetaItemLit {
symbol: _,
suffix: None,
token_lit:
Lit {
kind: token::LitKind::Str,
symbol: _,
suffix: None,
},
kind,
span: _,
} => is_escaped_lit_kind(kind, unescaped),
@ -871,9 +804,3 @@ impl SpanlessEq for AttrKind {
}
}
}
impl SpanlessEq for FormatArguments {
fn eq(&self, other: &Self) -> bool {
SpanlessEq::eq(self.all_args(), other.all_args())
}
}

Some files were not shown because too many files have changed in this diff Show More