Bug 1716518 - Upgrade syn to v1.0.73. r=emilio
Differential Revision: https://phabricator.services.mozilla.com/D117829
This commit is contained in:
parent 9f7c13daac
commit 5bdfefb64b
Cargo.lock (generated): 4 lines changed

@@ -4946,9 +4946,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.40"
version = "1.0.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
checksum = "f71489ff30030d2ae598524f61326b902466f72a0fb1a8564c001cc63425bcc7"
dependencies = [
"proc-macro2",
"quote",
third_party/rust/syn/.cargo-checksum.json (vendored): 2 lines changed
File diff suppressed because one or more lines are too long
third_party/rust/syn/Cargo.toml (vendored): 9 lines changed

@@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "syn"
version = "1.0.40"
version = "1.0.73"
authors = ["David Tolnay <dtolnay@gmail.com>"]
include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
description = "Parser for Rust source code"
@@ -24,6 +24,7 @@ license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/syn"
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "doc_cfg"]
targets = ["x86_64-unknown-linux-gnu"]

[package.metadata.playground]
@@ -38,7 +39,7 @@ required-features = ["full", "parsing"]
name = "file"
required-features = ["full", "parsing"]
[dependencies.proc-macro2]
version = "1.0.13"
version = "1.0.26"
default-features = false

[dependencies.quote]
@@ -55,7 +56,7 @@ version = "1.0"
version = "1.0"

[dev-dependencies.insta]
version = "0.16"
version = "1.0"

[dev-dependencies.rayon]
version = "1.0"
@@ -74,7 +75,7 @@ features = ["blocking"]
version = "0"

[dev-dependencies.tar]
version = "0.4"
version = "0.4.16"

[dev-dependencies.termcolor]
version = "1.0"
third_party/rust/syn/README.md (vendored): 6 lines changed

@@ -150,7 +150,7 @@ By tracking span information all the way through the expansion of a procedural
macro as shown in the `heapsize` example, token-based macros in Syn are able to
trigger errors that directly pinpoint the source of the problem.

```
```console
error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
--> src/main.rs:7:5
|
@@ -171,7 +171,7 @@ Syn's parsing API.
The example reimplements the popular `lazy_static` crate from crates.io as a
procedural macro.

```
```rust
lazy_static! {
static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
}
@@ -180,7 +180,7 @@ lazy_static! {
The implementation shows how to trigger custom warnings and error messages on
the macro input.

```
```console
warning: come on, pick a more creative name
--> src/main.rs:10:16
|
third_party/rust/syn/build.rs (vendored): 4 lines changed

@@ -15,6 +15,10 @@ fn main() {
println!("cargo:rustc-cfg=syn_omit_await_from_token_macro");
}

if compiler.minor < 39 {
println!("cargo:rustc-cfg=syn_no_const_vec_new");
}

if !compiler.nightly {
println!("cargo:rustc-cfg=syn_disable_nightly_tests");
}
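For context, build.rs probes the compiler version and emits `cargo:rustc-cfg` flags such as the new `syn_no_const_vec_new`; library code then gates on those flags with `#[cfg]`. A minimal sketch of that pattern, assuming a hypothetical function that is not part of syn:

```rust
// Hypothetical downstream of the build script (not from this diff): provide a
// const fn only on compilers where `Vec::new` is already const (Rust 1.39+),
// and fall back to a plain fn when build.rs emitted `syn_no_const_vec_new`.
#[cfg(not(syn_no_const_vec_new))]
pub const fn empty() -> Vec<u8> {
    Vec::new()
}

#[cfg(syn_no_const_vec_new)]
pub fn empty() -> Vec<u8> {
    Vec::new()
}
```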
third_party/rust/syn/src/attr.rs (vendored): 57 lines changed

@@ -1,9 +1,7 @@
use super::*;
use crate::punctuated::Punctuated;

use std::iter;

use proc_macro2::TokenStream;
use std::iter;

#[cfg(feature = "parsing")]
use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
@@ -146,6 +144,7 @@ ast_struct! {
/// };
/// assert_eq!(doc, attr);
/// ```
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Attribute {
pub pound_token: Token![#],
pub style: AttrStyle,
@@ -162,6 +161,7 @@ impl Attribute {
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_meta(&self) -> Result<Meta> {
fn clone_ident_segment(segment: &PathSegment) -> PathSegment {
PathSegment {
@@ -209,6 +209,7 @@ impl Attribute {
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_args<T: Parse>(&self) -> Result<T> {
self.parse_args_with(T::parse)
}
@@ -218,6 +219,7 @@ impl Attribute {
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
let parser = |input: ParseStream| {
let args = enter_args(self, input)?;
@@ -231,6 +233,7 @@ impl Attribute {
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
let mut attrs = Vec::new();
while input.peek(Token![#]) {
@@ -244,11 +247,10 @@ impl Attribute {
/// *This function is available only if Syn is built with the `"parsing"`
/// feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
let mut attrs = Vec::new();
while input.peek(Token![#]) && input.peek2(Token![!]) {
attrs.push(input.call(parsing::single_parse_inner)?);
}
parsing::parse_inner(input, &mut attrs)?;
Ok(attrs)
}
}
@@ -323,6 +325,7 @@ ast_enum! {
/// - `#![feature(proc_macro)]`
/// - `//! # Example`
/// - `/*! Please file an issue */`
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum AttrStyle {
Outer,
Inner(Token![!]),
@@ -352,10 +355,8 @@ ast_enum_of_structs! {
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum Meta {
Path(Path),

@@ -372,6 +373,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct MetaList {
pub path: Path,
pub paren_token: token::Paren,
@@ -384,6 +386,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct MetaNameValue {
pub path: Path,
pub eq_token: Token![=],
@@ -410,6 +413,7 @@ ast_enum_of_structs! {
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum NestedMeta {
/// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
/// would be a nested `Meta::Path`.
@@ -455,6 +459,7 @@ ast_enum_of_structs! {
/// # "".parse().unwrap()
/// }
/// ```
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub type AttributeArgs = Vec<NestedMeta>;

pub trait FilterAttrs<'a> {
@@ -494,11 +499,15 @@ where
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;

use crate::ext::IdentExt;
use crate::parse::{Parse, ParseStream, Result};
#[cfg(feature = "full")]
use crate::private;

pub fn parse_inner(input: ParseStream, attrs: &mut Vec<Attribute>) -> Result<()> {
while input.peek(Token![#]) && input.peek2(Token![!]) {
attrs.push(input.call(parsing::single_parse_inner)?);
}
Ok(())
}

pub fn single_parse_inner(input: ParseStream) -> Result<Attribute> {
let content;
@@ -522,15 +531,6 @@ pub mod parsing {
})
}

#[cfg(feature = "full")]
impl private {
pub fn attrs(outer: Vec<Attribute>, inner: Vec<Attribute>) -> Vec<Attribute> {
let mut attrs = outer;
attrs.extend(inner);
attrs
}
}

// Like Path::parse_mod_style but accepts keywords in the path.
fn parse_meta_path(input: ParseStream) -> Result<Path> {
Ok(Path {
@@ -556,6 +556,7 @@ pub mod parsing {
})
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Meta {
fn parse(input: ParseStream) -> Result<Self> {
let path = input.call(parse_meta_path)?;
@@ -563,6 +564,7 @@ pub mod parsing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for MetaList {
fn parse(input: ParseStream) -> Result<Self> {
let path = input.call(parse_meta_path)?;
@@ -570,6 +572,7 @@ pub mod parsing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for MetaNameValue {
fn parse(input: ParseStream) -> Result<Self> {
let path = input.call(parse_meta_path)?;
@@ -577,11 +580,14 @@ pub mod parsing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for NestedMeta {
fn parse(input: ParseStream) -> Result<Self> {
if input.peek(Lit) && !(input.peek(LitBool) && input.peek2(Token![=])) {
input.parse().map(NestedMeta::Lit)
} else if input.peek(Ident::peek_any) {
} else if input.peek(Ident::peek_any)
|| input.peek(Token![::]) && input.peek3(Ident::peek_any)
{
input.parse().map(NestedMeta::Meta)
} else {
Err(input.error("expected identifier or literal"))
@@ -623,6 +629,7 @@ mod printing {
use proc_macro2::TokenStream;
use quote::ToTokens;

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Attribute {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.pound_token.to_tokens(tokens);
@@ -636,15 +643,17 @@ mod printing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for MetaList {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.path.to_tokens(tokens);
self.paren_token.surround(tokens, |tokens| {
self.nested.to_tokens(tokens);
})
});
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for MetaNameValue {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.path.to_tokens(tokens);
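For orientation, `parse_meta` and the `Meta`/`NestedMeta` types touched above are how macros usually inspect attribute arguments. A small sketch against the syn 1.x API (the helper name is illustrative, not from this diff):

```rust
use syn::{Attribute, Meta, NestedMeta};

// Given an attribute such as `#[derive(Copy, Clone)]`, list the nested paths.
fn derived_traits(attr: &Attribute) -> Vec<String> {
    let mut names = Vec::new();
    if let Ok(Meta::List(list)) = attr.parse_meta() {
        for nested in list.nested {
            if let NestedMeta::Meta(Meta::Path(path)) = nested {
                if let Some(ident) = path.get_ident() {
                    names.push(ident.to_string());
                }
            }
        }
    }
    names
}
```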
third_party/rust/syn/src/await.rs (vendored): 2 lines changed

@@ -1,2 +1,2 @@
// See include!("await.rs") in token.rs.
export_token_macro![(await)];
export_token_macro! {[await]}
third_party/rust/syn/src/buffer.rs (vendored): 4 lines changed

@@ -12,13 +12,11 @@
feature = "proc-macro"
))]
use crate::proc_macro as pm;
use crate::Lifetime;
use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};

use std::marker::PhantomData;
use std::ptr;

use crate::Lifetime;

/// Internal type which is used instead of `TokenTree` to represent a token tree
/// within a `TokenBuffer`.
enum Entry {
third_party/rust/syn/src/custom_keyword.rs (vendored): 51 lines changed

@@ -26,8 +26,8 @@
///
/// - Field access to its span — `let sp = whatever_token.span`
///
/// [Peeking]: parse::ParseBuffer::peek
/// [Parsing]: parse::ParseBuffer::parse
/// [Peeking]: crate::parse::ParseBuffer::peek
/// [Parsing]: crate::parse::ParseBuffer::parse
/// [Printing]: quote::ToTokens
/// [`Span`]: proc_macro2::Span
///
@@ -91,23 +91,23 @@ macro_rules! custom_keyword {
($ident:ident) => {
#[allow(non_camel_case_types)]
pub struct $ident {
pub span: $crate::export::Span,
pub span: $crate::__private::Span,
}

#[doc(hidden)]
#[allow(dead_code, non_snake_case)]
pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
pub fn $ident<__S: $crate::__private::IntoSpans<[$crate::__private::Span; 1]>>(
span: __S,
) -> $ident {
$ident {
span: $crate::export::IntoSpans::into_spans(span)[0],
span: $crate::__private::IntoSpans::into_spans(span)[0],
}
}

impl $crate::export::Default for $ident {
impl $crate::__private::Default for $ident {
fn default() -> Self {
$ident {
span: $crate::export::Span::call_site(),
span: $crate::__private::Span::call_site(),
}
}
}
@@ -127,7 +127,7 @@ macro_rules! impl_parse_for_custom_keyword {
($ident:ident) => {
// For peek.
impl $crate::token::CustomToken for $ident {
fn peek(cursor: $crate::buffer::Cursor) -> $crate::export::bool {
fn peek(cursor: $crate::buffer::Cursor) -> $crate::__private::bool {
if let Some((ident, _rest)) = cursor.ident() {
ident == stringify!($ident)
} else {
@@ -135,7 +135,7 @@ macro_rules! impl_parse_for_custom_keyword {
}
}

fn display() -> &'static $crate::export::str {
fn display() -> &'static $crate::__private::str {
concat!("`", stringify!($ident), "`")
}
}
@@ -143,12 +143,12 @@ macro_rules! impl_parse_for_custom_keyword {
impl $crate::parse::Parse for $ident {
fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
input.step(|cursor| {
if let $crate::export::Some((ident, rest)) = cursor.ident() {
if let $crate::__private::Some((ident, rest)) = cursor.ident() {
if ident == stringify!($ident) {
return $crate::export::Ok(($ident { span: ident.span() }, rest));
return $crate::__private::Ok(($ident { span: ident.span() }, rest));
}
}
$crate::export::Err(cursor.error(concat!(
$crate::__private::Err(cursor.error(concat!(
"expected `",
stringify!($ident),
"`"
@@ -173,10 +173,10 @@ macro_rules! impl_parse_for_custom_keyword {
#[macro_export]
macro_rules! impl_to_tokens_for_custom_keyword {
($ident:ident) => {
impl $crate::export::ToTokens for $ident {
fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
impl $crate::__private::ToTokens for $ident {
fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
let ident = $crate::Ident::new(stringify!($ident), self.span);
$crate::export::TokenStreamExt::append(tokens, ident);
$crate::__private::TokenStreamExt::append(tokens, ident);
}
}
};
@@ -196,9 +196,10 @@ macro_rules! impl_to_tokens_for_custom_keyword {
#[macro_export]
macro_rules! impl_clone_for_custom_keyword {
($ident:ident) => {
impl $crate::export::Copy for $ident {}
impl $crate::__private::Copy for $ident {}

impl $crate::export::Clone for $ident {
#[allow(clippy::expl_impl_clone_on_copy)]
impl $crate::__private::Clone for $ident {
fn clone(&self) -> Self {
*self
}
@@ -220,25 +221,25 @@ macro_rules! impl_clone_for_custom_keyword {
#[macro_export]
macro_rules! impl_extra_traits_for_custom_keyword {
($ident:ident) => {
impl $crate::export::Debug for $ident {
fn fmt(&self, f: &mut $crate::export::Formatter) -> $crate::export::fmt::Result {
$crate::export::Formatter::write_str(
impl $crate::__private::Debug for $ident {
fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result {
$crate::__private::Formatter::write_str(
f,
concat!("Keyword [", stringify!($ident), "]"),
)
}
}

impl $crate::export::Eq for $ident {}
impl $crate::__private::Eq for $ident {}

impl $crate::export::PartialEq for $ident {
fn eq(&self, _other: &Self) -> $crate::export::bool {
impl $crate::__private::PartialEq for $ident {
fn eq(&self, _other: &Self) -> $crate::__private::bool {
true
}
}

impl $crate::export::Hash for $ident {
fn hash<__H: $crate::export::Hasher>(&self, _state: &mut __H) {}
impl $crate::__private::Hash for $ident {
fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
}
};
}
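Background for the `$crate::export` to `$crate::__private` renames in this file: `custom_keyword!` expands inside user crates, so everything it references must go through syn's hidden re-export module. For reference, a short usage sketch of the macro itself (keyword and type names are illustrative):

```rust
mod kw {
    syn::custom_keyword!(nightly);
}

// Parse input that may or may not start with the `nightly` keyword.
struct Flag {
    nightly: Option<kw::nightly>,
}

impl syn::parse::Parse for Flag {
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        let nightly = if input.peek(kw::nightly) {
            Some(input.parse()?)
        } else {
            None
        };
        Ok(Flag { nightly })
    }
}
```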
third_party/rust/syn/src/custom_punctuation.rs (vendored): 41 lines changed

@@ -22,8 +22,8 @@
///
/// - Field access to its spans — `let spans = lrarrow.spans`
///
/// [Peeking]: parse::ParseBuffer::peek
/// [Parsing]: parse::ParseBuffer::parse
/// [Peeking]: crate::parse::ParseBuffer::peek
/// [Parsing]: crate::parse::ParseBuffer::parse
/// [Printing]: quote::ToTokens
/// [`Span`]: proc_macro2::Span
///
@@ -83,18 +83,18 @@ macro_rules! custom_punctuation {

#[doc(hidden)]
#[allow(dead_code, non_snake_case)]
pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
pub fn $ident<__S: $crate::__private::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
spans: __S,
) -> $ident {
let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
$ident {
spans: $crate::export::IntoSpans::into_spans(spans)
spans: $crate::__private::IntoSpans::into_spans(spans)
}
}

impl $crate::export::Default for $ident {
impl $crate::__private::Default for $ident {
fn default() -> Self {
$ident($crate::export::Span::call_site())
$ident($crate::__private::Span::call_site())
}
}

@@ -116,7 +116,7 @@ macro_rules! impl_parse_for_custom_punctuation {
$crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
}

fn display() -> &'static $crate::export::str {
fn display() -> &'static $crate::__private::str {
concat!("`", $crate::stringify_punct!($($tt)+), "`")
}
}
@@ -145,8 +145,8 @@ macro_rules! impl_parse_for_custom_punctuation {
#[macro_export]
macro_rules! impl_to_tokens_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::export::ToTokens for $ident {
fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
impl $crate::__private::ToTokens for $ident {
fn to_tokens(&self, tokens: &mut $crate::__private::TokenStream2) {
$crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
}
}
@@ -167,9 +167,10 @@ macro_rules! impl_to_tokens_for_custom_punctuation {
#[macro_export]
macro_rules! impl_clone_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::export::Copy for $ident {}
impl $crate::__private::Copy for $ident {}

impl $crate::export::Clone for $ident {
#[allow(clippy::expl_impl_clone_on_copy)]
impl $crate::__private::Clone for $ident {
fn clone(&self) -> Self {
*self
}
@@ -191,22 +192,22 @@ macro_rules! impl_clone_for_custom_punctuation {
#[macro_export]
macro_rules! impl_extra_traits_for_custom_punctuation {
($ident:ident, $($tt:tt)+) => {
impl $crate::export::Debug for $ident {
fn fmt(&self, f: &mut $crate::export::Formatter) -> $crate::export::fmt::Result {
$crate::export::Formatter::write_str(f, stringify!($ident))
impl $crate::__private::Debug for $ident {
fn fmt(&self, f: &mut $crate::__private::Formatter) -> $crate::__private::fmt::Result {
$crate::__private::Formatter::write_str(f, stringify!($ident))
}
}

impl $crate::export::Eq for $ident {}
impl $crate::__private::Eq for $ident {}

impl $crate::export::PartialEq for $ident {
fn eq(&self, _other: &Self) -> $crate::export::bool {
impl $crate::__private::PartialEq for $ident {
fn eq(&self, _other: &Self) -> $crate::__private::bool {
true
}
}

impl $crate::export::Hash for $ident {
fn hash<__H: $crate::export::Hasher>(&self, _state: &mut __H) {}
impl $crate::__private::Hash for $ident {
fn hash<__H: $crate::__private::Hasher>(&self, _state: &mut __H) {}
}
};
}
@@ -224,7 +225,7 @@ macro_rules! impl_extra_traits_for_custom_punctuation {
#[macro_export]
macro_rules! custom_punctuation_repr {
($($tt:tt)+) => {
[$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
[$crate::__private::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
};
}
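The same `export` to `__private` rename applies here. For reference, a minimal `custom_punctuation!` usage sketch, with illustrative type and field names:

```rust
mod punct {
    syn::custom_punctuation!(LeftRightArrow, <=>);
}

// Parse input of the form `a <=> b`.
struct Swap {
    left: syn::Ident,
    arrow: punct::LeftRightArrow,
    right: syn::Ident,
}

impl syn::parse::Parse for Swap {
    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
        Ok(Swap {
            left: input.parse()?,
            arrow: input.parse()?,
            right: input.parse()?,
        })
    }
}
```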
third_party/rust/syn/src/data.rs (vendored): 90 lines changed

@@ -6,6 +6,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Variant {
/// Attributes tagged on the variant.
pub attrs: Vec<Attribute>,
@@ -31,10 +32,8 @@ ast_enum_of_structs! {
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum Fields {
/// Named fields of a struct or struct variant such as `Point { x: f64,
/// y: f64 }`.
@@ -54,6 +53,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct FieldsNamed {
pub brace_token: token::Brace,
pub named: Punctuated<Field, Token![,]>,
@@ -65,6 +65,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct FieldsUnnamed {
pub paren_token: token::Paren,
pub unnamed: Punctuated<Field, Token![,]>,
@@ -149,6 +150,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Field {
/// Attributes tagged on the field.
pub attrs: Vec<Attribute>,
@@ -179,10 +181,8 @@ ast_enum_of_structs! {
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum Visibility {
/// A public visibility level: `pub`.
Public(VisPublic),
@@ -204,6 +204,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct VisPublic {
pub pub_token: Token![pub],
}
@@ -214,6 +215,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct VisCrate {
pub crate_token: Token![crate],
}
@@ -225,6 +227,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct VisRestricted {
pub pub_token: Token![pub],
pub paren_token: token::Paren,
@@ -236,40 +239,41 @@ ast_struct! {
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;

use crate::ext::IdentExt;
use crate::parse::discouraged::Speculative;
use crate::parse::{Parse, ParseStream, Result};

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Variant {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let mut attrs = input.call(Attribute::parse_outer)?;
let _visibility: Visibility = input.parse()?;
let ident: Ident = input.parse()?;
let fields = if input.peek(token::Brace) {
let fields = parse_braced(input, &mut attrs)?;
Fields::Named(fields)
} else if input.peek(token::Paren) {
Fields::Unnamed(input.parse()?)
} else {
Fields::Unit
};
let discriminant = if input.peek(Token![=]) {
let eq_token: Token![=] = input.parse()?;
let discriminant: Expr = input.parse()?;
Some((eq_token, discriminant))
} else {
None
};
Ok(Variant {
attrs,
ident: input.parse()?,
fields: {
if input.peek(token::Brace) {
Fields::Named(input.parse()?)
} else if input.peek(token::Paren) {
Fields::Unnamed(input.parse()?)
} else {
Fields::Unit
}
},
discriminant: {
if input.peek(Token![=]) {
let eq_token: Token![=] = input.parse()?;
let discriminant: Expr = input.parse()?;
Some((eq_token, discriminant))
} else {
None
}
},
ident,
fields,
discriminant,
})
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for FieldsNamed {
fn parse(input: ParseStream) -> Result<Self> {
let content;
@@ -280,6 +284,7 @@ pub mod parsing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for FieldsUnnamed {
fn parse(input: ParseStream) -> Result<Self> {
let content;
@@ -290,8 +295,20 @@ pub mod parsing {
}
}

pub(crate) fn parse_braced(
input: ParseStream,
attrs: &mut Vec<Attribute>,
) -> Result<FieldsNamed> {
let content;
let brace_token = braced!(content in input);
attr::parsing::parse_inner(&content, attrs)?;
let named = content.parse_terminated(Field::parse_named)?;
Ok(FieldsNamed { brace_token, named })
}

impl Field {
/// Parses a named (braced struct) field.
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_named(input: ParseStream) -> Result<Self> {
Ok(Field {
attrs: input.call(Attribute::parse_outer)?,
@@ -303,6 +320,7 @@ pub mod parsing {
}

/// Parses an unnamed (tuple struct) field.
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_unnamed(input: ParseStream) -> Result<Self> {
Ok(Field {
attrs: input.call(Attribute::parse_outer)?,
@@ -314,6 +332,7 @@ pub mod parsing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for Visibility {
fn parse(input: ParseStream) -> Result<Self> {
// Recognize an empty None-delimited group, as produced by a $:vis
@@ -405,12 +424,11 @@ pub mod parsing {
#[cfg(feature = "printing")]
mod printing {
use super::*;

use crate::print::TokensOrDefault;
use proc_macro2::TokenStream;
use quote::{ToTokens, TokenStreamExt};

use crate::print::TokensOrDefault;

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Variant {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(&self.attrs);
@@ -423,6 +441,7 @@ mod printing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for FieldsNamed {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.brace_token.surround(tokens, |tokens| {
@@ -431,6 +450,7 @@ mod printing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for FieldsUnnamed {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.paren_token.surround(tokens, |tokens| {
@@ -439,6 +459,7 @@ mod printing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for Field {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(&self.attrs);
@@ -451,18 +472,21 @@ mod printing {
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for VisPublic {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.pub_token.to_tokens(tokens)
self.pub_token.to_tokens(tokens);
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for VisCrate {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.crate_token.to_tokens(tokens);
}
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for VisRestricted {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.pub_token.to_tokens(tokens);
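For orientation, the `Fields` variants defined above are what derive macros typically match on once they have a `DeriveInput`. A brief sketch, not part of this change:

```rust
use syn::{Data, DeriveInput, Fields};

// Count the fields of whatever item a derive macro received.
fn field_count(input: &DeriveInput) -> usize {
    match &input.data {
        Data::Struct(data) => match &data.fields {
            Fields::Named(fields) => fields.named.len(),
            Fields::Unnamed(fields) => fields.unnamed.len(),
            Fields::Unit => 0,
        },
        Data::Enum(data) => data.variants.len(),
        Data::Union(data) => data.fields.named.len(),
    }
}
```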
third_party/rust/syn/src/derive.rs (vendored): 39 lines changed

@@ -5,6 +5,7 @@ ast_struct! {
/// Data structure sent to a `proc_macro_derive` macro.
///
/// *This type is available only if Syn is built with the `"derive"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub struct DeriveInput {
/// Attributes tagged on the whole struct or enum.
pub attrs: Vec<Attribute>,
@@ -32,10 +33,8 @@ ast_enum_of_structs! {
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
//
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
// blocked on https://github.com/rust-lang/rust/issues/62833
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub enum Data {
/// A struct input to a `proc_macro_derive` macro.
Struct(DataStruct),
@@ -55,6 +54,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub struct DataStruct {
pub struct_token: Token![struct],
pub fields: Fields,
@@ -67,6 +67,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub struct DataEnum {
pub enum_token: Token![enum],
pub brace_token: token::Brace,
@@ -79,6 +80,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(feature = "derive")))]
pub struct DataUnion {
pub union_token: Token![union],
pub fields: FieldsNamed,
@@ -88,12 +90,12 @@ ast_struct! {
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;

use crate::parse::{Parse, ParseStream, Result};

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for DeriveInput {
fn parse(input: ParseStream) -> Result<Self> {
let attrs = input.call(Attribute::parse_outer)?;
let mut attrs = input.call(Attribute::parse_outer)?;
let vis = input.parse::<Visibility>()?;

let lookahead = input.lookahead1();
@@ -101,7 +103,7 @@ pub mod parsing {
let struct_token = input.parse::<Token![struct]>()?;
let ident = input.parse::<Ident>()?;
let generics = input.parse::<Generics>()?;
let (where_clause, fields, semi) = data_struct(input)?;
let (where_clause, fields, semi) = data_struct(input, &mut attrs)?;
Ok(DeriveInput {
attrs,
vis,
@@ -120,7 +122,7 @@ pub mod parsing {
let enum_token = input.parse::<Token![enum]>()?;
let ident = input.parse::<Ident>()?;
let generics = input.parse::<Generics>()?;
let (where_clause, brace, variants) = data_enum(input)?;
let (where_clause, brace, variants) = data_enum(input, &mut attrs)?;
Ok(DeriveInput {
attrs,
vis,
@@ -139,7 +141,7 @@ pub mod parsing {
let union_token = input.parse::<Token![union]>()?;
let ident = input.parse::<Ident>()?;
let generics = input.parse::<Generics>()?;
let (where_clause, fields) = data_union(input)?;
let (where_clause, fields) = data_union(input, &mut attrs)?;
Ok(DeriveInput {
attrs,
vis,
@@ -161,6 +163,7 @@ pub mod parsing {

pub fn data_struct(
input: ParseStream,
attrs: &mut Vec<Attribute>,
) -> Result<(Option<WhereClause>, Fields, Option<Token![;]>)> {
let mut lookahead = input.lookahead1();
let mut where_clause = None;
@@ -185,7 +188,7 @@ pub mod parsing {
Err(lookahead.error())
}
} else if lookahead.peek(token::Brace) {
let fields = input.parse()?;
let fields = data::parsing::parse_braced(input, attrs)?;
Ok((where_clause, Fields::Named(fields), None))
} else if lookahead.peek(Token![;]) {
let semi = input.parse()?;
@@ -197,6 +200,7 @@ pub mod parsing {

pub fn data_enum(
input: ParseStream,
attrs: &mut Vec<Attribute>,
) -> Result<(
Option<WhereClause>,
token::Brace,
@@ -206,14 +210,18 @@ pub mod parsing {

let content;
let brace = braced!(content in input);
attr::parsing::parse_inner(&content, attrs)?;
let variants = content.parse_terminated(Variant::parse)?;

Ok((where_clause, brace, variants))
}

pub fn data_union(input: ParseStream) -> Result<(Option<WhereClause>, FieldsNamed)> {
pub fn data_union(
input: ParseStream,
attrs: &mut Vec<Attribute>,
) -> Result<(Option<WhereClause>, FieldsNamed)> {
let where_clause = input.parse()?;
let fields = input.parse()?;
let fields = data::parsing::parse_braced(input, attrs)?;
Ok((where_clause, fields))
}
}
@@ -221,13 +229,12 @@ pub mod parsing {
#[cfg(feature = "printing")]
mod printing {
use super::*;

use crate::attr::FilterAttrs;
use crate::print::TokensOrDefault;
use proc_macro2::TokenStream;
use quote::ToTokens;

use crate::attr::FilterAttrs;
use crate::print::TokensOrDefault;

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for DeriveInput {
fn to_tokens(&self, tokens: &mut TokenStream) {
for attr in self.attrs.outer() {
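The `DeriveInput` changes above thread the collected attribute list into `data_struct`, `data_enum`, and `data_union`, so inner attributes written inside the item body are gathered onto `DeriveInput::attrs` as well. For orientation, a minimal derive-macro skeleton that consumes a `DeriveInput` (the macro and trait names are placeholders, not from this commit):

```rust
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};

#[proc_macro_derive(MyTrait)]
pub fn derive_my_trait(input: TokenStream) -> TokenStream {
    // Parse the annotated item; attrs, generics, and data are all available here.
    let input = parse_macro_input!(input as DeriveInput);
    let name = &input.ident;
    let expanded = quote! {
        impl MyTrait for #name {}
    };
    expanded.into()
}
```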
third_party/rust/syn/src/discouraged.rs (vendored): 2 lines changed

@@ -189,6 +189,6 @@ impl<'a> Speculative for ParseBuffer<'a> {

// See comment on `cell` in the struct definition.
self.cell
.set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
.set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) });
}
}
third_party/rust/syn/src/error.rs (vendored): 66 lines changed

@@ -1,17 +1,15 @@
use std::fmt::{self, Debug, Display};
use std::iter::FromIterator;
use std::slice;
use std::vec;

#[cfg(feature = "parsing")]
use crate::buffer::Cursor;
use crate::thread::ThreadBound;
use proc_macro2::{
Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
};
#[cfg(feature = "printing")]
use quote::ToTokens;

#[cfg(feature = "parsing")]
use crate::buffer::Cursor;
use crate::thread::ThreadBound;
use std::fmt::{self, Debug, Display};
use std::iter::FromIterator;
use std::slice;
use std::vec;

/// The result of a Syn parser.
pub type Result<T> = std::result::Result<T, Error>;
@@ -25,7 +23,7 @@ pub type Result<T> = std::result::Result<T, Error>;
/// [`compile_error!`] in the generated code. This produces a better diagnostic
/// message than simply panicking the macro.
///
/// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
/// [`compile_error!`]: std::compile_error!
///
/// When parsing macro input, the [`parse_macro_input!`] macro handles the
/// conversion to `compile_error!` automatically.
@@ -191,7 +189,7 @@ impl Error {
/// The [`parse_macro_input!`] macro provides a convenient way to invoke
/// this method correctly in a procedural macro.
///
/// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
/// [`compile_error!`]: std::compile_error!
pub fn to_compile_error(&self) -> TokenStream {
self.messages
.iter()
@@ -199,10 +197,46 @@ impl Error {
.collect()
}

/// Render the error as an invocation of [`compile_error!`].
///
/// [`compile_error!`]: std::compile_error!
///
/// # Example
///
/// ```
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use syn::{parse_macro_input, DeriveInput, Error};
///
/// # const _: &str = stringify! {
/// #[proc_macro_derive(MyTrait)]
/// # };
/// pub fn derive_my_trait(input: TokenStream) -> TokenStream {
///     let input = parse_macro_input!(input as DeriveInput);
///     my_trait::expand(input)
///         .unwrap_or_else(Error::into_compile_error)
///         .into()
/// }
///
/// mod my_trait {
///     use proc_macro2::TokenStream;
///     use syn::{DeriveInput, Result};
///
///     pub(crate) fn expand(input: DeriveInput) -> Result<TokenStream> {
///         /* ... */
///         # unimplemented!()
///     }
/// }
/// ```
pub fn into_compile_error(self) -> TokenStream {
self.to_compile_error()
}

/// Add another error message to self such that when `to_compile_error()` is
/// called, both errors will be emitted together.
pub fn combine(&mut self, another: Error) {
self.messages.extend(another.messages)
self.messages.extend(another.messages);
}
}

@@ -311,15 +345,11 @@ impl Clone for ErrorMessage {
}
}

impl std::error::Error for Error {
fn description(&self) -> &str {
"parse error"
}
}
impl std::error::Error for Error {}

impl From<LexError> for Error {
fn from(err: LexError) -> Self {
Error::new(Span::call_site(), format!("{:?}", err))
Error::new(err.span(), "lex error")
}
}
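The `combine` method touched above is what lets a macro report several problems in one compiler pass instead of stopping at the first. A sketch of the usual accumulation pattern; the validation rule here is invented purely for illustration:

```rust
use syn::{Error, Field, Result};

// Validate every field and report all failures together.
fn check_fields<'a>(fields: impl Iterator<Item = &'a Field>) -> Result<()> {
    let mut error: Option<Error> = None;
    for field in fields {
        if field.ident.is_none() {
            let e = Error::new_spanned(field, "tuple fields are not supported");
            match &mut error {
                Some(existing) => existing.combine(e),
                None => error = Some(e),
            }
        }
    }
    match error {
        Some(e) => Err(e),
        None => Ok(()),
    }
}
```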
third_party/rust/syn/src/export.rs (vendored): 2 lines changed

@@ -33,3 +33,5 @@ mod help {
pub type Bool = bool;
pub type Str = str;
}

pub struct private(pub(crate) ());
third_party/rust/syn/src/expr.rs (vendored): 623 lines changed
File diff suppressed because it is too large
third_party/rust/syn/src/ext.rs (vendored): 6 lines changed

@@ -2,14 +2,12 @@
//!
//! *This module is available only if Syn is built with the `"parsing"` feature.*

use proc_macro2::Ident;

use crate::parse::{ParseStream, Result};

use crate::buffer::Cursor;
use crate::parse::Peek;
use crate::parse::{ParseStream, Result};
use crate::sealed::lookahead;
use crate::token::CustomToken;
use proc_macro2::Ident;

/// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro.
///
third_party/rust/syn/src/file.rs (vendored): 38 lines changed

@@ -51,24 +51,33 @@ ast_struct! {
/// shebang: None,
/// attrs: [],
/// items: [
/// ExternCrate(
/// ItemExternCrate {
/// Use(
/// ItemUse {
/// attrs: [],
/// vis: Inherited,
/// extern_token: Extern,
/// crate_token: Crate,
/// ident: Ident {
/// term: Term(
/// "syn"
/// ),
/// span: Span
/// },
/// rename: None,
/// semi_token: Semi
/// }
/// use_token: Use,
/// leading_colon: None,
/// tree: Path(
/// UsePath {
/// ident: Ident(
/// std,
/// ),
/// colon2_token: Colon2,
/// tree: Name(
/// UseName {
/// ident: Ident(
/// env,
/// ),
/// },
/// ),
/// },
/// ),
/// semi_token: Semi,
/// },
/// ),
/// ...
/// ```
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
pub struct File {
pub shebang: Option<String>,
pub attrs: Vec<Attribute>,
@@ -79,9 +88,9 @@ ast_struct! {
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;

use crate::parse::{Parse, ParseStream, Result};

#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
impl Parse for File {
fn parse(input: ParseStream) -> Result<Self> {
Ok(File {
@@ -106,6 +115,7 @@ mod printing {
use proc_macro2::TokenStream;
use quote::{ToTokens, TokenStreamExt};

#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
impl ToTokens for File {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.inner());
third_party/rust/syn/src/gen/clone.rs (vendored): 183 lines changed
File diff suppressed because it is too large

third_party/rust/syn/src/gen/debug.rs (vendored): 178 lines changed
File diff suppressed because it is too large

third_party/rust/syn/src/gen/eq.rs (vendored): 358 lines changed
File diff suppressed because it is too large

third_party/rust/syn/src/gen/hash.rs (vendored): 176 lines changed
File diff suppressed because it is too large

third_party/rust/syn/src/gen/visit.rs (vendored): 756 lines changed
File diff suppressed because it is too large

third_party/rust/syn/src/gen/visit_mut.rs (vendored): 756 lines changed
File diff suppressed because it is too large
third_party/rust/syn/src/generics.rs (vendored): 123 lines changed

@@ -11,6 +11,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
/// feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct Generics {
pub lt_token: Option<Token![<]>,
pub params: Punctuated<GenericParam, Token![,]>,
@@ -30,7 +31,8 @@ ast_enum_of_structs! {
///
/// This type is a [syntax tree enum].
///
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
/// [syntax tree enum]: Expr#syntax-tree-enums
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub enum GenericParam {
/// A generic type parameter: `T: Into<String>`.
Type(TypeParam),
@@ -48,6 +50,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct TypeParam {
pub attrs: Vec<Attribute>,
pub ident: Ident,
@@ -63,6 +66,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct LifetimeDef {
pub attrs: Vec<Attribute>,
pub lifetime: Lifetime,
@@ -76,6 +80,7 @@ ast_struct! {
///
/// *This type is available only if Syn is built with the `"derive"` or
/// `"full"` feature.*
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
pub struct ConstParam {
pub attrs: Vec<Attribute>,
pub const_token: Token![const],
@@ -167,17 +172,10 @@ impl Generics {

/// Initializes an empty `where`-clause if there is not one present already.
pub fn make_where_clause(&mut self) -> &mut WhereClause {
// This is Option::get_or_insert_with in Rust 1.20.
if self.where_clause.is_none() {
self.where_clause = Some(WhereClause {
where_token: <Token![where]>::default(),
predicates: Punctuated::new(),
});
}
match &mut self.where_clause {
Some(where_clause) => where_clause,
None => unreachable!(),
}
self.where_clause.get_or_insert_with(|| WhereClause {
where_token: <Token![where]>::default(),
predicates: Punctuated::new(),
})
}
}
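The `make_where_clause` rewrite above replaces the manual is_none/insert/match sequence with `Option::get_or_insert_with`, which has the same semantics. A standalone illustration of that std method:

```rust
fn main() {
    let mut cached: Option<Vec<u32>> = None;

    // Insert a default value on first access, then return a mutable reference.
    let v: &mut Vec<u32> = cached.get_or_insert_with(Vec::new);
    v.push(1);

    // The closure is not run again once the Option is already Some.
    cached.get_or_insert_with(|| unreachable!()).push(2);

    assert_eq!(cached, Some(vec![1, 2]));
}
```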
||||
@ -294,6 +292,10 @@ impl<'a> Iterator for ConstParamsMut<'a> {
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature and the `"printing"` feature.*
|
||||
#[cfg(feature = "printing")]
|
||||
#[cfg_attr(
|
||||
doc_cfg,
|
||||
doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
|
||||
)]
|
||||
pub struct ImplGenerics<'a>(&'a Generics);
|
||||
|
||||
/// Returned by `Generics::split_for_impl`.
|
||||
@ -301,6 +303,10 @@ pub struct ImplGenerics<'a>(&'a Generics);
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature and the `"printing"` feature.*
|
||||
#[cfg(feature = "printing")]
|
||||
#[cfg_attr(
|
||||
doc_cfg,
|
||||
doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
|
||||
)]
|
||||
pub struct TypeGenerics<'a>(&'a Generics);
|
||||
|
||||
/// Returned by `TypeGenerics::as_turbofish`.
|
||||
@ -308,6 +314,10 @@ pub struct TypeGenerics<'a>(&'a Generics);
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature and the `"printing"` feature.*
|
||||
#[cfg(feature = "printing")]
|
||||
#[cfg_attr(
|
||||
doc_cfg,
|
||||
doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
|
||||
)]
|
||||
pub struct Turbofish<'a>(&'a Generics);
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
@ -333,6 +343,10 @@ impl Generics {
|
||||
///
|
||||
/// *This method is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature and the `"printing"` feature.*
|
||||
#[cfg_attr(
|
||||
doc_cfg,
|
||||
doc(cfg(all(any(feature = "full", feature = "derive"), feature = "printing")))
|
||||
)]
|
||||
pub fn split_for_impl(&self) -> (ImplGenerics, TypeGenerics, Option<&WhereClause>) {
|
||||
(
|
||||
ImplGenerics(self),
|
||||
@ -346,6 +360,7 @@ impl Generics {
|
||||
macro_rules! generics_wrapper_impls {
|
||||
($ty:ident) => {
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl<'a> Clone for $ty<'a> {
|
||||
fn clone(&self) -> Self {
|
||||
$ty(self.0)
|
||||
@ -353,6 +368,7 @@ macro_rules! generics_wrapper_impls {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl<'a> Debug for $ty<'a> {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter
|
||||
@ -363,9 +379,11 @@ macro_rules! generics_wrapper_impls {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl<'a> Eq for $ty<'a> {}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl<'a> PartialEq for $ty<'a> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.0 == other.0
|
||||
@ -373,6 +391,7 @@ macro_rules! generics_wrapper_impls {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl<'a> Hash for $ty<'a> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.0.hash(state);
|
||||
@ -404,6 +423,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct BoundLifetimes {
|
||||
pub for_token: Token![for],
|
||||
pub lt_token: Token![<],
|
||||
@ -452,6 +472,7 @@ ast_enum_of_structs! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum TypeParamBound {
|
||||
Trait(TraitBound),
|
||||
Lifetime(Lifetime),
|
||||
@ -463,6 +484,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TraitBound {
|
||||
pub paren_token: Option<token::Paren>,
|
||||
pub modifier: TraitBoundModifier,
|
||||
@ -479,6 +501,7 @@ ast_enum! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum TraitBoundModifier {
|
||||
None,
|
||||
Maybe(Token![?]),
|
||||
@ -491,6 +514,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct WhereClause {
|
||||
pub where_token: Token![where],
|
||||
pub predicates: Punctuated<WherePredicate, Token![,]>,
|
||||
@ -507,7 +531,8 @@ ast_enum_of_structs! {
|
||||
///
|
||||
/// This type is a [syntax tree enum].
|
||||
///
|
||||
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
||||
/// [syntax tree enum]: Expr#syntax-tree-enums
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum WherePredicate {
|
||||
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
|
||||
Type(PredicateType),
|
||||
@ -525,6 +550,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct PredicateType {
|
||||
/// Any lifetimes from a `for` binding
|
||||
pub lifetimes: Option<BoundLifetimes>,
|
||||
@ -541,6 +567,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct PredicateLifetime {
|
||||
pub lifetime: Lifetime,
|
||||
pub colon_token: Token![:],
|
||||
@ -553,6 +580,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct PredicateEq {
|
||||
pub lhs_ty: Type,
|
||||
pub eq_token: Token![=],
|
||||
@ -563,9 +591,10 @@ ast_struct! {
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
|
||||
use crate::ext::IdentExt;
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Generics {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if !input.peek(Token![<]) {
|
||||
@ -575,7 +604,6 @@ pub mod parsing {
|
||||
let lt_token: Token![<] = input.parse()?;
|
||||
|
||||
let mut params = Punctuated::new();
|
||||
let mut allow_lifetime_param = true;
|
||||
loop {
|
||||
if input.peek(Token![>]) {
|
||||
break;
|
||||
@ -583,23 +611,30 @@ pub mod parsing {
|
||||
|
||||
let attrs = input.call(Attribute::parse_outer)?;
|
||||
let lookahead = input.lookahead1();
|
||||
if allow_lifetime_param && lookahead.peek(Lifetime) {
|
||||
if lookahead.peek(Lifetime) {
|
||||
params.push_value(GenericParam::Lifetime(LifetimeDef {
|
||||
attrs,
|
||||
..input.parse()?
|
||||
}));
|
||||
} else if lookahead.peek(Ident) {
|
||||
allow_lifetime_param = false;
|
||||
params.push_value(GenericParam::Type(TypeParam {
|
||||
attrs,
|
||||
..input.parse()?
|
||||
}));
|
||||
} else if lookahead.peek(Token![const]) {
|
||||
allow_lifetime_param = false;
|
||||
params.push_value(GenericParam::Const(ConstParam {
|
||||
attrs,
|
||||
..input.parse()?
|
||||
}));
|
||||
} else if input.peek(Token![_]) {
|
||||
params.push_value(GenericParam::Type(TypeParam {
|
||||
attrs,
|
||||
ident: input.call(Ident::parse_any)?,
|
||||
colon_token: None,
|
||||
bounds: Punctuated::new(),
|
||||
eq_token: None,
|
||||
default: None,
|
||||
}));
|
||||
} else {
|
||||
return Err(lookahead.error());
|
||||
}
|
||||
@ -622,6 +657,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for GenericParam {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let attrs = input.call(Attribute::parse_outer)?;
|
||||
@ -648,6 +684,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for LifetimeDef {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let has_colon;
|
||||
@ -685,6 +722,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for BoundLifetimes {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(BoundLifetimes {
|
||||
@ -706,6 +744,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Option<BoundLifetimes> {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if input.peek(Token![for]) {
|
||||
@ -716,6 +755,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeParam {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let attrs = input.call(Attribute::parse_outer)?;
|
||||
@ -769,6 +809,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeParamBound {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if input.peek(Lifetime) {
|
||||
@ -787,6 +828,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TraitBound {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let modifier: TraitBoundModifier = input.parse()?;
|
||||
@ -807,6 +849,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TraitBoundModifier {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if input.peek(Token![?]) {
|
||||
@ -817,6 +860,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for ConstParam {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let mut default = None;
|
||||
@ -829,7 +873,7 @@ pub mod parsing {
|
||||
eq_token: {
|
||||
if input.peek(Token![=]) {
|
||||
let eq_token = input.parse()?;
|
||||
default = Some(input.parse::<Expr>()?);
|
||||
default = Some(path::parsing::const_argument(input)?);
|
||||
Some(eq_token)
|
||||
} else {
|
||||
None
|
||||
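Above, the default of a const parameter is now parsed with `path::parsing::const_argument` instead of a full `Expr`, matching the restricted grammar allowed in const generic positions. A hedged sketch of parsing such a parameter, assuming default crate features:

```rust
use syn::ConstParam;

fn main() -> syn::Result<()> {
    // `const N: usize = 16` carries a default value; the default is parsed
    // with the same restricted grammar used for const generic arguments.
    let param: ConstParam = syn::parse_str("const N: usize = 16")?;
    assert!(param.eq_token.is_some() && param.default.is_some());
    Ok(())
}
```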
@ -840,6 +884,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for WhereClause {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(WhereClause {
|
||||
@ -870,6 +915,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Option<WhereClause> {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if input.peek(Token![where]) {
|
||||
@ -880,6 +926,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for WherePredicate {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if input.peek(Lifetime) && input.peek2(Token![:]) {
|
||||
@ -945,15 +992,14 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
|
||||
use crate::attr::FilterAttrs;
|
||||
use crate::print::TokensOrDefault;
|
||||
use proc_macro2::TokenStream;
|
||||
#[cfg(feature = "full")]
|
||||
use proc_macro2::TokenTree;
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
use crate::attr::FilterAttrs;
|
||||
use crate::print::TokensOrDefault;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Generics {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
if self.params.is_empty() {
|
||||
@ -1103,6 +1149,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for BoundLifetimes {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.for_token.to_tokens(tokens);
|
||||
@ -1112,6 +1159,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for LifetimeDef {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -1123,6 +1171,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeParam {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -1136,14 +1185,21 @@ mod printing {
|
||||
{
|
||||
if self.eq_token.is_none() {
|
||||
if let Type::Verbatim(default) = default {
|
||||
let mut iter = default.clone().into_iter();
|
||||
match (iter.next(), iter.next()) {
|
||||
(Some(TokenTree::Punct(ref q)), Some(TokenTree::Ident(ref c)))
|
||||
if q.as_char() == '?' && c == "const" =>
|
||||
{
|
||||
return default.to_tokens(tokens);
|
||||
let mut iter = default.clone().into_iter().peekable();
|
||||
while let Some(token) = iter.next() {
|
||||
if let TokenTree::Punct(q) = token {
|
||||
if q.as_char() == '?' {
|
||||
if let Some(TokenTree::Ident(c)) = iter.peek() {
|
||||
if c == "const" {
|
||||
if self.bounds.is_empty() {
|
||||
TokensOrDefault(&self.colon_token)
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
return default.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1154,6 +1210,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TraitBound {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let to_tokens = |tokens: &mut TokenStream| {
|
||||
@ -1168,6 +1225,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TraitBoundModifier {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
@ -1177,6 +1235,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for ConstParam {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -1191,6 +1250,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for WhereClause {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
if !self.predicates.is_empty() {
|
||||
@ -1200,6 +1260,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PredicateType {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.lifetimes.to_tokens(tokens);
|
||||
@ -1209,6 +1270,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PredicateLifetime {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.lifetime.to_tokens(tokens);
|
||||
@ -1217,6 +1279,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PredicateEq {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.lhs_ty.to_tokens(tokens);
|
||||
|
24
third_party/rust/syn/src/group.rs
vendored
@ -1,8 +1,7 @@
|
||||
use proc_macro2::{Delimiter, Span};
|
||||
|
||||
use crate::error::Result;
|
||||
use crate::parse::ParseBuffer;
|
||||
use crate::token;
|
||||
use proc_macro2::{Delimiter, Span};
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
@ -134,15 +133,16 @@ fn parse_delimited<'a>(
|
||||
/// # }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
macro_rules! parenthesized {
|
||||
($content:ident in $cursor:expr) => {
|
||||
match $crate::group::parse_parens(&$cursor) {
|
||||
$crate::export::Ok(parens) => {
|
||||
$crate::__private::Ok(parens) => {
|
||||
$content = parens.content;
|
||||
parens.token
|
||||
}
|
||||
$crate::export::Err(error) => {
|
||||
return $crate::export::Err(error);
|
||||
$crate::__private::Err(error) => {
|
||||
return $crate::__private::Err(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -211,15 +211,16 @@ macro_rules! parenthesized {
|
||||
/// # }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
macro_rules! braced {
|
||||
($content:ident in $cursor:expr) => {
|
||||
match $crate::group::parse_braces(&$cursor) {
|
||||
$crate::export::Ok(braces) => {
|
||||
$crate::__private::Ok(braces) => {
|
||||
$content = braces.content;
|
||||
braces.token
|
||||
}
|
||||
$crate::export::Err(error) => {
|
||||
return $crate::export::Err(error);
|
||||
$crate::__private::Err(error) => {
|
||||
return $crate::__private::Err(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -265,15 +266,16 @@ macro_rules! braced {
|
||||
/// # }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
macro_rules! bracketed {
|
||||
($content:ident in $cursor:expr) => {
|
||||
match $crate::group::parse_brackets(&$cursor) {
|
||||
$crate::export::Ok(brackets) => {
|
||||
$crate::__private::Ok(brackets) => {
|
||||
$content = brackets.content;
|
||||
brackets.token
|
||||
}
|
||||
$crate::export::Err(error) => {
|
||||
return $crate::export::Err(error);
|
||||
$crate::__private::Err(error) => {
|
||||
return $crate::__private::Err(error);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
1
third_party/rust/syn/src/ident.rs
vendored
@ -36,6 +36,7 @@ fn accept_as_ident(ident: &Ident) -> bool {
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Ident {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| {
|
||||
|
553
third_party/rust/syn/src/item.rs
vendored
File diff suppressed because it is too large
58
third_party/rust/syn/src/lib.rs
vendored
@ -1,4 +1,4 @@
|
||||
//! [![github]](https://github.com/dtolnay/syn) [![crates-io]](https://crates.io/crates/syn) [![docs-rs]](https://docs.rs/syn)
|
||||
//! [![github]](https://github.com/dtolnay/syn) [![crates-io]](https://crates.io/crates/syn) [![docs-rs]](crate)
|
||||
//!
|
||||
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
|
||||
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
|
||||
@ -39,12 +39,12 @@
|
||||
//! procedural macros enable only what they need, and do not pay in compile
|
||||
//! time for all the rest.
|
||||
//!
|
||||
//! [`syn::File`]: struct.File.html
|
||||
//! [`syn::Item`]: enum.Item.html
|
||||
//! [`syn::Expr`]: enum.Expr.html
|
||||
//! [`syn::Type`]: enum.Type.html
|
||||
//! [`syn::DeriveInput`]: struct.DeriveInput.html
|
||||
//! [parser functions]: parse/index.html
|
||||
//! [`syn::File`]: File
|
||||
//! [`syn::Item`]: Item
|
||||
//! [`syn::Expr`]: Expr
|
||||
//! [`syn::Type`]: Type
|
||||
//! [`syn::DeriveInput`]: DeriveInput
|
||||
//! [parser functions]: mod@parse
|
||||
//!
|
||||
//! <br>
|
||||
//!
|
||||
@ -58,7 +58,7 @@
|
||||
//! tokens, then hand some tokens back to the compiler to compile into the
|
||||
//! user's crate.
|
||||
//!
|
||||
//! [`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
|
||||
//! [`TokenStream`]: proc_macro::TokenStream
|
||||
//!
|
||||
//! ```toml
|
||||
//! [dependencies]
|
||||
@ -250,46 +250,49 @@
|
||||
//! dynamic library libproc_macro from rustc toolchain.
|
||||
|
||||
// Syn types in rustdoc of other crates get linked to here.
|
||||
#![doc(html_root_url = "https://docs.rs/syn/1.0.40")]
|
||||
#![deny(clippy::all, clippy::pedantic)]
|
||||
#![doc(html_root_url = "https://docs.rs/syn/1.0.73")]
|
||||
#![cfg_attr(doc_cfg, feature(doc_cfg))]
|
||||
#![allow(non_camel_case_types)]
|
||||
// Ignored clippy lints.
|
||||
#![allow(
|
||||
clippy::blocks_in_if_conditions,
|
||||
clippy::cognitive_complexity,
|
||||
clippy::doc_markdown,
|
||||
clippy::eval_order_dependence,
|
||||
clippy::inherent_to_string,
|
||||
clippy::large_enum_variant,
|
||||
clippy::manual_non_exhaustive,
|
||||
clippy::match_like_matches_macro,
|
||||
clippy::manual_map, // https://github.com/rust-lang/rust-clippy/issues/6795
|
||||
clippy::match_on_vec_items,
|
||||
clippy::missing_panics_doc,
|
||||
clippy::needless_doctest_main,
|
||||
clippy::needless_pass_by_value,
|
||||
clippy::never_loop,
|
||||
clippy::suspicious_op_assign_impl,
|
||||
clippy::too_many_arguments,
|
||||
clippy::trivially_copy_pass_by_ref,
|
||||
clippy::unnecessary_unwrap
|
||||
clippy::unnecessary_unwrap,
|
||||
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/6983
|
||||
clippy::wrong_self_convention
|
||||
)]
|
||||
// Ignored clippy_pedantic lints.
|
||||
#![allow(
|
||||
clippy::cast_possible_truncation,
|
||||
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/7127
|
||||
clippy::cloned_instead_of_copied,
|
||||
clippy::default_trait_access,
|
||||
clippy::empty_enum,
|
||||
clippy::expl_impl_clone_on_copy,
|
||||
clippy::if_not_else,
|
||||
clippy::items_after_statements,
|
||||
clippy::match_same_arms,
|
||||
// clippy bug: https://github.com/rust-lang/rust-clippy/issues/6984
|
||||
clippy::match_wildcard_for_single_variants,
|
||||
clippy::missing_errors_doc,
|
||||
clippy::module_name_repetitions,
|
||||
clippy::must_use_candidate,
|
||||
clippy::option_if_let_else,
|
||||
clippy::redundant_else,
|
||||
clippy::shadow_unrelated,
|
||||
clippy::similar_names,
|
||||
clippy::single_match_else,
|
||||
clippy::too_many_lines,
|
||||
clippy::unseparated_literal_suffix,
|
||||
clippy::use_self,
|
||||
clippy::used_underscore_binding,
|
||||
clippy::wildcard_imports
|
||||
)]
|
||||
@ -434,8 +437,10 @@ pub use crate::path::{
|
||||
};
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub mod buffer;
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub mod ext;
|
||||
pub mod punctuated;
|
||||
#[cfg(all(any(feature = "full", feature = "derive"), feature = "extra-traits"))]
|
||||
@ -456,6 +461,7 @@ pub mod parse_quote;
|
||||
pub mod parse_macro_input;
|
||||
|
||||
#[cfg(all(feature = "parsing", feature = "printing"))]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
|
||||
pub mod spanned;
|
||||
|
||||
#[cfg(all(feature = "parsing", feature = "full"))]
|
||||
@ -579,6 +585,7 @@ mod gen {
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg(feature = "visit")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "visit")))]
|
||||
#[rustfmt::skip]
|
||||
pub mod visit;
|
||||
|
||||
@ -675,6 +682,7 @@ mod gen {
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg(feature = "visit-mut")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "visit-mut")))]
|
||||
#[rustfmt::skip]
|
||||
pub mod visit_mut;
|
||||
|
||||
@ -761,6 +769,7 @@ mod gen {
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg(feature = "fold")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "fold")))]
|
||||
#[rustfmt::skip]
|
||||
pub mod fold;
|
||||
|
||||
@ -788,7 +797,8 @@ pub use crate::gen::*;
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
pub mod export;
|
||||
#[path = "export.rs"]
|
||||
pub mod __private;
|
||||
|
||||
mod custom_keyword;
|
||||
mod custom_punctuation;
|
||||
@ -800,6 +810,7 @@ mod thread;
|
||||
mod lookahead;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub mod parse;
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
@ -811,10 +822,9 @@ mod verbatim;
|
||||
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
|
||||
mod print;
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
use crate::__private::private;
|
||||
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
struct private;
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// https://github.com/rust-lang/rust/issues/62830
|
||||
#[cfg(feature = "parsing")]
|
||||
@ -873,6 +883,7 @@ pub use crate::error::{Error, Result};
|
||||
feature = "parsing",
|
||||
feature = "proc-macro"
|
||||
))]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
|
||||
pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
|
||||
parse::Parser::parse(T::parse, tokens)
|
||||
}
|
||||
@ -889,6 +900,7 @@ pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"` feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
|
||||
parse::Parser::parse2(T::parse, tokens)
|
||||
}
|
||||
@ -917,6 +929,7 @@ pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
|
||||
/// # run().unwrap();
|
||||
/// ```
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
|
||||
parse::Parser::parse_str(T::parse, s)
|
||||
}
|
||||
@ -959,6 +972,7 @@ pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
|
||||
/// # run().unwrap();
|
||||
/// ```
|
||||
#[cfg(all(feature = "parsing", feature = "full"))]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "full"))))]
|
||||
pub fn parse_file(mut content: &str) -> Result<File> {
|
||||
// Strip the BOM if it is present
|
||||
const BOM: &str = "\u{feff}";
|
||||
|
23
third_party/rust/syn/src/lifetime.rs
vendored
@ -1,9 +1,8 @@
|
||||
use proc_macro2::{Ident, Span};
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt::{self, Display};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
use proc_macro2::{Ident, Span};
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::lookahead;
|
||||
|
||||
@ -17,9 +16,6 @@ use crate::lookahead;
|
||||
/// the XID_Start property.
|
||||
/// - All following characters must be Unicode code points with the XID_Continue
|
||||
/// property.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
pub struct Lifetime {
|
||||
pub apostrophe: Span,
|
||||
pub ident: Ident,
|
||||
@ -61,6 +57,17 @@ impl Lifetime {
|
||||
ident: Ident::new(&symbol[1..], span),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.apostrophe
|
||||
.join(self.ident.span())
|
||||
.unwrap_or(self.apostrophe)
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.apostrophe = span;
|
||||
self.ident.set_span(span);
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Lifetime {
|
||||
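The new `span` and `set_span` methods above try to join the apostrophe's span with the identifier's span and fall back to the apostrophe alone, since `Span::join` returns `None` on stable toolchains. A small usage sketch, assuming `proc-macro2` is available alongside `syn`:

```rust
use proc_macro2::Span;
use syn::Lifetime;

fn main() {
    let mut lifetime = Lifetime::new("'static", Span::call_site());
    // Joining spans only succeeds with nightly span APIs; otherwise the
    // apostrophe's span is returned as-is.
    let _span = lifetime.span();
    lifetime.set_span(Span::call_site());
}
```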
@ -101,7 +108,7 @@ impl Ord for Lifetime {
|
||||
|
||||
impl Hash for Lifetime {
|
||||
fn hash<H: Hasher>(&self, h: &mut H) {
|
||||
self.ident.hash(h)
|
||||
self.ident.hash(h);
|
||||
}
|
||||
}
|
||||
|
||||
@ -115,9 +122,9 @@ pub fn Lifetime(marker: lookahead::TokenMarker) -> Lifetime {
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Lifetime {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| {
|
||||
@ -132,10 +139,10 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
|
||||
use proc_macro2::{Punct, Spacing, TokenStream};
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Lifetime {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let mut apostrophe = Punct::new('\'', Spacing::Joint);
|
||||
|
247
third_party/rust/syn/src/lit.rs
vendored
@ -1,23 +1,18 @@
|
||||
use proc_macro2::{Literal, Span};
|
||||
use std::fmt::{self, Display};
|
||||
use std::str::{self, FromStr};
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
use proc_macro2::Ident;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
use proc_macro2::TokenTree;
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::lookahead;
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::parse::{Parse, Parser};
|
||||
use crate::{Error, Result};
|
||||
#[cfg(feature = "printing")]
|
||||
use proc_macro2::Ident;
|
||||
#[cfg(feature = "parsing")]
|
||||
use proc_macro2::TokenStream;
|
||||
use proc_macro2::TokenTree;
|
||||
use proc_macro2::{Literal, Span};
|
||||
use std::fmt::{self, Display};
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::str::{self, FromStr};
|
||||
|
||||
ast_enum_of_structs! {
|
||||
/// A Rust literal such as a string or integer or boolean.
|
||||
@ -26,10 +21,7 @@ ast_enum_of_structs! {
|
||||
///
|
||||
/// This type is a [syntax tree enum].
|
||||
///
|
||||
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
||||
//
|
||||
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
||||
// blocked on https://github.com/rust-lang/rust/issues/62833
|
||||
/// [syntax tree enum]: crate::Expr#syntax-tree-enums
|
||||
pub enum Lit {
|
||||
/// A UTF-8 string literal: `"foo"`.
|
||||
Str(LitStr),
|
||||
@ -48,7 +40,7 @@ ast_enum_of_structs! {
|
||||
|
||||
/// A floating point literal: `1f64` or `1.0e10f64`.
|
||||
///
|
||||
/// Must be finite. May not be infinte or NaN.
|
||||
/// Must be finite. May not be infinite or NaN.
|
||||
Float(LitFloat),
|
||||
|
||||
/// A boolean literal: `true` or `false`.
|
||||
@ -108,7 +100,7 @@ struct LitIntRepr {
|
||||
ast_struct! {
|
||||
/// A floating point literal: `1f64` or `1.0e10f64`.
|
||||
///
|
||||
/// Must be finite. May not be infinte or NaN.
|
||||
/// Must be finite. May not be infinite or NaN.
|
||||
pub struct LitFloat {
|
||||
repr: Box<LitFloatRepr>,
|
||||
}
|
||||
@ -178,6 +170,7 @@ impl LitStr {
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse<T: Parse>(&self) -> Result<T> {
|
||||
self.parse_with(T::parse)
|
||||
}
|
||||
@ -207,6 +200,7 @@ impl LitStr {
|
||||
/// # }
|
||||
/// ```
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
|
||||
use proc_macro2::Group;
|
||||
|
||||
@ -244,7 +238,7 @@ impl LitStr {
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.repr.token.set_span(span)
|
||||
self.repr.token.set_span(span);
|
||||
}
|
||||
|
||||
pub fn suffix(&self) -> &str {
|
||||
@ -275,7 +269,7 @@ impl LitByteStr {
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.repr.token.set_span(span)
|
||||
self.repr.token.set_span(span);
|
||||
}
|
||||
|
||||
pub fn suffix(&self) -> &str {
|
||||
@ -306,7 +300,7 @@ impl LitByte {
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.repr.token.set_span(span)
|
||||
self.repr.token.set_span(span);
|
||||
}
|
||||
|
||||
pub fn suffix(&self) -> &str {
|
||||
@ -337,7 +331,7 @@ impl LitChar {
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.repr.token.set_span(span)
|
||||
self.repr.token.set_span(span);
|
||||
}
|
||||
|
||||
pub fn suffix(&self) -> &str {
|
||||
@ -412,7 +406,7 @@ impl LitInt {
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.repr.token.set_span(span)
|
||||
self.repr.token.set_span(span);
|
||||
}
|
||||
}
|
||||
|
||||
@ -484,7 +478,7 @@ impl LitFloat {
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.repr.token.set_span(span)
|
||||
self.repr.token.set_span(span);
|
||||
}
|
||||
}
|
||||
|
||||
@ -511,11 +505,30 @@ impl Display for LitFloat {
|
||||
}
|
||||
}
|
||||
|
||||
impl LitBool {
|
||||
pub fn new(value: bool, span: Span) -> Self {
|
||||
LitBool { value, span }
|
||||
}
|
||||
|
||||
pub fn value(&self) -> bool {
|
||||
self.value
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.span = span;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
mod debug_impls {
|
||||
use super::*;
|
||||
use std::fmt::{self, Debug};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for LitStr {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter
|
||||
@ -525,6 +538,7 @@ mod debug_impls {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for LitByteStr {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter
|
||||
@ -534,6 +548,7 @@ mod debug_impls {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for LitByte {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter
|
||||
@ -543,6 +558,7 @@ mod debug_impls {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for LitChar {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter
|
||||
@ -552,6 +568,7 @@ mod debug_impls {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for LitInt {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter
|
||||
@ -561,6 +578,7 @@ mod debug_impls {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for LitFloat {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter
|
||||
@ -570,6 +588,7 @@ mod debug_impls {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for LitBool {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter
|
||||
@ -581,6 +600,7 @@ mod debug_impls {
|
||||
}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Clone for LitRepr {
|
||||
fn clone(&self) -> Self {
|
||||
LitRepr {
|
||||
@ -591,6 +611,7 @@ impl Clone for LitRepr {
|
||||
}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Clone for LitIntRepr {
|
||||
fn clone(&self) -> Self {
|
||||
LitIntRepr {
|
||||
@ -602,6 +623,7 @@ impl Clone for LitIntRepr {
|
||||
}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Clone for LitFloatRepr {
|
||||
fn clone(&self) -> Self {
|
||||
LitFloatRepr {
|
||||
@ -615,6 +637,7 @@ impl Clone for LitFloatRepr {
|
||||
macro_rules! lit_extra_traits {
|
||||
($ty:ident) => {
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Clone for $ty {
|
||||
fn clone(&self) -> Self {
|
||||
$ty {
|
||||
@ -624,6 +647,7 @@ macro_rules! lit_extra_traits {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl PartialEq for $ty {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.repr.token.to_string() == other.repr.token.to_string()
|
||||
@ -631,6 +655,7 @@ macro_rules! lit_extra_traits {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Hash for $ty {
|
||||
fn hash<H>(&self, state: &mut H)
|
||||
where
|
||||
@ -690,6 +715,7 @@ pub mod parsing {
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
use proc_macro2::Punct;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Lit {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| {
|
||||
@ -730,21 +756,19 @@ pub mod parsing {
|
||||
let mut repr = lit.to_string();
|
||||
repr.insert(0, '-');
|
||||
|
||||
if !(repr.ends_with("f32") || repr.ends_with("f64")) {
|
||||
if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
|
||||
if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
|
||||
token.set_span(span);
|
||||
return Some((
|
||||
Lit::Int(LitInt {
|
||||
repr: Box::new(LitIntRepr {
|
||||
token,
|
||||
digits,
|
||||
suffix,
|
||||
}),
|
||||
if let Some((digits, suffix)) = value::parse_lit_int(&repr) {
|
||||
if let Some(mut token) = value::to_literal(&repr, &digits, &suffix) {
|
||||
token.set_span(span);
|
||||
return Some((
|
||||
Lit::Int(LitInt {
|
||||
repr: Box::new(LitIntRepr {
|
||||
token,
|
||||
digits,
|
||||
suffix,
|
||||
}),
|
||||
rest,
|
||||
));
|
||||
}
|
||||
}),
|
||||
rest,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@ -763,71 +787,78 @@ pub mod parsing {
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for LitStr {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let head = input.fork();
|
||||
match input.parse()? {
|
||||
Lit::Str(lit) => Ok(lit),
|
||||
match input.parse() {
|
||||
Ok(Lit::Str(lit)) => Ok(lit),
|
||||
_ => Err(head.error("expected string literal")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for LitByteStr {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let head = input.fork();
|
||||
match input.parse()? {
|
||||
Lit::ByteStr(lit) => Ok(lit),
|
||||
match input.parse() {
|
||||
Ok(Lit::ByteStr(lit)) => Ok(lit),
|
||||
_ => Err(head.error("expected byte string literal")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for LitByte {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let head = input.fork();
|
||||
match input.parse()? {
|
||||
Lit::Byte(lit) => Ok(lit),
|
||||
match input.parse() {
|
||||
Ok(Lit::Byte(lit)) => Ok(lit),
|
||||
_ => Err(head.error("expected byte literal")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for LitChar {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let head = input.fork();
|
||||
match input.parse()? {
|
||||
Lit::Char(lit) => Ok(lit),
|
||||
match input.parse() {
|
||||
Ok(Lit::Char(lit)) => Ok(lit),
|
||||
_ => Err(head.error("expected character literal")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for LitInt {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let head = input.fork();
|
||||
match input.parse()? {
|
||||
Lit::Int(lit) => Ok(lit),
|
||||
match input.parse() {
|
||||
Ok(Lit::Int(lit)) => Ok(lit),
|
||||
_ => Err(head.error("expected integer literal")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for LitFloat {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let head = input.fork();
|
||||
match input.parse()? {
|
||||
Lit::Float(lit) => Ok(lit),
|
||||
match input.parse() {
|
||||
Ok(Lit::Float(lit)) => Ok(lit),
|
||||
_ => Err(head.error("expected floating point literal")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for LitBool {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let head = input.fork();
|
||||
match input.parse()? {
|
||||
Lit::Bool(lit) => Ok(lit),
|
||||
match input.parse() {
|
||||
Ok(Lit::Bool(lit)) => Ok(lit),
|
||||
_ => Err(head.error("expected boolean literal")),
|
||||
}
|
||||
}
|
||||
@ -840,42 +871,49 @@ mod printing {
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for LitStr {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.repr.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for LitByteStr {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.repr.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for LitByte {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.repr.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for LitChar {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.repr.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for LitInt {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.repr.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for LitFloat {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.repr.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for LitBool {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let s = if self.value { "true" } else { "false" };
|
||||
@ -925,16 +963,14 @@ mod value {
|
||||
});
|
||||
}
|
||||
b'0'..=b'9' | b'-' => {
|
||||
if !(repr.ends_with("f32") || repr.ends_with("f64")) {
|
||||
if let Some((digits, suffix)) = parse_lit_int(&repr) {
|
||||
return Lit::Int(LitInt {
|
||||
repr: Box::new(LitIntRepr {
|
||||
token,
|
||||
digits,
|
||||
suffix,
|
||||
}),
|
||||
});
|
||||
}
|
||||
if let Some((digits, suffix)) = parse_lit_int(&repr) {
|
||||
return Lit::Int(LitInt {
|
||||
repr: Box::new(LitIntRepr {
|
||||
token,
|
||||
digits,
|
||||
suffix,
|
||||
}),
|
||||
});
|
||||
}
|
||||
if let Some((digits, suffix)) = parse_lit_float(&repr) {
|
||||
return Lit::Float(LitFloat {
|
||||
@ -1289,32 +1325,33 @@ mod value {
|
||||
|
||||
fn backslash_u(mut s: &str) -> (char, &str) {
|
||||
if byte(s, 0) != b'{' {
|
||||
panic!("expected {{ after \\u");
|
||||
panic!("{}", "expected { after \\u");
|
||||
}
|
||||
s = &s[1..];
|
||||
|
||||
let mut ch = 0;
|
||||
for _ in 0..6 {
|
||||
let mut digits = 0;
|
||||
loop {
|
||||
let b = byte(s, 0);
|
||||
match b {
|
||||
b'0'..=b'9' => {
|
||||
ch *= 0x10;
|
||||
ch += u32::from(b - b'0');
|
||||
s = &s[1..];
|
||||
}
|
||||
b'a'..=b'f' => {
|
||||
ch *= 0x10;
|
||||
ch += u32::from(10 + b - b'a');
|
||||
s = &s[1..];
|
||||
}
|
||||
b'A'..=b'F' => {
|
||||
ch *= 0x10;
|
||||
ch += u32::from(10 + b - b'A');
|
||||
let digit = match b {
|
||||
b'0'..=b'9' => b - b'0',
|
||||
b'a'..=b'f' => 10 + b - b'a',
|
||||
b'A'..=b'F' => 10 + b - b'A',
|
||||
b'_' if digits > 0 => {
|
||||
s = &s[1..];
|
||||
continue;
|
||||
}
|
||||
b'}' if digits == 0 => panic!("invalid empty unicode escape"),
|
||||
b'}' => break,
|
||||
_ => panic!("unexpected non-hex character after \\u"),
|
||||
};
|
||||
if digits == 6 {
|
||||
panic!("overlong unicode escape (must have at most 6 hex digits)");
|
||||
}
|
||||
ch *= 0x10;
|
||||
ch += u32::from(digit);
|
||||
digits += 1;
|
||||
s = &s[1..];
|
||||
}
|
||||
assert!(byte(s, 0) == b'}');
|
||||
s = &s[1..];
|
||||
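The rewritten `backslash_u` above accepts underscores between hex digits, caps escapes at six digits, and rejects empty `\u{}` escapes, matching rustc's lexer. A sketch of the visible effect when reading a character literal's value (the underscore form previously panicked inside `value()`):

```rust
use syn::LitChar;

fn main() -> syn::Result<()> {
    // `\u{00_e9}` is U+00E9; underscores between hex digits are now unescaped
    // correctly, while more than six digits would be rejected with a panic.
    let lit: LitChar = syn::parse_str(r"'\u{00_e9}'")?;
    assert_eq!(lit.value(), 'é');
    Ok(())
}
```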
@ -1351,7 +1388,7 @@ mod value {
|
||||
};
|
||||
|
||||
let mut value = BigInt::new();
|
||||
loop {
|
||||
'outer: loop {
|
||||
let b = byte(s, 0);
|
||||
let digit = match b {
|
||||
b'0'..=b'9' => b - b'0',
|
||||
@ -1361,10 +1398,32 @@ mod value {
|
||||
s = &s[1..];
|
||||
continue;
|
||||
}
|
||||
// NOTE: Looking at a floating point literal, we don't want to
|
||||
// consider these integers.
|
||||
// If looking at a floating point literal, we don't want to
|
||||
// consider it an integer.
|
||||
b'.' if base == 10 => return None,
|
||||
b'e' | b'E' if base == 10 => return None,
|
||||
b'e' | b'E' if base == 10 => {
|
||||
let mut has_exp = false;
|
||||
for (i, b) in s[1..].bytes().enumerate() {
|
||||
match b {
|
||||
b'_' => {}
|
||||
b'-' | b'+' => return None,
|
||||
b'0'..=b'9' => has_exp = true,
|
||||
_ => {
|
||||
let suffix = &s[1 + i..];
|
||||
if has_exp && crate::ident::xid_ok(suffix) {
|
||||
return None;
|
||||
} else {
|
||||
break 'outer;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if has_exp {
|
||||
return None;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ => break,
|
||||
};
|
||||
|
||||
@ -1378,7 +1437,7 @@ mod value {
|
||||
}
|
||||
|
||||
let suffix = s;
|
||||
if suffix.is_empty() || crate::ident::xid_ok(&suffix) {
|
||||
if suffix.is_empty() || crate::ident::xid_ok(suffix) {
|
||||
let mut repr = value.to_string();
|
||||
if negative {
|
||||
repr.insert(0, '-');
|
||||
@ -1430,6 +1489,14 @@ mod value {
|
||||
bytes[write] = b'.';
|
||||
}
|
||||
b'e' | b'E' => {
|
||||
match bytes[read + 1..]
|
||||
.iter()
|
||||
.find(|b| **b != b'_')
|
||||
.unwrap_or(&b'\0')
|
||||
{
|
||||
b'-' | b'+' | b'0'..=b'9' => {}
|
||||
_ => break,
|
||||
}
|
||||
if has_e {
|
||||
if has_exponent {
|
||||
break;
|
||||
@ -1475,10 +1542,12 @@ mod value {
|
||||
|
||||
pub fn to_literal(repr: &str, digits: &str, suffix: &str) -> Option<Literal> {
|
||||
if repr.starts_with('-') {
|
||||
let f64_parse_finite = || digits.parse().ok().filter(|x: &f64| x.is_finite());
|
||||
let f32_parse_finite = || digits.parse().ok().filter(|x: &f32| x.is_finite());
|
||||
if suffix == "f64" {
|
||||
digits.parse().ok().map(Literal::f64_suffixed)
|
||||
f64_parse_finite().map(Literal::f64_suffixed)
|
||||
} else if suffix == "f32" {
|
||||
digits.parse().ok().map(Literal::f32_suffixed)
|
||||
f32_parse_finite().map(Literal::f32_suffixed)
|
||||
} else if suffix == "i64" {
|
||||
digits.parse().ok().map(Literal::i64_suffixed)
|
||||
} else if suffix == "i32" {
|
||||
@ -1490,7 +1559,7 @@ mod value {
|
||||
} else if !suffix.is_empty() {
|
||||
None
|
||||
} else if digits.contains('.') {
|
||||
digits.parse().ok().map(Literal::f64_unsuffixed)
|
||||
f64_parse_finite().map(Literal::f64_unsuffixed)
|
||||
} else {
|
||||
digits.parse().ok().map(Literal::i64_unsuffixed)
|
||||
}
|
||||
|
6
third_party/rust/syn/src/lookahead.rs
vendored
@ -1,12 +1,10 @@
|
||||
use std::cell::RefCell;
|
||||
|
||||
use proc_macro2::{Delimiter, Span};
|
||||
|
||||
use crate::buffer::Cursor;
|
||||
use crate::error::{self, Error};
|
||||
use crate::sealed::lookahead::Sealed;
|
||||
use crate::span::IntoSpans;
|
||||
use crate::token::Token;
|
||||
use proc_macro2::{Delimiter, Span};
|
||||
use std::cell::RefCell;
|
||||
|
||||
/// Support for checking the next token in a stream to decide how to parse.
|
||||
///
|
||||
|
7
third_party/rust/syn/src/mac.rs
vendored
@ -12,6 +12,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct Macro {
|
||||
pub path: Path,
|
||||
pub bang_token: Token![!],
|
||||
@ -25,6 +26,7 @@ ast_enum! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum MacroDelimiter {
|
||||
Paren(Paren),
|
||||
Brace(Brace),
|
||||
@ -132,6 +134,7 @@ impl Macro {
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_body<T: Parse>(&self) -> Result<T> {
|
||||
self.parse_body_with(T::parse)
|
||||
}
|
||||
@ -139,6 +142,7 @@ impl Macro {
|
||||
/// Parse the tokens within the macro invocation's delimiters using the
|
||||
/// given parser.
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_body_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
|
||||
let scope = delimiter_span_close(&self.delimiter);
|
||||
crate::parse::parse_scoped(parser, scope, self.tokens.clone())
|
||||
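`Macro::parse_body` and `parse_body_with`, now marked with `doc_cfg` above, re-parse the tokens between a macro invocation's delimiters and report errors against the closing delimiter's span. A short sketch using a made-up macro name, assuming default crate features:

```rust
use syn::{LitInt, Macro};

fn main() -> syn::Result<()> {
    // `my_macro` is an arbitrary path; only its argument tokens are re-parsed.
    let mac: Macro = syn::parse_str("my_macro!(42)")?;
    let lit: LitInt = mac.parse_body()?;
    assert_eq!(lit.base10_parse::<u32>()?, 42);
    Ok(())
}
```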
@ -168,9 +172,9 @@ pub fn parse_delimiter(input: ParseStream) -> Result<(MacroDelimiter, TokenStrea
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Macro {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let tokens;
|
||||
@ -194,6 +198,7 @@ mod printing {
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::ToTokens;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Macro {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.path.to_tokens(tokens);
|
||||
|
17
third_party/rust/syn/src/macros.rs
vendored
@ -77,7 +77,7 @@ macro_rules! ast_enum_of_structs_impl {
|
||||
$pub:ident $enum:ident $name:ident {
|
||||
$(
|
||||
$(#[$variant_attr:meta])*
|
||||
$variant:ident $( ($member:ident) )*,
|
||||
$variant:ident $( ($($member:ident)::+) )*,
|
||||
)*
|
||||
}
|
||||
|
||||
@ -87,7 +87,7 @@ macro_rules! ast_enum_of_structs_impl {
|
||||
check_keyword_matches!(enum $enum);
|
||||
|
||||
$($(
|
||||
ast_enum_from_struct!($name::$variant, $member);
|
||||
ast_enum_from_struct!($name::$variant, $($member)::+);
|
||||
)*)*
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
@ -95,7 +95,7 @@ macro_rules! ast_enum_of_structs_impl {
|
||||
$($remaining)*
|
||||
()
|
||||
tokens
|
||||
$name { $($variant $($member)*,)* }
|
||||
$name { $($variant $($($member)::+)*,)* }
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -104,6 +104,9 @@ macro_rules! ast_enum_from_struct {
|
||||
// No From<TokenStream> for verbatim variants.
|
||||
($name:ident::Verbatim, $member:ident) => {};
|
||||
|
||||
// No From<TokenStream> for private variants.
|
||||
($name:ident::$variant:ident, crate::private) => {};
|
||||
|
||||
($name:ident::$variant:ident, $member:ident) => {
|
||||
impl From<$member> for $name {
|
||||
fn from(e: $member) -> $name {
|
||||
@ -131,7 +134,15 @@ macro_rules! generate_to_tokens {
|
||||
);
|
||||
};
|
||||
|
||||
(($($arms:tt)*) $tokens:ident $name:ident { $variant:ident crate::private, $($next:tt)*}) => {
|
||||
generate_to_tokens!(
|
||||
($($arms)* $name::$variant(_) => unreachable!(),)
|
||||
$tokens $name { $($next)* }
|
||||
);
|
||||
};
|
||||
|
||||
(($($arms:tt)*) $tokens:ident $name:ident {}) => {
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ::quote::ToTokens for $name {
|
||||
fn to_tokens(&self, $tokens: &mut ::proc_macro2::TokenStream) {
|
||||
match self {
|
||||
|
7
third_party/rust/syn/src/op.rs
vendored
@ -3,6 +3,7 @@ ast_enum! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum BinOp {
|
||||
/// The `+` operator (addition)
|
||||
Add(Token![+]),
|
||||
@ -68,6 +69,7 @@ ast_enum! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum UnOp {
|
||||
/// The `*` operator for dereferencing
|
||||
Deref(Token![*]),
|
||||
@ -81,7 +83,6 @@ ast_enum! {
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
|
||||
fn parse_binop(input: ParseStream) -> Result<BinOp> {
|
||||
@ -126,6 +127,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for BinOp {
|
||||
#[cfg(not(feature = "full"))]
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
@ -160,6 +162,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for UnOp {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let lookahead = input.lookahead1();
|
||||
@ -182,6 +185,7 @@ mod printing {
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::ToTokens;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for BinOp {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
@ -217,6 +221,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for UnOp {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
|
83
third_party/rust/syn/src/parse.rs
vendored
@ -6,9 +6,8 @@
|
||||
//! [`Cursor`] type. `Cursor` is a cheaply copyable cursor over a range of
|
||||
//! tokens in a token stream.
|
||||
//!
|
||||
//! [`ParseStream`]: type.ParseStream.html
|
||||
//! [`Result<T>`]: type.Result.html
|
||||
//! [`Cursor`]: ../buffer/index.html
|
||||
//! [`Result<T>`]: Result
|
||||
//! [`Cursor`]: crate::buffer::Cursor
|
||||
//!
|
||||
//! # Example
|
||||
//!
|
||||
@ -23,7 +22,7 @@
|
||||
//! procedural macro, they will receive a helpful compiler error message
|
||||
//! pointing out the exact token that triggered the failure to parse.
|
||||
//!
|
||||
//! [`parse_macro_input!`]: ../macro.parse_macro_input.html
|
||||
//! [`parse_macro_input!`]: crate::parse_macro_input!
|
||||
//!
|
||||
//! ```
|
||||
//! # extern crate proc_macro;
|
||||
@ -96,10 +95,9 @@
|
||||
//! obvious default way. These functions can return any syntax tree node that
|
||||
//! implements the [`Parse`] trait, which includes most types in Syn.
|
||||
//!
|
||||
//! [`syn::parse`]: ../fn.parse.html
|
||||
//! [`syn::parse2`]: ../fn.parse2.html
|
||||
//! [`syn::parse_str`]: ../fn.parse_str.html
|
||||
//! [`Parse`]: trait.Parse.html
|
||||
//! [`syn::parse`]: crate::parse()
|
||||
//! [`syn::parse2`]: crate::parse2()
|
||||
//! [`syn::parse_str`]: crate::parse_str()
|
||||
//!
|
||||
//! ```
|
||||
//! use syn::Type;
|
||||
@ -114,7 +112,7 @@
|
||||
//!
|
||||
//! The [`parse_quote!`] macro also uses this approach.
|
||||
//!
|
||||
//! [`parse_quote!`]: ../macro.parse_quote.html
|
||||
//! [`parse_quote!`]: crate::parse_quote!
|
||||
//!
|
||||
//! # The `Parser` trait
|
||||
//!
|
||||
@ -124,8 +122,8 @@
|
||||
//! may or may not allow trailing punctuation, and parsing it the wrong way
|
||||
//! would either reject valid input or accept invalid input.
|
||||
//!
|
||||
//! [`Attribute`]: ../struct.Attribute.html
|
||||
//! [`Punctuated`]: ../punctuated/index.html
|
||||
//! [`Attribute`]: crate::Attribute
|
||||
//! [`Punctuated`]: crate::punctuated
|
||||
//!
|
||||
//! The `Parse` trait is not implemented in these cases because there is no good
|
||||
//! behavior to consider the default.
|
||||
@ -150,7 +148,6 @@
|
||||
//! single `Parse` implementation, and those parser functions can be invoked
|
||||
//! through the [`Parser`] trait.
|
||||
//!
|
||||
//! [`Parser`]: trait.Parser.html
|
||||
//!
|
||||
//! ```
|
||||
//! # extern crate proc_macro;
|
||||
@ -189,6 +186,17 @@
|
||||
#[path = "discouraged.rs"]
|
||||
pub mod discouraged;
|
||||
|
||||
use crate::buffer::{Cursor, TokenBuffer};
|
||||
use crate::error;
|
||||
use crate::lookahead;
|
||||
#[cfg(all(
|
||||
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
|
||||
feature = "proc-macro"
|
||||
))]
|
||||
use crate::proc_macro;
|
||||
use crate::punctuated::Punctuated;
|
||||
use crate::token::Token;
|
||||
use proc_macro2::{self, Delimiter, Group, Literal, Punct, Span, TokenStream, TokenTree};
|
||||
use std::cell::Cell;
|
||||
use std::fmt::{self, Debug, Display};
|
||||
use std::marker::PhantomData;
|
||||
@ -197,19 +205,6 @@ use std::ops::Deref;
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[cfg(all(
|
||||
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
|
||||
feature = "proc-macro"
|
||||
))]
|
||||
use crate::proc_macro;
|
||||
use proc_macro2::{self, Delimiter, Group, Literal, Punct, Span, TokenStream, TokenTree};
|
||||
|
||||
use crate::buffer::{Cursor, TokenBuffer};
|
||||
use crate::error;
|
||||
use crate::lookahead;
|
||||
use crate::punctuated::Punctuated;
|
||||
use crate::token::Token;
|
||||
|
||||
pub use crate::error::{Error, Result};
|
||||
pub use crate::lookahead::{Lookahead1, Peek};
|
||||
|
||||
@ -250,7 +245,7 @@ pub type ParseStream<'a> = &'a ParseBuffer<'a>;
|
||||
/// - One of [the `syn::parse*` functions][syn-parse]; or
|
||||
/// - A method of the [`Parser`] trait.
|
||||
///
|
||||
/// [syn-parse]: index.html#the-synparse-functions
|
||||
/// [syn-parse]: self#the-synparse-functions
|
||||
pub struct ParseBuffer<'a> {
|
||||
scope: Span,
|
||||
// Instead of Cell<Cursor<'a>> so that ParseBuffer<'a> is covariant in 'a.
|
||||
@ -622,17 +617,36 @@ impl<'a> ParseBuffer<'a> {
|
||||
/// }
|
||||
/// ```
|
||||
pub fn peek2<T: Peek>(&self, token: T) -> bool {
|
||||
fn peek2(buffer: &ParseBuffer, peek: fn(Cursor) -> bool) -> bool {
|
||||
if let Some(group) = buffer.cursor().group(Delimiter::None) {
|
||||
if group.0.skip().map_or(false, peek) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
buffer.cursor().skip().map_or(false, peek)
|
||||
}
|
||||
|
||||
let _ = token;
|
||||
self.cursor().skip().map_or(false, T::Token::peek)
|
||||
peek2(self, T::Token::peek)
|
||||
}
|
||||
|
||||
/// Looks at the third-next token in the parse stream.
|
||||
pub fn peek3<T: Peek>(&self, token: T) -> bool {
|
||||
fn peek3(buffer: &ParseBuffer, peek: fn(Cursor) -> bool) -> bool {
|
||||
if let Some(group) = buffer.cursor().group(Delimiter::None) {
|
||||
if group.0.skip().and_then(Cursor::skip).map_or(false, peek) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
buffer
|
||||
.cursor()
|
||||
.skip()
|
||||
.and_then(Cursor::skip)
|
||||
.map_or(false, peek)
|
||||
}
|
||||
|
||||
let _ = token;
|
||||
self.cursor()
|
||||
.skip()
|
||||
.and_then(Cursor::skip)
|
||||
.map_or(false, T::Token::peek)
|
||||
peek3(self, T::Token::peek)
|
||||
}
|
||||
|
||||
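The rewritten `peek2`/`peek3` above first look through an invisible `Delimiter::None` group, which wraps tokens substituted for macro_rules metavariables, so speculative lookahead behaves the same whether or not the input went through another macro. A sketch of ordinary `peek2` use in a custom parser (the struct and names are illustrative):

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::{Ident, Token};

/// Either `name = value` or a bare `value`, decided by peeking two tokens ahead.
struct MaybeNamed {
    name: Option<Ident>,
    value: Ident,
}

impl Parse for MaybeNamed {
    fn parse(input: ParseStream) -> Result<Self> {
        if input.peek(Ident) && input.peek2(Token![=]) {
            let name: Ident = input.parse()?;
            let _eq: Token![=] = input.parse()?;
            Ok(MaybeNamed { name: Some(name), value: input.parse()? })
        } else {
            Ok(MaybeNamed { name: None, value: input.parse()? })
        }
    }
}

fn main() -> Result<()> {
    let parsed: MaybeNamed = syn::parse_str("alias = foo")?;
    assert!(parsed.name.is_some() && parsed.value == "foo");
    Ok(())
}
```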
/// Parses zero or more occurrences of `T` separated by punctuation of type
|
||||
@ -1045,12 +1059,14 @@ impl<'a> ParseBuffer<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl<T: Parse> Parse for Box<T> {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.parse().map(Box::new)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl<T: Parse + Token> Parse for Option<T> {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if T::peek(input.cursor()) {
|
||||
@ -1061,12 +1077,14 @@ impl<T: Parse + Token> Parse for Option<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TokenStream {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| Ok((cursor.token_stream(), Cursor::empty())))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TokenTree {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| match cursor.token_tree() {
|
||||
@ -1076,6 +1094,7 @@ impl Parse for TokenTree {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Group {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| {
|
||||
@ -1091,6 +1110,7 @@ impl Parse for Group {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Punct {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| match cursor.punct() {
|
||||
@ -1100,6 +1120,7 @@ impl Parse for Punct {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Literal {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| match cursor.literal() {
|
||||
|
51
third_party/rust/syn/src/parse_macro_input.rs
vendored
@ -46,6 +46,42 @@
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Usage with Parser
|
||||
///
|
||||
/// This macro can also be used with the [`Parser` trait] for types that have
|
||||
/// multiple ways that they can be parsed.
|
||||
///
|
||||
/// [`Parser` trait]: crate::rustdoc_workaround::parse_module::Parser
|
||||
///
|
||||
/// ```
|
||||
/// # extern crate proc_macro;
|
||||
/// #
|
||||
/// # use proc_macro::TokenStream;
|
||||
/// # use syn::{parse_macro_input, Result};
|
||||
/// # use syn::parse::ParseStream;
|
||||
/// #
|
||||
/// # struct MyMacroInput {}
|
||||
/// #
|
||||
/// impl MyMacroInput {
|
||||
/// fn parse_alternate(input: ParseStream) -> Result<Self> {
|
||||
/// /* ... */
|
||||
/// # Ok(MyMacroInput {})
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// # const IGNORE: &str = stringify! {
|
||||
/// #[proc_macro]
|
||||
/// # };
|
||||
/// pub fn my_macro(tokens: TokenStream) -> TokenStream {
|
||||
/// let input = parse_macro_input!(tokens with MyMacroInput::parse_alternate);
|
||||
///
|
||||
/// /* ... */
|
||||
/// # "".parse().unwrap()
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// <br>
|
||||
///
|
||||
/// # Expansion
|
||||
///
|
||||
/// `parse_macro_input!($variable as $Type)` expands to something like:
|
||||
@ -68,12 +104,21 @@
|
||||
/// # }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
|
||||
macro_rules! parse_macro_input {
|
||||
($tokenstream:ident as $ty:ty) => {
|
||||
match $crate::parse_macro_input::parse::<$ty>($tokenstream) {
|
||||
$crate::export::Ok(data) => data,
|
||||
$crate::export::Err(err) => {
|
||||
return $crate::export::TokenStream::from(err.to_compile_error());
|
||||
$crate::__private::Ok(data) => data,
|
||||
$crate::__private::Err(err) => {
|
||||
return $crate::__private::TokenStream::from(err.to_compile_error());
|
||||
}
|
||||
}
|
||||
};
|
||||
($tokenstream:ident with $parser:path) => {
|
||||
match $crate::parse::Parser::parse($parser, $tokenstream) {
|
||||
$crate::__private::Ok(data) => data,
|
||||
$crate::__private::Err(err) => {
|
||||
return $crate::__private::TokenStream::from(err.to_compile_error());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
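The new `with $parser:path` arm documented and implemented above lets `parse_macro_input!` route the input through any `Parser`, not just a type's `Parse` impl. A sketch of how a proc-macro crate might use it (the macro name is made up, and the code must live in a crate with `proc-macro = true`):

```rust
use proc_macro::TokenStream;
use syn::parse_macro_input;
use syn::punctuated::Punctuated;
use syn::{token::Comma, Ident};

#[proc_macro]
pub fn count_idents(tokens: TokenStream) -> TokenStream {
    // Parse a comma-separated ident list, allowing trailing punctuation,
    // by naming a parser function instead of a `Parse` type.
    let idents = parse_macro_input!(tokens with Punctuated::<Ident, Comma>::parse_terminated);
    // Expand to an integer literal holding the count.
    format!("{}", idents.len()).parse().unwrap()
}
```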
9
third_party/rust/syn/src/parse_quote.rs
vendored
@ -6,7 +6,7 @@
|
||||
/// The return type can be any syntax tree node that implements the [`Parse`]
|
||||
/// trait.
|
||||
///
|
||||
/// [`Parse`]: parse::Parse
|
||||
/// [`Parse`]: crate::parse::Parse
|
||||
///
|
||||
/// ```
|
||||
/// use quote::quote;
|
||||
@ -58,7 +58,7 @@
|
||||
/// `P` with optional trailing punctuation
|
||||
/// - [`Vec<Stmt>`] — parses the same as `Block::parse_within`
|
||||
///
|
||||
/// [`Punctuated<T, P>`]: punctuated::Punctuated
|
||||
/// [`Punctuated<T, P>`]: crate::punctuated::Punctuated
|
||||
/// [`Vec<Stmt>`]: Block::parse_within
|
||||
///
|
||||
/// # Panics
|
||||
@ -69,12 +69,13 @@
|
||||
//
|
||||
// TODO: allow Punctuated to be inferred as intra doc link, currently blocked on
|
||||
// https://github.com/rust-lang/rust/issues/62834
|
||||
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
|
||||
#[macro_export]
|
||||
macro_rules! parse_quote {
|
||||
($($tt:tt)*) => {
|
||||
$crate::parse_quote::parse(
|
||||
$crate::export::From::from(
|
||||
$crate::export::quote::quote!($($tt)*)
|
||||
$crate::__private::From::from(
|
||||
$crate::__private::quote::quote!($($tt)*)
|
||||
)
|
||||
)
|
||||
};
|
||||
|
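`parse_quote!` above now expands through `$crate::__private` instead of `$crate::export`; its behavior is unchanged. A brief usage reminder (the target types are chosen arbitrarily), assuming the default `parsing` and `printing` features:

```rust
use syn::{parse_quote, Type, WherePredicate};

fn main() {
    // The syntax tree node type is inferred from the binding's annotation.
    let ty: Type = parse_quote!(Vec<String>);
    let predicate: WherePredicate = parse_quote!(T: Clone + Send);
    let _ = (ty, predicate);
}
```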
126
third_party/rust/syn/src/pat.rs
vendored
@ -12,10 +12,8 @@ ast_enum_of_structs! {
    ///
    /// This type is a [syntax tree enum].
    ///
    /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
    //
    // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
    // blocked on https://github.com/rust-lang/rust/issues/62833
    /// [syntax tree enum]: Expr#syntax-tree-enums
    #[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
    pub enum Pat {
        /// A box pattern: `box v`.
        Box(PatBox),
@ -74,8 +72,31 @@ ast_enum_of_structs! {
        /// A pattern that matches any value: `_`.
        Wild(PatWild),

        // The following is the only supported idiom for exhaustive matching of
        // this enum.
        //
        //     match expr {
        //         Pat::Box(e) => {...}
        //         Pat::Ident(e) => {...}
        //         ...
        //         Pat::Wild(e) => {...}
        //
        //         #[cfg(test)]
        //         Pat::__TestExhaustive(_) => unimplemented!(),
        //         #[cfg(not(test))]
        //         _ => { /* some sane fallback */ }
        //     }
        //
        // This way we fail your tests but don't break your library when adding
        // a variant. You will be notified by a test failure when a variant is
        // added, so that you can add code to handle it, but your library will
        // continue to compile and work for downstream users in the interim.
        //
        // Once `deny(reachable)` is available in rustc, Pat will be
        // reimplemented as a non_exhaustive enum.
        // https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237
        #[doc(hidden)]
        __Nonexhaustive,
        __TestExhaustive(crate::private),
    }
}

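Written out as compilable code, the matching idiom described in that comment looks like the following; `describe` is an invented helper, the string labels are arbitrary, and it assumes the `"full"` feature:

```rust
use syn::Pat;

// The exhaustive-matching idiom from the comment above, spelled out.
fn describe(pat: &Pat) -> &'static str {
    match pat {
        Pat::Box(_) => "box",
        Pat::Ident(_) => "ident",
        Pat::Lit(_) => "literal",
        Pat::Macro(_) => "macro",
        Pat::Or(_) => "or",
        Pat::Path(_) => "path",
        Pat::Range(_) => "range",
        Pat::Reference(_) => "reference",
        Pat::Rest(_) => "rest",
        Pat::Slice(_) => "slice",
        Pat::Struct(_) => "struct",
        Pat::Tuple(_) => "tuple",
        Pat::TupleStruct(_) => "tuple struct",
        Pat::Type(_) => "type ascription",
        Pat::Verbatim(_) => "verbatim",
        Pat::Wild(_) => "wildcard",

        // Fails this crate's tests when syn adds a variant, without breaking
        // downstream builds in the meantime.
        #[cfg(test)]
        Pat::__TestExhaustive(_) => unimplemented!(),
        #[cfg(not(test))]
        _ => "pattern added in a newer syn",
    }
}
```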
@ -83,6 +104,7 @@ ast_struct! {
|
||||
/// A box pattern: `box v`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatBox {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub box_token: Token![box],
|
||||
@ -97,6 +119,7 @@ ast_struct! {
|
||||
/// constant; these cannot be distinguished syntactically.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatIdent {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub by_ref: Option<Token![ref]>,
|
||||
@ -113,6 +136,7 @@ ast_struct! {
|
||||
/// are represented as an `Expr::Unary`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatLit {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub expr: Box<Expr>,
|
||||
@ -123,6 +147,7 @@ ast_struct! {
|
||||
/// A macro in pattern position.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatMacro {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub mac: Macro,
|
||||
@ -133,6 +158,7 @@ ast_struct! {
|
||||
/// A pattern that matches any one of a set of cases.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatOr {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub leading_vert: Option<Token![|]>,
|
||||
@ -150,6 +176,7 @@ ast_struct! {
|
||||
/// associated constants.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatPath {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub qself: Option<QSelf>,
|
||||
@ -161,6 +188,7 @@ ast_struct! {
|
||||
/// A range pattern: `1..=2`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatRange {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub lo: Box<Expr>,
|
||||
@ -173,6 +201,7 @@ ast_struct! {
|
||||
/// A reference pattern: `&mut var`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatReference {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub and_token: Token![&],
|
||||
@ -185,6 +214,7 @@ ast_struct! {
|
||||
/// The dots in a tuple or slice pattern: `[0, 1, ..]`
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatRest {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub dot2_token: Token![..],
|
||||
@ -195,6 +225,7 @@ ast_struct! {
|
||||
/// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatSlice {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub bracket_token: token::Bracket,
|
||||
@ -206,6 +237,7 @@ ast_struct! {
|
||||
/// A struct or struct variant pattern: `Variant { x, y, .. }`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatStruct {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub path: Path,
|
||||
@ -219,6 +251,7 @@ ast_struct! {
|
||||
/// A tuple pattern: `(a, b)`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatTuple {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub paren_token: token::Paren,
|
||||
@ -230,6 +263,7 @@ ast_struct! {
|
||||
/// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatTupleStruct {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub path: Path,
|
||||
@ -241,6 +275,7 @@ ast_struct! {
|
||||
/// A type ascription pattern: `foo: f64`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatType {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub pat: Box<Pat>,
|
||||
@ -253,6 +288,7 @@ ast_struct! {
|
||||
/// A pattern that matches any value: `_`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct PatWild {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub underscore_token: Token![_],
|
||||
@ -266,6 +302,7 @@ ast_struct! {
|
||||
/// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct FieldPat {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub member: Member,
|
||||
@ -277,31 +314,29 @@ ast_struct! {
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
|
||||
use crate::ext::IdentExt;
|
||||
use crate::parse::{Parse, ParseBuffer, ParseStream, Result};
|
||||
use crate::path;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Pat {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let begin = input.fork();
|
||||
let lookahead = input.lookahead1();
|
||||
if lookahead.peek(Ident)
|
||||
&& ({
|
||||
input.peek2(Token![::])
|
||||
|| input.peek2(Token![!])
|
||||
|| input.peek2(token::Brace)
|
||||
|| input.peek2(token::Paren)
|
||||
|| input.peek2(Token![..])
|
||||
&& !{
|
||||
let ahead = input.fork();
|
||||
ahead.parse::<Ident>()?;
|
||||
ahead.parse::<RangeLimits>()?;
|
||||
ahead.is_empty() || ahead.peek(Token![,])
|
||||
}
|
||||
})
|
||||
|| input.peek(Token![self]) && input.peek2(Token![::])
|
||||
|| lookahead.peek(Token![::])
|
||||
if {
|
||||
let ahead = input.fork();
|
||||
ahead.parse::<Option<Ident>>()?.is_some()
|
||||
&& (ahead.peek(Token![::])
|
||||
|| ahead.peek(Token![!])
|
||||
|| ahead.peek(token::Brace)
|
||||
|| ahead.peek(token::Paren)
|
||||
|| ahead.peek(Token![..])
|
||||
&& ahead.parse::<RangeLimits>().is_ok()
|
||||
&& !(ahead.is_empty() || ahead.peek(Token![,])))
|
||||
} || {
|
||||
let ahead = input.fork();
|
||||
ahead.parse::<Option<Token![self]>>()?.is_some() && ahead.peek(Token![::])
|
||||
} || lookahead.peek(Token![::])
|
||||
|| lookahead.peek(Token![<])
|
||||
|| input.peek(Token![Self])
|
||||
|| input.peek(Token![super])
|
||||
@ -312,7 +347,8 @@ pub mod parsing {
|
||||
input.call(pat_wild).map(Pat::Wild)
|
||||
} else if input.peek(Token![box]) {
|
||||
input.call(pat_box).map(Pat::Box)
|
||||
} else if input.peek(Token![-]) || lookahead.peek(Lit) {
|
||||
} else if input.peek(Token![-]) || lookahead.peek(Lit) || lookahead.peek(Token![const])
|
||||
{
|
||||
pat_lit_or_range(input)
|
||||
} else if lookahead.peek(Token![ref])
|
||||
|| lookahead.peek(Token![mut])
|
||||
@ -328,6 +364,8 @@ pub mod parsing {
|
||||
input.call(pat_slice).map(Pat::Slice)
|
||||
} else if lookahead.peek(Token![..]) && !input.peek(Token![...]) {
|
||||
pat_range_half_open(input, begin)
|
||||
} else if lookahead.peek(Token![const]) {
|
||||
input.call(pat_const).map(Pat::Verbatim)
|
||||
} else {
|
||||
Err(lookahead.error())
|
||||
}
|
||||
@ -485,7 +523,7 @@ pub mod parsing {
|
||||
attrs,
|
||||
member,
|
||||
colon_token: input.parse()?,
|
||||
pat: Box::new(multi_pat(input)?),
|
||||
pat: Box::new(multi_pat_with_leading_vert(input)?),
|
||||
});
|
||||
}
|
||||
|
||||
@ -564,7 +602,7 @@ pub mod parsing {
|
||||
|
||||
let mut elems = Punctuated::new();
|
||||
while !content.is_empty() {
|
||||
let value = multi_pat(&content)?;
|
||||
let value = multi_pat_with_leading_vert(&content)?;
|
||||
elems.push_value(value);
|
||||
if content.is_empty() {
|
||||
break;
|
||||
@ -605,6 +643,8 @@ pub mod parsing {
|
||||
} else {
|
||||
Ok(Pat::Verbatim(verbatim::between(begin, input)))
|
||||
}
|
||||
} else if let Expr::Verbatim(verbatim) = *lo {
|
||||
Ok(Pat::Verbatim(verbatim))
|
||||
} else {
|
||||
Ok(Pat::Lit(PatLit {
|
||||
attrs: Vec::new(),
|
||||
@ -638,6 +678,8 @@ pub mod parsing {
|
||||
|| lookahead.peek(Token![crate])
|
||||
{
|
||||
Expr::Path(input.parse()?)
|
||||
} else if lookahead.peek(Token![const]) {
|
||||
Expr::Verbatim(input.call(expr::parsing::expr_const)?)
|
||||
} else {
|
||||
return Err(lookahead.error());
|
||||
};
|
||||
@ -659,7 +701,7 @@ pub mod parsing {
|
||||
|
||||
let mut elems = Punctuated::new();
|
||||
while !content.is_empty() {
|
||||
let value = multi_pat(&content)?;
|
||||
let value = multi_pat_with_leading_vert(&content)?;
|
||||
elems.push_value(value);
|
||||
if content.is_empty() {
|
||||
break;
|
||||
@ -675,6 +717,18 @@ pub mod parsing {
|
||||
})
|
||||
}
|
||||
|
||||
fn pat_const(input: ParseStream) -> Result<TokenStream> {
|
||||
let begin = input.fork();
|
||||
input.parse::<Token![const]>()?;
|
||||
|
||||
let content;
|
||||
braced!(content in input);
|
||||
content.call(Attribute::parse_inner)?;
|
||||
content.call(Block::parse_within)?;
|
||||
|
||||
Ok(verbatim::between(begin, input))
|
||||
}
|
||||
|
||||
pub fn multi_pat(input: ParseStream) -> Result<Pat> {
|
||||
multi_pat_impl(input, None)
|
||||
}
|
||||
@ -710,12 +764,11 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
|
||||
use crate::attr::FilterAttrs;
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
use crate::attr::FilterAttrs;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatWild {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -723,6 +776,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatIdent {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -736,6 +790,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatStruct {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -751,6 +806,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatTupleStruct {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -759,6 +815,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatType {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -768,6 +825,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatPath {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -775,6 +833,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatTuple {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -784,6 +843,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatBox {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -792,6 +852,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatReference {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -801,6 +862,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatRest {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -808,6 +870,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatLit {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -815,6 +878,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatRange {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -827,6 +891,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatSlice {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -836,6 +901,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatMacro {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -843,6 +909,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PatOr {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -851,6 +918,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for FieldPat {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
|
193
third_party/rust/syn/src/path.rs
vendored
193
third_party/rust/syn/src/path.rs
vendored
@ -6,6 +6,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct Path {
|
||||
pub leading_colon: Option<Token![::]>,
|
||||
pub segments: Punctuated<PathSegment, Token![::]>,
|
||||
@ -31,6 +32,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct PathSegment {
|
||||
pub ident: Ident,
|
||||
pub arguments: PathArguments,
|
||||
@ -62,6 +64,7 @@ ast_enum! {
|
||||
/// ## Parenthesized
|
||||
///
|
||||
/// The `(A, B) -> C` in `Fn(A, B) -> C`.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum PathArguments {
|
||||
None,
|
||||
/// The `<'a, T>` in `std::slice::iter<'a, T>`.
|
||||
@ -100,6 +103,7 @@ ast_enum! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum GenericArgument {
|
||||
/// A lifetime argument.
|
||||
Lifetime(Lifetime),
|
||||
@ -124,6 +128,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct AngleBracketedGenericArguments {
|
||||
pub colon2_token: Option<Token![::]>,
|
||||
pub lt_token: Token![<],
|
||||
@ -137,6 +142,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct Binding {
|
||||
pub ident: Ident,
|
||||
pub eq_token: Token![=],
|
||||
@ -149,6 +155,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct Constraint {
|
||||
pub ident: Ident,
|
||||
pub colon_token: Token![:],
|
||||
@ -162,6 +169,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct ParenthesizedGenericArguments {
|
||||
pub paren_token: token::Paren,
|
||||
/// `(A, B)`
|
||||
@ -191,6 +199,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct QSelf {
|
||||
pub lt_token: Token![<],
|
||||
pub ty: Box<Type>,
|
||||
@ -204,17 +213,17 @@ ast_struct! {
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
use crate::expr;
|
||||
use crate::ext::IdentExt;
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Path {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Self::parse_helper(input, false)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for GenericArgument {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if input.peek(Lifetime) && !input.peek2(Token![+]) {
|
||||
@ -230,22 +239,93 @@ pub mod parsing {
|
||||
if input.peek(Ident) && input.peek2(Token![:]) && !input.peek2(Token![::]) {
|
||||
return Ok(GenericArgument::Constraint(input.parse()?));
|
||||
}
|
||||
}
|
||||
|
||||
if input.peek(Lit) {
|
||||
let lit = input.parse()?;
|
||||
return Ok(GenericArgument::Const(Expr::Lit(lit)));
|
||||
}
|
||||
if input.peek(Lit) || input.peek(token::Brace) {
|
||||
return const_argument(input).map(GenericArgument::Const);
|
||||
}
|
||||
|
||||
if input.peek(token::Brace) {
|
||||
let block = input.call(expr::parsing::expr_block)?;
|
||||
return Ok(GenericArgument::Const(Expr::Block(block)));
|
||||
#[cfg(feature = "full")]
|
||||
let begin = input.fork();
|
||||
|
||||
let argument: Type = input.parse()?;
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
{
|
||||
if match &argument {
|
||||
Type::Path(argument)
|
||||
if argument.qself.is_none()
|
||||
&& argument.path.leading_colon.is_none()
|
||||
&& argument.path.segments.len() == 1 =>
|
||||
{
|
||||
match argument.path.segments[0].arguments {
|
||||
PathArguments::AngleBracketed(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
_ => false,
|
||||
} && if input.peek(Token![=]) {
|
||||
input.parse::<Token![=]>()?;
|
||||
input.parse::<Type>()?;
|
||||
true
|
||||
} else if input.peek(Token![:]) {
|
||||
input.parse::<Token![:]>()?;
|
||||
input.call(constraint_bounds)?;
|
||||
true
|
||||
} else {
|
||||
false
|
||||
} {
|
||||
let verbatim = verbatim::between(begin, input);
|
||||
return Ok(GenericArgument::Type(Type::Verbatim(verbatim)));
|
||||
}
|
||||
}
|
||||
|
||||
input.parse().map(GenericArgument::Type)
|
||||
Ok(GenericArgument::Type(argument))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn const_argument(input: ParseStream) -> Result<Expr> {
|
||||
let lookahead = input.lookahead1();
|
||||
|
||||
if input.peek(Lit) {
|
||||
let lit = input.parse()?;
|
||||
return Ok(Expr::Lit(lit));
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
{
|
||||
if input.peek(Ident) {
|
||||
let ident: Ident = input.parse()?;
|
||||
return Ok(Expr::Path(ExprPath {
|
||||
attrs: Vec::new(),
|
||||
qself: None,
|
||||
path: Path::from(ident),
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
if input.peek(token::Brace) {
|
||||
#[cfg(feature = "full")]
|
||||
{
|
||||
let block: ExprBlock = input.parse()?;
|
||||
return Ok(Expr::Block(block));
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "full"))]
|
||||
{
|
||||
let begin = input.fork();
|
||||
let content;
|
||||
braced!(content in input);
|
||||
content.parse::<Expr>()?;
|
||||
let verbatim = verbatim::between(begin, input);
|
||||
return Ok(Expr::Verbatim(verbatim));
|
||||
}
|
||||
}
|
||||
|
||||
Err(lookahead.error())
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for AngleBracketedGenericArguments {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(AngleBracketedGenericArguments {
|
||||
@ -272,6 +352,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for ParenthesizedGenericArguments {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let content;
|
||||
@ -283,6 +364,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for PathSegment {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Self::parse_helper(input, false)
|
||||
@ -315,6 +397,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Binding {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(Binding {
|
||||
@ -326,31 +409,35 @@ pub mod parsing {
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Constraint {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(Constraint {
|
||||
ident: input.parse()?,
|
||||
colon_token: input.parse()?,
|
||||
bounds: {
|
||||
let mut bounds = Punctuated::new();
|
||||
loop {
|
||||
if input.peek(Token![,]) || input.peek(Token![>]) {
|
||||
break;
|
||||
}
|
||||
let value = input.parse()?;
|
||||
bounds.push_value(value);
|
||||
if !input.peek(Token![+]) {
|
||||
break;
|
||||
}
|
||||
let punct = input.parse()?;
|
||||
bounds.push_punct(punct);
|
||||
}
|
||||
bounds
|
||||
},
|
||||
bounds: constraint_bounds(input)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
fn constraint_bounds(input: ParseStream) -> Result<Punctuated<TypeParamBound, Token![+]>> {
|
||||
let mut bounds = Punctuated::new();
|
||||
loop {
|
||||
if input.peek(Token![,]) || input.peek(Token![>]) {
|
||||
break;
|
||||
}
|
||||
let value = input.parse()?;
|
||||
bounds.push_value(value);
|
||||
if !input.peek(Token![+]) {
|
||||
break;
|
||||
}
|
||||
let punct = input.parse()?;
|
||||
bounds.push_punct(punct);
|
||||
}
|
||||
Ok(bounds)
|
||||
}
|
||||
|
||||
impl Path {
|
||||
/// Parse a `Path` containing no path arguments on any of its segments.
|
||||
///
|
||||
@ -385,6 +472,7 @@ pub mod parsing {
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_mod_style(input: ParseStream) -> Result<Self> {
|
||||
Ok(Path {
|
||||
leading_colon: input.parse()?,
|
||||
@ -448,6 +536,7 @@ pub mod parsing {
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn is_ident<I: ?Sized>(&self, ident: &I) -> bool
|
||||
where
|
||||
Ident: PartialEq<I>,
|
||||
@ -469,6 +558,7 @@ pub mod parsing {
|
||||
///
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn get_ident(&self) -> Option<&Ident> {
|
||||
if self.leading_colon.is_none()
|
||||
&& self.segments.len() == 1
|
||||
@ -480,22 +570,32 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
|
||||
Ok(Path {
|
||||
pub(crate) fn parse_helper(input: ParseStream, expr_style: bool) -> Result<Self> {
|
||||
let mut path = Path {
|
||||
leading_colon: input.parse()?,
|
||||
segments: {
|
||||
let mut segments = Punctuated::new();
|
||||
let value = PathSegment::parse_helper(input, expr_style)?;
|
||||
segments.push_value(value);
|
||||
while input.peek(Token![::]) {
|
||||
let punct: Token![::] = input.parse()?;
|
||||
segments.push_punct(punct);
|
||||
let value = PathSegment::parse_helper(input, expr_style)?;
|
||||
segments.push_value(value);
|
||||
}
|
||||
segments
|
||||
},
|
||||
})
|
||||
};
|
||||
Path::parse_rest(input, &mut path, expr_style)?;
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
pub(crate) fn parse_rest(
|
||||
input: ParseStream,
|
||||
path: &mut Self,
|
||||
expr_style: bool,
|
||||
) -> Result<()> {
|
||||
while input.peek(Token![::]) {
|
||||
let punct: Token![::] = input.parse()?;
|
||||
path.segments.push_punct(punct);
|
||||
let value = PathSegment::parse_helper(input, expr_style)?;
|
||||
path.segments.push_value(value);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@ -555,12 +655,12 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
|
||||
use crate::print::TokensOrDefault;
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::ToTokens;
|
||||
use std::cmp;
|
||||
|
||||
use crate::print::TokensOrDefault;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Path {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.leading_colon.to_tokens(tokens);
|
||||
@ -568,6 +668,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PathSegment {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
@ -575,6 +676,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for PathArguments {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
@ -589,6 +691,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for GenericArgument {
|
||||
#[allow(clippy::match_same_arms)]
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
@ -616,6 +719,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for AngleBracketedGenericArguments {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.colon2_token.to_tokens(tokens);
|
||||
@ -658,9 +762,9 @@ mod printing {
|
||||
GenericArgument::Binding(_) | GenericArgument::Constraint(_) => {
|
||||
if !trailing_or_empty {
|
||||
<Token![,]>::default().to_tokens(tokens);
|
||||
trailing_or_empty = true;
|
||||
}
|
||||
param.to_tokens(tokens);
|
||||
trailing_or_empty = param.punct().is_some();
|
||||
}
|
||||
GenericArgument::Lifetime(_)
|
||||
| GenericArgument::Type(_)
|
||||
@ -672,6 +776,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Binding {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
@ -680,6 +785,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Constraint {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
@ -688,6 +794,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for ParenthesizedGenericArguments {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
@ -698,7 +805,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl private {
|
||||
pub fn print_path(tokens: &mut TokenStream, qself: &Option<QSelf>, path: &Path) {
|
||||
pub(crate) fn print_path(tokens: &mut TokenStream, qself: &Option<QSelf>, path: &Path) {
|
||||
let qself = match qself {
|
||||
Some(qself) => qself,
|
||||
None => {
|
||||
@ -709,11 +816,7 @@ mod printing {
        qself.lt_token.to_tokens(tokens);
        qself.ty.to_tokens(tokens);

        let pos = if qself.position > 0 && qself.position >= path.segments.len() {
            path.segments.len() - 1
        } else {
            qself.position
        };
        let pos = cmp::min(qself.position, path.segments.len());
        let mut segments = path.segments.pairs();
        if pos > 0 {
            TokensOrDefault(&qself.as_token).to_tokens(tokens);
51
third_party/rust/syn/src/punctuated.rs
vendored
51
third_party/rust/syn/src/punctuated.rs
vendored
@ -13,7 +13,7 @@
|
||||
//! syntax tree node + punctuation, where every node in the sequence is followed
|
||||
//! by punctuation except for possibly the final one.
|
||||
//!
|
||||
//! [`Punctuated<T, P>`]: struct.Punctuated.html
|
||||
//! [`Punctuated<T, P>`]: Punctuated
|
||||
//!
|
||||
//! ```text
|
||||
//! a_function_call(arg1, arg2, arg3);
|
||||
@ -50,7 +50,17 @@ pub struct Punctuated<T, P> {

impl<T, P> Punctuated<T, P> {
    /// Creates an empty punctuated sequence.
    pub fn new() -> Punctuated<T, P> {
    #[cfg(not(syn_no_const_vec_new))]
    pub const fn new() -> Self {
        Punctuated {
            inner: Vec::new(),
            last: None,
        }
    }

    /// Creates an empty punctuated sequence.
    #[cfg(syn_no_const_vec_new)]
    pub fn new() -> Self {
        Punctuated {
            inner: Vec::new(),
            last: None,
@ -152,7 +162,11 @@ impl<T, P> Punctuated<T, P> {
    /// Panics if the sequence does not already have a trailing punctuation when
    /// this method is called.
    pub fn push_value(&mut self, value: T) {
        assert!(self.empty_or_trailing());
        assert!(
            self.empty_or_trailing(),
            "Punctuated::push_value: cannot push value if Punctuated is missing trailing punctuation",
        );

        self.last = Some(Box::new(value));
    }

@ -164,7 +178,11 @@ impl<T, P> Punctuated<T, P> {
    ///
    /// Panics if the sequence is empty or already has a trailing punctuation.
    pub fn push_punct(&mut self, punctuation: P) {
        assert!(self.last.is_some());
        assert!(
            self.last.is_some(),
            "Punctuated::push_punct: cannot push punctuation if Punctuated is empty or already has trailing punctuation",
        );

        let last = self.last.take().unwrap();
        self.inner.push((*last, punctuation));
    }
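A small usage sketch of the invariant those new panic messages describe: values and punctuation must strictly alternate, so callers either alternate `push_value`/`push_punct` themselves or let `push` insert default punctuation. The identifiers are arbitrary.

```rust
use proc_macro2::Span;
use syn::punctuated::Punctuated;
use syn::{Ident, Token};

fn build_idents() -> Punctuated<Ident, Token![,]> {
    let mut list: Punctuated<Ident, Token![,]> = Punctuated::new();

    // `push_value` requires the sequence to be empty or end in punctuation...
    list.push_value(Ident::new("a", Span::call_site()));
    // ...and `push_punct` requires a trailing value with no punctuation yet.
    list.push_punct(<Token![,]>::default());
    list.push_value(Ident::new("b", Span::call_site()));

    // `push` handles the alternation automatically when `P: Default`.
    list.push(Ident::new("c", Span::call_site()));

    list
}
```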
@ -218,7 +236,10 @@ impl<T, P> Punctuated<T, P> {
|
||||
where
|
||||
P: Default,
|
||||
{
|
||||
assert!(index <= self.len());
|
||||
assert!(
|
||||
index <= self.len(),
|
||||
"Punctuated::insert: index out of range",
|
||||
);
|
||||
|
||||
if index == self.len() {
|
||||
self.push(value);
|
||||
@ -242,6 +263,7 @@ impl<T, P> Punctuated<T, P> {
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_terminated(input: ParseStream) -> Result<Self>
|
||||
where
|
||||
T: Parse,
|
||||
@ -262,6 +284,7 @@ impl<T, P> Punctuated<T, P> {
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_terminated_with(
|
||||
input: ParseStream,
|
||||
parser: fn(ParseStream) -> Result<T>,
|
||||
@ -298,6 +321,7 @@ impl<T, P> Punctuated<T, P> {
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_separated_nonempty(input: ParseStream) -> Result<Self>
|
||||
where
|
||||
T: Parse,
|
||||
@ -318,6 +342,7 @@ impl<T, P> Punctuated<T, P> {
|
||||
/// *This function is available only if Syn is built with the `"parsing"`
|
||||
/// feature.*
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_separated_nonempty_with(
|
||||
input: ParseStream,
|
||||
parser: fn(ParseStream) -> Result<T>,
|
||||
@ -342,6 +367,7 @@ impl<T, P> Punctuated<T, P> {
|
||||
}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl<T, P> Clone for Punctuated<T, P>
|
||||
where
|
||||
T: Clone,
|
||||
@ -356,6 +382,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl<T, P> Eq for Punctuated<T, P>
|
||||
where
|
||||
T: Eq,
|
||||
@ -364,6 +391,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl<T, P> PartialEq for Punctuated<T, P>
|
||||
where
|
||||
T: PartialEq,
|
||||
@ -376,6 +404,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl<T, P> Hash for Punctuated<T, P>
|
||||
where
|
||||
T: Hash,
|
||||
@ -389,6 +418,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let mut list = f.debug_list();
|
||||
@ -435,7 +465,11 @@ impl<T, P> FromIterator<Pair<T, P>> for Punctuated<T, P> {
|
||||
|
||||
impl<T, P> Extend<Pair<T, P>> for Punctuated<T, P> {
|
||||
fn extend<I: IntoIterator<Item = Pair<T, P>>>(&mut self, i: I) {
|
||||
assert!(self.empty_or_trailing());
|
||||
assert!(
|
||||
self.empty_or_trailing(),
|
||||
"Punctuated::extend: Punctuated is not empty or does not have a trailing punctuation",
|
||||
);
|
||||
|
||||
let mut nomore = false;
|
||||
for pair in i {
|
||||
if nomore {
|
||||
@ -928,6 +962,7 @@ impl<T, P> Pair<T, P> {
|
||||
}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl<T, P> Clone for Pair<T, P>
|
||||
where
|
||||
T: Clone,
|
||||
@ -975,16 +1010,18 @@ mod printing {
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl<T, P> ToTokens for Punctuated<T, P>
|
||||
where
|
||||
T: ToTokens,
|
||||
P: ToTokens,
|
||||
{
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.pairs())
|
||||
tokens.append_all(self.pairs());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl<T, P> ToTokens for Pair<T, P>
|
||||
where
|
||||
T: ToTokens,
|
||||
|
2
third_party/rust/syn/src/reserved.rs
vendored
2
third_party/rust/syn/src/reserved.rs
vendored
@ -26,6 +26,7 @@ impl Default for Reserved {
|
||||
}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Clone for Reserved {
|
||||
fn clone(&self) -> Self {
|
||||
Reserved {
|
||||
@ -35,6 +36,7 @@ impl Clone for Reserved {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for Reserved {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.debug_struct("Reserved").finish()
|
||||
|
4
third_party/rust/syn/src/spanned.rs
vendored
4
third_party/rust/syn/src/spanned.rs
vendored
@ -13,8 +13,8 @@
|
||||
//! of a struct for which we are deriving a trait implementation, and we need to
|
||||
//! be able to pass a reference to one of those fields across threads.
|
||||
//!
|
||||
//! [`Type`]: ../enum.Type.html
|
||||
//! [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
|
||||
//! [`Type`]: crate::Type
|
||||
//! [`Sync`]: std::marker::Sync
|
||||
//!
|
||||
//! If the field type does *not* implement `Sync` as required, we want the
|
||||
//! compiler to report an error pointing out exactly which type it was.
|
||||
|
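A condensed sketch of the pattern this module's documentation is describing: take the span of the field's type and attach a `Sync` bound check to it with `quote_spanned!`, so any resulting error points at the field itself. `assert_sync` is an invented helper name.

```rust
use proc_macro2::TokenStream;
use quote::quote_spanned;
use syn::spanned::Spanned;
use syn::Type;

// Given the type of a struct field, emit an assertion whose span is the
// field type's own span, pinning a missing-`Sync` error to that field.
fn assert_sync(field_ty: &Type) -> TokenStream {
    quote_spanned! {field_ty.span()=>
        struct _AssertSync where #field_ty: Sync;
    }
}
```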
13
third_party/rust/syn/src/stmt.rs
vendored
13
third_party/rust/syn/src/stmt.rs
vendored
@ -4,6 +4,7 @@ ast_struct! {
|
||||
/// A braced block containing Rust statements.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct Block {
|
||||
pub brace_token: token::Brace,
|
||||
/// Statements in a block
|
||||
@ -15,6 +16,7 @@ ast_enum! {
|
||||
/// A statement, usually ending in a semicolon.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub enum Stmt {
|
||||
/// A local (let) binding.
|
||||
Local(Local),
|
||||
@ -34,6 +36,7 @@ ast_struct! {
|
||||
/// A local `let` binding: `let x: u64 = s.parse()?`.
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "full")))]
|
||||
pub struct Local {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub let_token: Token![let],
|
||||
@ -46,7 +49,6 @@ ast_struct! {
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
|
||||
use crate::parse::discouraged::Speculative;
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
use proc_macro2::TokenStream;
|
||||
@ -104,6 +106,7 @@ pub mod parsing {
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn parse_within(input: ParseStream) -> Result<Vec<Stmt>> {
|
||||
let mut stmts = Vec::new();
|
||||
loop {
|
||||
@ -130,6 +133,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Block {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let content;
|
||||
@ -140,6 +144,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Stmt {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
parse_stmt(input, false)
|
||||
@ -166,7 +171,7 @@ pub mod parsing {
|
||||
|| input.peek(Token![extern])
|
||||
|| input.peek(Token![use])
|
||||
|| input.peek(Token![static]) && (input.peek2(Token![mut]) || input.peek2(Ident))
|
||||
|| input.peek(Token![const])
|
||||
|| input.peek(Token![const]) && !input.peek2(token::Brace)
|
||||
|| input.peek(Token![unsafe]) && !input.peek2(token::Brace)
|
||||
|| input.peek(Token![async])
|
||||
&& (input.peek2(Token![unsafe])
|
||||
@ -274,10 +279,10 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Block {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.brace_token.surround(tokens, |tokens| {
|
||||
@ -286,6 +291,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Stmt {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
@ -300,6 +306,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Local {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
expr::printing::outer_attrs_to_tokens(&self.attrs, tokens);
|
||||
|
277
third_party/rust/syn/src/token.rs
vendored
277
third_party/rust/syn/src/token.rs
vendored
@ -4,13 +4,13 @@
|
||||
//! prefer to use the [`Token!`] macro instead. This is a type-macro that
|
||||
//! expands to the token type of the given token.
|
||||
//!
|
||||
//! [`Token!`]: ../macro.Token.html
|
||||
//! [`Token!`]: crate::Token
|
||||
//!
|
||||
//! # Example
|
||||
//!
|
||||
//! The [`ItemStatic`] syntax tree node is defined like this.
|
||||
//!
|
||||
//! [`ItemStatic`]: ../struct.ItemStatic.html
|
||||
//! [`ItemStatic`]: crate::ItemStatic
|
||||
//!
|
||||
//! ```
|
||||
//! # use syn::{Attribute, Expr, Ident, Token, Type, Visibility};
|
||||
@ -35,10 +35,10 @@
|
||||
//! method. Delimiter tokens are parsed using the [`parenthesized!`],
|
||||
//! [`bracketed!`] and [`braced!`] macros.
|
||||
//!
|
||||
//! [`ParseStream::parse`]: ../parse/struct.ParseBuffer.html#method.parse
|
||||
//! [`parenthesized!`]: ../macro.parenthesized.html
|
||||
//! [`bracketed!`]: ../macro.bracketed.html
|
||||
//! [`braced!`]: ../macro.braced.html
|
||||
//! [`ParseStream::parse`]: crate::parse::ParseBuffer::parse()
|
||||
//! [`parenthesized!`]: crate::parenthesized!
|
||||
//! [`bracketed!`]: crate::bracketed!
|
||||
//! [`braced!`]: crate::braced!
|
||||
//!
|
||||
//! ```
|
||||
//! use syn::{Attribute, Result};
|
||||
@ -83,29 +83,11 @@
|
||||
//!
|
||||
//! - Field access to its span — `let sp = the_token.span`
|
||||
//!
|
||||
//! [Peeking]: ../parse/struct.ParseBuffer.html#method.peek
|
||||
//! [Parsing]: ../parse/struct.ParseBuffer.html#method.parse
|
||||
//! [Peeking]: crate::parse::ParseBuffer::peek()
|
||||
//! [Parsing]: crate::parse::ParseBuffer::parse()
|
||||
//! [Printing]: https://docs.rs/quote/1.0/quote/trait.ToTokens.html
|
||||
//! [`Span`]: https://docs.rs/proc-macro2/1.0/proc_macro2/struct.Span.html
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::cmp;
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::fmt::{self, Debug};
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
#[cfg(any(feature = "parsing", feature = "printing"))]
|
||||
use proc_macro2::Ident;
|
||||
use proc_macro2::Span;
|
||||
#[cfg(feature = "printing")]
|
||||
use proc_macro2::TokenStream;
|
||||
#[cfg(feature = "parsing")]
|
||||
use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
|
||||
#[cfg(feature = "printing")]
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
use self::private::WithSpan;
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::buffer::Cursor;
|
||||
@ -120,6 +102,22 @@ use crate::lookahead;
|
||||
#[cfg(feature = "parsing")]
|
||||
use crate::parse::{Parse, ParseStream};
|
||||
use crate::span::IntoSpans;
|
||||
#[cfg(any(feature = "parsing", feature = "printing"))]
|
||||
use proc_macro2::Ident;
|
||||
use proc_macro2::Span;
|
||||
#[cfg(feature = "printing")]
|
||||
use proc_macro2::TokenStream;
|
||||
#[cfg(feature = "parsing")]
|
||||
use proc_macro2::{Delimiter, Literal, Punct, TokenTree};
|
||||
#[cfg(feature = "printing")]
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::cmp;
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::fmt::{self, Debug};
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::ops::{Deref, DerefMut};
|
||||
|
||||
/// Marker trait for types that represent single tokens.
|
||||
///
|
||||
@ -270,9 +268,11 @@ macro_rules! define_keywords {
|
||||
}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Copy for $name {}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Clone for $name {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
@ -280,6 +280,7 @@ macro_rules! define_keywords {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for $name {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.write_str(stringify!($name))
|
||||
@ -287,9 +288,11 @@ macro_rules! define_keywords {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl cmp::Eq for $name {}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl PartialEq for $name {
|
||||
fn eq(&self, _other: &$name) -> bool {
|
||||
true
|
||||
@ -297,11 +300,13 @@ macro_rules! define_keywords {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Hash for $name {
|
||||
fn hash<H: Hasher>(&self, _state: &mut H) {}
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for $name {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
printing::keyword($token, self.span, tokens);
|
||||
@ -309,6 +314,7 @@ macro_rules! define_keywords {
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for $name {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok($name {
|
||||
@ -385,9 +391,11 @@ macro_rules! define_punctuation_structs {
|
||||
}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Copy for $name {}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Clone for $name {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
@ -395,6 +403,7 @@ macro_rules! define_punctuation_structs {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for $name {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.write_str(stringify!($name))
|
||||
@ -402,9 +411,11 @@ macro_rules! define_punctuation_structs {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl cmp::Eq for $name {}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl PartialEq for $name {
|
||||
fn eq(&self, _other: &$name) -> bool {
|
||||
true
|
||||
@ -412,6 +423,7 @@ macro_rules! define_punctuation_structs {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Hash for $name {
|
||||
fn hash<H: Hasher>(&self, _state: &mut H) {}
|
||||
}
|
||||
@ -429,6 +441,7 @@ macro_rules! define_punctuation {
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for $name {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
printing::punct($token, &self.spans, tokens);
|
||||
@ -436,6 +449,7 @@ macro_rules! define_punctuation {
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for $name {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok($name {
|
||||
@ -486,9 +500,11 @@ macro_rules! define_delimiters {
|
||||
}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Copy for $name {}
|
||||
|
||||
#[cfg(feature = "clone-impls")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "clone-impls")))]
|
||||
impl Clone for $name {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
@ -496,6 +512,7 @@ macro_rules! define_delimiters {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Debug for $name {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.write_str(stringify!($name))
|
||||
@ -503,9 +520,11 @@ macro_rules! define_delimiters {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl cmp::Eq for $name {}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl PartialEq for $name {
|
||||
fn eq(&self, _other: &$name) -> bool {
|
||||
true
|
||||
@ -513,6 +532,7 @@ macro_rules! define_delimiters {
|
||||
}
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "extra-traits")))]
|
||||
impl Hash for $name {
|
||||
fn hash<H: Hasher>(&self, _state: &mut H) {}
|
||||
}
|
||||
@ -538,6 +558,7 @@ define_punctuation_structs! {
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Underscore {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Ident::new("_", self.span));
|
||||
@ -545,6 +566,7 @@ impl ToTokens for Underscore {
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Underscore {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
input.step(|cursor| {
|
||||
@ -750,105 +772,105 @@ macro_rules! export_token_macro {
|
||||
// https://github.com/rust-lang/rust/issues/45939
|
||||
#[macro_export]
|
||||
macro_rules! Token {
|
||||
(abstract) => { $crate::token::Abstract };
|
||||
(as) => { $crate::token::As };
|
||||
(async) => { $crate::token::Async };
|
||||
(auto) => { $crate::token::Auto };
|
||||
[abstract] => { $crate::token::Abstract };
|
||||
[as] => { $crate::token::As };
|
||||
[async] => { $crate::token::Async };
|
||||
[auto] => { $crate::token::Auto };
|
||||
$($await_rule => { $crate::token::Await };)*
|
||||
(become) => { $crate::token::Become };
|
||||
(box) => { $crate::token::Box };
|
||||
(break) => { $crate::token::Break };
|
||||
(const) => { $crate::token::Const };
|
||||
(continue) => { $crate::token::Continue };
|
||||
(crate) => { $crate::token::Crate };
|
||||
(default) => { $crate::token::Default };
|
||||
(do) => { $crate::token::Do };
|
||||
(dyn) => { $crate::token::Dyn };
|
||||
(else) => { $crate::token::Else };
|
||||
(enum) => { $crate::token::Enum };
|
||||
(extern) => { $crate::token::Extern };
|
||||
(final) => { $crate::token::Final };
|
||||
(fn) => { $crate::token::Fn };
|
||||
(for) => { $crate::token::For };
|
||||
(if) => { $crate::token::If };
|
||||
(impl) => { $crate::token::Impl };
|
||||
(in) => { $crate::token::In };
|
||||
(let) => { $crate::token::Let };
|
||||
(loop) => { $crate::token::Loop };
|
||||
(macro) => { $crate::token::Macro };
|
||||
(match) => { $crate::token::Match };
|
||||
(mod) => { $crate::token::Mod };
|
||||
(move) => { $crate::token::Move };
|
||||
(mut) => { $crate::token::Mut };
|
||||
(override) => { $crate::token::Override };
|
||||
(priv) => { $crate::token::Priv };
|
||||
(pub) => { $crate::token::Pub };
|
||||
(ref) => { $crate::token::Ref };
|
||||
(return) => { $crate::token::Return };
|
||||
(Self) => { $crate::token::SelfType };
|
||||
(self) => { $crate::token::SelfValue };
|
||||
(static) => { $crate::token::Static };
|
||||
(struct) => { $crate::token::Struct };
|
||||
(super) => { $crate::token::Super };
|
||||
(trait) => { $crate::token::Trait };
|
||||
(try) => { $crate::token::Try };
|
||||
(type) => { $crate::token::Type };
|
||||
(typeof) => { $crate::token::Typeof };
|
||||
(union) => { $crate::token::Union };
|
||||
(unsafe) => { $crate::token::Unsafe };
|
||||
(unsized) => { $crate::token::Unsized };
|
||||
(use) => { $crate::token::Use };
|
||||
(virtual) => { $crate::token::Virtual };
|
||||
(where) => { $crate::token::Where };
|
||||
(while) => { $crate::token::While };
|
||||
(yield) => { $crate::token::Yield };
|
||||
(+) => { $crate::token::Add };
|
||||
(+=) => { $crate::token::AddEq };
|
||||
(&) => { $crate::token::And };
|
||||
(&&) => { $crate::token::AndAnd };
|
||||
(&=) => { $crate::token::AndEq };
|
||||
(@) => { $crate::token::At };
|
||||
(!) => { $crate::token::Bang };
|
||||
(^) => { $crate::token::Caret };
|
||||
(^=) => { $crate::token::CaretEq };
|
||||
(:) => { $crate::token::Colon };
|
||||
(::) => { $crate::token::Colon2 };
|
||||
(,) => { $crate::token::Comma };
|
||||
(/) => { $crate::token::Div };
|
||||
(/=) => { $crate::token::DivEq };
|
||||
($) => { $crate::token::Dollar };
|
||||
(.) => { $crate::token::Dot };
|
||||
(..) => { $crate::token::Dot2 };
|
||||
(...) => { $crate::token::Dot3 };
|
||||
(..=) => { $crate::token::DotDotEq };
|
||||
(=) => { $crate::token::Eq };
|
||||
(==) => { $crate::token::EqEq };
|
||||
(>=) => { $crate::token::Ge };
|
||||
(>) => { $crate::token::Gt };
|
||||
(<=) => { $crate::token::Le };
|
||||
(<) => { $crate::token::Lt };
|
||||
(*=) => { $crate::token::MulEq };
|
||||
(!=) => { $crate::token::Ne };
|
||||
(|) => { $crate::token::Or };
|
||||
(|=) => { $crate::token::OrEq };
|
||||
(||) => { $crate::token::OrOr };
|
||||
(#) => { $crate::token::Pound };
|
||||
(?) => { $crate::token::Question };
|
||||
(->) => { $crate::token::RArrow };
|
||||
(<-) => { $crate::token::LArrow };
|
||||
(%) => { $crate::token::Rem };
|
||||
(%=) => { $crate::token::RemEq };
|
||||
(=>) => { $crate::token::FatArrow };
|
||||
(;) => { $crate::token::Semi };
|
||||
(<<) => { $crate::token::Shl };
|
||||
(<<=) => { $crate::token::ShlEq };
|
||||
(>>) => { $crate::token::Shr };
|
||||
(>>=) => { $crate::token::ShrEq };
|
||||
(*) => { $crate::token::Star };
|
||||
(-) => { $crate::token::Sub };
|
||||
(-=) => { $crate::token::SubEq };
|
||||
(~) => { $crate::token::Tilde };
|
||||
(_) => { $crate::token::Underscore };
|
||||
[become] => { $crate::token::Become };
|
||||
[box] => { $crate::token::Box };
|
||||
[break] => { $crate::token::Break };
|
||||
[const] => { $crate::token::Const };
|
||||
[continue] => { $crate::token::Continue };
|
||||
[crate] => { $crate::token::Crate };
|
||||
[default] => { $crate::token::Default };
|
||||
[do] => { $crate::token::Do };
|
||||
[dyn] => { $crate::token::Dyn };
|
||||
[else] => { $crate::token::Else };
|
||||
[enum] => { $crate::token::Enum };
|
||||
[extern] => { $crate::token::Extern };
|
||||
[final] => { $crate::token::Final };
|
||||
[fn] => { $crate::token::Fn };
|
||||
[for] => { $crate::token::For };
|
||||
[if] => { $crate::token::If };
|
||||
[impl] => { $crate::token::Impl };
|
||||
[in] => { $crate::token::In };
|
||||
[let] => { $crate::token::Let };
|
||||
[loop] => { $crate::token::Loop };
|
||||
[macro] => { $crate::token::Macro };
|
||||
[match] => { $crate::token::Match };
|
||||
[mod] => { $crate::token::Mod };
|
||||
[move] => { $crate::token::Move };
|
||||
[mut] => { $crate::token::Mut };
|
||||
[override] => { $crate::token::Override };
|
||||
[priv] => { $crate::token::Priv };
|
||||
[pub] => { $crate::token::Pub };
|
||||
[ref] => { $crate::token::Ref };
|
||||
[return] => { $crate::token::Return };
|
||||
[Self] => { $crate::token::SelfType };
|
||||
[self] => { $crate::token::SelfValue };
|
||||
[static] => { $crate::token::Static };
|
||||
[struct] => { $crate::token::Struct };
|
||||
[super] => { $crate::token::Super };
|
||||
[trait] => { $crate::token::Trait };
|
||||
[try] => { $crate::token::Try };
|
||||
[type] => { $crate::token::Type };
|
||||
[typeof] => { $crate::token::Typeof };
|
||||
[union] => { $crate::token::Union };
|
||||
[unsafe] => { $crate::token::Unsafe };
|
||||
[unsized] => { $crate::token::Unsized };
|
||||
[use] => { $crate::token::Use };
|
||||
[virtual] => { $crate::token::Virtual };
|
||||
[where] => { $crate::token::Where };
|
||||
[while] => { $crate::token::While };
|
||||
[yield] => { $crate::token::Yield };
|
||||
[+] => { $crate::token::Add };
|
||||
[+=] => { $crate::token::AddEq };
|
||||
[&] => { $crate::token::And };
|
||||
[&&] => { $crate::token::AndAnd };
|
||||
[&=] => { $crate::token::AndEq };
|
||||
[@] => { $crate::token::At };
|
||||
[!] => { $crate::token::Bang };
|
||||
[^] => { $crate::token::Caret };
|
||||
[^=] => { $crate::token::CaretEq };
|
||||
[:] => { $crate::token::Colon };
|
||||
[::] => { $crate::token::Colon2 };
|
||||
[,] => { $crate::token::Comma };
|
||||
[/] => { $crate::token::Div };
|
||||
[/=] => { $crate::token::DivEq };
|
||||
[$] => { $crate::token::Dollar };
|
||||
[.] => { $crate::token::Dot };
|
||||
[..] => { $crate::token::Dot2 };
|
||||
[...] => { $crate::token::Dot3 };
|
||||
[..=] => { $crate::token::DotDotEq };
|
||||
[=] => { $crate::token::Eq };
|
||||
[==] => { $crate::token::EqEq };
|
||||
[>=] => { $crate::token::Ge };
|
||||
[>] => { $crate::token::Gt };
|
||||
[<=] => { $crate::token::Le };
|
||||
[<] => { $crate::token::Lt };
|
||||
[*=] => { $crate::token::MulEq };
|
||||
[!=] => { $crate::token::Ne };
|
||||
[|] => { $crate::token::Or };
|
||||
[|=] => { $crate::token::OrEq };
|
||||
[||] => { $crate::token::OrOr };
|
||||
[#] => { $crate::token::Pound };
|
||||
[?] => { $crate::token::Question };
|
||||
[->] => { $crate::token::RArrow };
|
||||
[<-] => { $crate::token::LArrow };
|
||||
[%] => { $crate::token::Rem };
|
||||
[%=] => { $crate::token::RemEq };
|
||||
[=>] => { $crate::token::FatArrow };
|
||||
[;] => { $crate::token::Semi };
|
||||
[<<] => { $crate::token::Shl };
|
||||
[<<=] => { $crate::token::ShlEq };
|
||||
[>>] => { $crate::token::Shr };
|
||||
[>>=] => { $crate::token::ShrEq };
|
||||
[*] => { $crate::token::Star };
|
||||
[-] => { $crate::token::Sub };
|
||||
[-=] => { $crate::token::SubEq };
|
||||
[~] => { $crate::token::Tilde };
|
||||
[_] => { $crate::token::Underscore };
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -857,20 +879,19 @@ macro_rules! export_token_macro {
|
||||
// https://github.com/rust-lang/rust/issues/57919
|
||||
// We put the Token![await] rule in a place that is not lexed by old rustc.
|
||||
#[cfg(not(syn_omit_await_from_token_macro))]
|
||||
include!("await.rs"); // export_token_macro![(await)];
|
||||
include!("await.rs"); // export_token_macro! {[await]}
|
||||
#[cfg(syn_omit_await_from_token_macro)]
|
||||
export_token_macro![];
|
||||
export_token_macro! {}
|
||||
|
||||
// Not public API.
|
||||
#[doc(hidden)]
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use proc_macro2::{Spacing, Span};
|
||||
|
||||
use crate::buffer::Cursor;
|
||||
use crate::error::{Error, Result};
|
||||
use crate::parse::ParseStream;
|
||||
use crate::span::FromSpans;
|
||||
use proc_macro2::{Spacing, Span};
|
||||
|
||||
pub fn keyword(input: ParseStream, token: &str) -> Result<Span> {
|
||||
input.step(|cursor| {
|
||||
|
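For readers skimming the regenerated macro arms above: downstream crates name these token types through `Token![...]` in struct fields and parser calls. A minimal sketch of that usage, relying only on syn's public parsing API (the `Mapping` type and its `name => 42` grammar are hypothetical, invented for illustration):

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::{Ident, LitInt, Token};

// Parses input of the form `name => 42`; `Token![=>]` resolves to the
// `FatArrow` type listed in the macro arms above.
struct Mapping {
    name: Ident,
    arrow_token: Token![=>],
    value: LitInt,
}

impl Parse for Mapping {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(Mapping {
            name: input.parse()?,
            arrow_token: input.parse()?,
            value: input.parse()?,
        })
    }
}
```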
3
third_party/rust/syn/src/tt.rs
vendored
@ -1,6 +1,5 @@
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
use proc_macro2::{Delimiter, TokenStream, TokenTree};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
pub struct TokenTreeHelper<'a>(pub &'a TokenTree);
|
||||
|
||||
|
161
third_party/rust/syn/src/ty.rs
vendored
@ -12,10 +12,8 @@ ast_enum_of_structs! {
|
||||
///
|
||||
/// This type is a [syntax tree enum].
|
||||
///
|
||||
/// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
|
||||
//
|
||||
// TODO: change syntax-tree-enum link to an intra rustdoc link, currently
|
||||
// blocked on https://github.com/rust-lang/rust/issues/62833
|
||||
/// [syntax tree enum]: Expr#syntax-tree-enums
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum Type {
|
||||
/// A fixed size array type: `[T; n]`.
|
||||
Array(TypeArray),
|
||||
@ -65,8 +63,31 @@ ast_enum_of_structs! {
|
||||
/// Tokens in type position not interpreted by Syn.
|
||||
Verbatim(TokenStream),
|
||||
|
||||
// The following is the only supported idiom for exhaustive matching of
|
||||
// this enum.
|
||||
//
|
||||
// match expr {
|
||||
// Type::Array(e) => {...}
|
||||
// Type::BareFn(e) => {...}
|
||||
// ...
|
||||
// Type::Verbatim(e) => {...}
|
||||
//
|
||||
// #[cfg(test)]
|
||||
// Type::__TestExhaustive(_) => unimplemented!(),
|
||||
// #[cfg(not(test))]
|
||||
// _ => { /* some sane fallback */ }
|
||||
// }
|
||||
//
|
||||
// This way we fail your tests but don't break your library when adding
|
||||
// a variant. You will be notified by a test failure when a variant is
|
||||
// added, so that you can add code to handle it, but your library will
|
||||
// continue to compile and work for downstream users in the interim.
|
||||
//
|
||||
// Once `deny(reachable)` is available in rustc, Type will be
|
||||
// reimplemented as a non_exhaustive enum.
|
||||
// https://github.com/rust-lang/rust/issues/44109#issuecomment-521781237
|
||||
#[doc(hidden)]
|
||||
__Nonexhaustive,
|
||||
__TestExhaustive(crate::private),
|
||||
}
|
||||
}
|
||||
|
||||
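The comment above spells out the test-only exhaustive-match idiom that replaces `__Nonexhaustive`; ordinary downstream code is expected to keep a wildcard fallback so new variants do not break it. A minimal sketch of that pattern (the `describe` helper is hypothetical; the variants are syn's public ones):

```rust
fn describe(ty: &syn::Type) -> &'static str {
    match ty {
        syn::Type::Path(_) => "path type",
        syn::Type::Reference(_) => "reference type",
        syn::Type::Tuple(_) => "tuple type",
        // Future syn releases may add variants; fall back instead of panicking.
        _ => "other type",
    }
}
```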
@ -75,6 +96,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeArray {
|
||||
pub bracket_token: token::Bracket,
|
||||
pub elem: Box<Type>,
|
||||
@ -88,6 +110,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeBareFn {
|
||||
pub lifetimes: Option<BoundLifetimes>,
|
||||
pub unsafety: Option<Token![unsafe]>,
|
||||
@ -105,6 +128,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeGroup {
|
||||
pub group_token: token::Group,
|
||||
pub elem: Box<Type>,
|
||||
@ -117,6 +141,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeImplTrait {
|
||||
pub impl_token: Token![impl],
|
||||
pub bounds: Punctuated<TypeParamBound, Token![+]>,
|
||||
@ -128,6 +153,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeInfer {
|
||||
pub underscore_token: Token![_],
|
||||
}
|
||||
@ -138,6 +164,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeMacro {
|
||||
pub mac: Macro,
|
||||
}
|
||||
@ -148,6 +175,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeNever {
|
||||
pub bang_token: Token![!],
|
||||
}
|
||||
@ -158,6 +186,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeParen {
|
||||
pub paren_token: token::Paren,
|
||||
pub elem: Box<Type>,
|
||||
@ -170,6 +199,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypePath {
|
||||
pub qself: Option<QSelf>,
|
||||
pub path: Path,
|
||||
@ -181,6 +211,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypePtr {
|
||||
pub star_token: Token![*],
|
||||
pub const_token: Option<Token![const]>,
|
||||
@ -194,6 +225,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeReference {
|
||||
pub and_token: Token![&],
|
||||
pub lifetime: Option<Lifetime>,
|
||||
@ -207,6 +239,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeSlice {
|
||||
pub bracket_token: token::Bracket,
|
||||
pub elem: Box<Type>,
|
||||
@ -219,6 +252,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeTraitObject {
|
||||
pub dyn_token: Option<Token![dyn]>,
|
||||
pub bounds: Punctuated<TypeParamBound, Token![+]>,
|
||||
@ -230,6 +264,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or
|
||||
/// `"full"` feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct TypeTuple {
|
||||
pub paren_token: token::Paren,
|
||||
pub elems: Punctuated<Type, Token![,]>,
|
||||
@ -241,6 +276,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct Abi {
|
||||
pub extern_token: Token![extern],
|
||||
pub name: Option<LitStr>,
|
||||
@ -252,6 +288,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct BareFnArg {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub name: Option<(Ident, Token![:])>,
|
||||
@ -274,6 +311,7 @@ ast_struct! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub struct Variadic {
|
||||
pub attrs: Vec<Attribute>,
|
||||
pub dots: Token![...],
|
||||
@ -285,6 +323,7 @@ ast_enum! {
|
||||
///
|
||||
/// *This type is available only if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(doc_cfg, doc(cfg(any(feature = "full", feature = "derive"))))]
|
||||
pub enum ReturnType {
|
||||
/// Return type is not specified.
|
||||
///
|
||||
@ -298,13 +337,12 @@ ast_enum! {
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
|
||||
use crate::ext::IdentExt;
|
||||
use crate::parse::{Parse, ParseStream, Result};
|
||||
use crate::path;
|
||||
use proc_macro2::{Punct, Spacing, TokenTree};
|
||||
use std::iter::FromIterator;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Type {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let allow_plus = true;
|
||||
@ -318,6 +356,7 @@ pub mod parsing {
|
||||
/// contain a `+` character.
|
||||
///
|
||||
/// This parser does not allow a `+`, while the default parser does.
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
pub fn without_plus(input: ParseStream) -> Result<Self> {
|
||||
let allow_plus = false;
|
||||
ambig_ty(input, allow_plus)
|
||||
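As the doc comment notes, `Type::without_plus` stops before a `+` that the default `Parse` impl would otherwise fold into a trait-object bound. A minimal sketch of calling it from a custom parser (the `TypePlusLifetime` grammar is hypothetical; the calls are syn's public API):

```rust
use syn::parse::{Parse, ParseStream, Result};
use syn::{Lifetime, Token, Type};

// Parses `T + 'a` as a type, a `+`, and a lifetime, for a hand-rolled grammar
// where the `+` separates the two pieces rather than extending the type.
struct TypePlusLifetime {
    ty: Type,
    plus_token: Token![+],
    lifetime: Lifetime,
}

impl Parse for TypePlusLifetime {
    fn parse(input: ParseStream) -> Result<Self> {
        Ok(TypePlusLifetime {
            // `input.parse::<Type>()` would greedily treat `+ 'a` as part of a
            // trait object; `without_plus` leaves the `+` for us to consume.
            ty: input.call(Type::without_plus)?,
            plus_token: input.parse()?,
            lifetime: input.parse()?,
        })
    }
}
```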
@ -325,11 +364,41 @@ pub mod parsing {
|
||||
}
|
||||
|
||||
fn ambig_ty(input: ParseStream, allow_plus: bool) -> Result<Type> {
|
||||
if input.peek(token::Group) && !input.peek2(Token![::]) && !input.peek2(Token![<]) {
|
||||
return input.parse().map(Type::Group);
|
||||
let begin = input.fork();
|
||||
|
||||
if input.peek(token::Group) {
|
||||
let mut group: TypeGroup = input.parse()?;
|
||||
if input.peek(Token![::]) && input.peek3(Ident::peek_any) {
|
||||
if let Type::Path(mut ty) = *group.elem {
|
||||
Path::parse_rest(input, &mut ty.path, false)?;
|
||||
return Ok(Type::Path(ty));
|
||||
} else {
|
||||
return Ok(Type::Path(TypePath {
|
||||
qself: Some(QSelf {
|
||||
lt_token: Token![<](group.group_token.span),
|
||||
position: 0,
|
||||
as_token: None,
|
||||
gt_token: Token![>](group.group_token.span),
|
||||
ty: group.elem,
|
||||
}),
|
||||
path: Path::parse_helper(input, false)?,
|
||||
}));
|
||||
}
|
||||
} else if input.peek(Token![<]) || input.peek(Token![::]) && input.peek3(Token![<]) {
|
||||
if let Type::Path(mut ty) = *group.elem {
|
||||
let arguments = &mut ty.path.segments.last_mut().unwrap().arguments;
|
||||
if let PathArguments::None = arguments {
|
||||
*arguments = PathArguments::AngleBracketed(input.parse()?);
|
||||
Path::parse_rest(input, &mut ty.path, false)?;
|
||||
return Ok(Type::Path(ty));
|
||||
} else {
|
||||
group.elem = Box::new(Type::Path(ty));
|
||||
}
|
||||
}
|
||||
}
|
||||
return Ok(Type::Group(group));
|
||||
}
|
||||
|
||||
let begin = input.fork();
|
||||
let mut lifetimes = None::<BoundLifetimes>;
|
||||
let mut lookahead = input.lookahead1();
|
||||
if lookahead.peek(Token![for]) {
|
||||
@ -388,9 +457,13 @@ pub mod parsing {
|
||||
let mut elems = Punctuated::new();
|
||||
elems.push_value(first);
|
||||
elems.push_punct(content.parse()?);
|
||||
let rest: Punctuated<Type, Token![,]> =
|
||||
content.parse_terminated(Parse::parse)?;
|
||||
elems.extend(rest);
|
||||
while !content.is_empty() {
|
||||
elems.push_value(content.parse()?);
|
||||
if content.is_empty() {
|
||||
break;
|
||||
}
|
||||
elems.push_punct(content.parse()?);
|
||||
}
|
||||
elems
|
||||
},
|
||||
}));
|
||||
@ -567,6 +640,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeSlice {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let content;
|
||||
@ -577,6 +651,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeArray {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let content;
|
||||
@ -589,6 +664,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypePtr {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let star_token: Token![*] = input.parse()?;
|
||||
@ -611,6 +687,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeReference {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(TypeReference {
|
||||
@ -623,6 +700,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeBareFn {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let allow_mut_self = false;
|
||||
@ -680,6 +758,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeNever {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(TypeNever {
|
||||
@ -688,6 +767,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeInfer {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(TypeInfer {
|
||||
@ -696,6 +776,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeTuple {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let content;
|
||||
@ -715,15 +796,20 @@ pub mod parsing {
|
||||
let mut elems = Punctuated::new();
|
||||
elems.push_value(first);
|
||||
elems.push_punct(content.parse()?);
|
||||
let rest: Punctuated<Type, Token![,]> =
|
||||
content.parse_terminated(Parse::parse)?;
|
||||
elems.extend(rest);
|
||||
while !content.is_empty() {
|
||||
elems.push_value(content.parse()?);
|
||||
if content.is_empty() {
|
||||
break;
|
||||
}
|
||||
elems.push_punct(content.parse()?);
|
||||
}
|
||||
elems
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeMacro {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(TypeMacro {
|
||||
@ -732,6 +818,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypePath {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let (qself, mut path) = path::parsing::qpath(input, false)?;
|
||||
@ -764,12 +851,14 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for ReturnType {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Self::parse(input, true)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeTraitObject {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Self::parse(input, true)
|
||||
@ -822,6 +911,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeImplTrait {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(TypeImplTrait {
|
||||
@ -843,6 +933,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeGroup {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let group = crate::group::parse_group(input)?;
|
||||
@ -853,6 +944,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for TypeParen {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let allow_plus = false;
|
||||
@ -870,6 +962,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for BareFnArg {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
let allow_mut_self = false;
|
||||
@ -915,12 +1008,14 @@ pub mod parsing {
|
||||
TokenTree::Punct(Punct::new('.', Spacing::Joint)),
|
||||
TokenTree::Punct(Punct::new('.', Spacing::Alone)),
|
||||
];
|
||||
let tokens = TokenStream::from_iter(args.into_iter().zip(&dot3.spans).map(
|
||||
|(mut arg, span)| {
|
||||
let tokens: TokenStream = args
|
||||
.into_iter()
|
||||
.zip(&dot3.spans)
|
||||
.map(|(mut arg, span)| {
|
||||
arg.set_span(*span);
|
||||
arg
|
||||
},
|
||||
));
|
||||
})
|
||||
.collect();
|
||||
Type::Verbatim(tokens)
|
||||
} else if allow_mut_self && input.peek(Token![mut]) && input.peek2(Token![self]) {
|
||||
has_mut_self = true;
|
||||
@ -941,6 +1036,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Abi {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
Ok(Abi {
|
||||
@ -950,6 +1046,7 @@ pub mod parsing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
|
||||
impl Parse for Option<Abi> {
|
||||
fn parse(input: ParseStream) -> Result<Self> {
|
||||
if input.peek(Token![extern]) {
|
||||
@ -964,13 +1061,12 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
|
||||
use crate::attr::FilterAttrs;
|
||||
use crate::print::TokensOrDefault;
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
use crate::attr::FilterAttrs;
|
||||
use crate::print::TokensOrDefault;
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeSlice {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
@ -979,6 +1075,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeArray {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
@ -989,6 +1086,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypePtr {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.star_token.to_tokens(tokens);
|
||||
@ -1002,6 +1100,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeReference {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.and_token.to_tokens(tokens);
|
||||
@ -1011,6 +1110,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeBareFn {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.lifetimes.to_tokens(tokens);
|
||||
@ -1031,12 +1131,14 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeNever {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.bang_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeTuple {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
@ -1045,12 +1147,14 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypePath {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
private::print_path(tokens, &self.qself, &self.path);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeTraitObject {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.dyn_token.to_tokens(tokens);
|
||||
@ -1058,6 +1162,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeImplTrait {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.impl_token.to_tokens(tokens);
|
||||
@ -1065,6 +1170,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeGroup {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.group_token.surround(tokens, |tokens| {
|
||||
@ -1073,6 +1179,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeParen {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
@ -1081,18 +1188,21 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeInfer {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.underscore_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for TypeMacro {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.mac.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for ReturnType {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
@ -1105,6 +1215,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for BareFnArg {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -1116,6 +1227,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Variadic {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
@ -1123,6 +1235,7 @@ mod printing {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg_attr(doc_cfg, doc(cfg(feature = "printing")))]
|
||||
impl ToTokens for Abi {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.extern_token.to_tokens(tokens);
|
||||
|
446
third_party/rust/syn/tests/common/eq.rs
vendored
@ -1,47 +1,54 @@
|
||||
extern crate rustc_ast;
|
||||
extern crate rustc_data_structures;
|
||||
extern crate rustc_span;
|
||||
extern crate rustc_target;
|
||||
|
||||
use std::mem;
|
||||
|
||||
use rustc_ast::ast::{
|
||||
AngleBracketedArg, AngleBracketedArgs, AnonConst, Arm, AssocItemKind, AssocTyConstraint,
|
||||
AssocTyConstraintKind, Async, AttrId, AttrItem, AttrKind, AttrStyle, Attribute, BareFnTy,
|
||||
BinOpKind, BindingMode, Block, BlockCheckMode, BorrowKind, CaptureBy, Const, Crate, CrateSugar,
|
||||
Defaultness, EnumDef, Expr, ExprKind, Extern, Field, FieldPat, FloatTy, FnDecl, FnHeader,
|
||||
FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
|
||||
GenericParam, GenericParamKind, Generics, GlobalAsm, ImplPolarity, InlineAsm, InlineAsmOperand,
|
||||
InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy, IsAuto, Item,
|
||||
ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind, LlvmAsmDialect,
|
||||
LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt, MacDelimiter,
|
||||
MacStmtStyle, MacroDef, Mod, Movability, MutTy, Mutability, NodeId, Param, ParenthesizedArgs,
|
||||
Pat, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd, RangeLimits, RangeSyntax, Stmt,
|
||||
StmtKind, StrLit, StrStyle, StructField, TraitBoundModifier, TraitObjectSyntax, TraitRef, Ty,
|
||||
TyKind, UintTy, UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData,
|
||||
Defaultness, EnumDef, Expr, ExprField, ExprKind, Extern, FieldDef, FloatTy, FnDecl, FnHeader,
|
||||
FnKind, FnRetTy, FnSig, ForeignItemKind, ForeignMod, GenericArg, GenericArgs, GenericBound,
|
||||
GenericParam, GenericParamKind, Generics, ImplKind, ImplPolarity, Inline, InlineAsm,
|
||||
InlineAsmOperand, InlineAsmOptions, InlineAsmRegOrRegClass, InlineAsmTemplatePiece, IntTy,
|
||||
IsAuto, Item, ItemKind, Label, Lifetime, Lit, LitFloatType, LitIntType, LitKind,
|
||||
LlvmAsmDialect, LlvmInlineAsm, LlvmInlineAsmOutput, Local, MacArgs, MacCall, MacCallStmt,
|
||||
MacDelimiter, MacStmtStyle, MacroDef, ModKind, Movability, MutTy, Mutability, NodeId, Param,
|
||||
ParenthesizedArgs, Pat, PatField, PatKind, Path, PathSegment, PolyTraitRef, QSelf, RangeEnd,
|
||||
RangeLimits, RangeSyntax, Stmt, StmtKind, StrLit, StrStyle, StructExpr, StructRest,
|
||||
TraitBoundModifier, TraitKind, TraitObjectSyntax, TraitRef, Ty, TyAliasKind, TyKind, UintTy,
|
||||
UnOp, Unsafe, UnsafeSource, UseTree, UseTreeKind, Variant, VariantData, Visibility,
|
||||
VisibilityKind, WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
|
||||
WhereRegionPredicate,
|
||||
};
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::token::{self, CommentKind, DelimToken, Token, TokenKind};
|
||||
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
|
||||
use rustc_ast::token::{self, CommentKind, DelimToken, Nonterminal, Token, TokenKind};
|
||||
use rustc_ast::tokenstream::{
|
||||
AttrAnnotatedTokenStream, AttrAnnotatedTokenTree, AttributesData, DelimSpan, LazyTokenStream,
|
||||
Spacing, TokenStream, TokenTree,
|
||||
};
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_data_structures::thin_vec::ThinVec;
|
||||
use rustc_span::source_map::Spanned;
|
||||
use rustc_span::symbol::Ident;
|
||||
use rustc_span::{Span, Symbol, SyntaxContext};
|
||||
use rustc_span::symbol::{sym, Ident};
|
||||
use rustc_span::{Span, Symbol, SyntaxContext, DUMMY_SP};
|
||||
|
||||
pub trait SpanlessEq {
|
||||
fn eq(&self, other: &Self) -> bool;
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for Box<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
SpanlessEq::eq(&**self, &**other)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for P<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
SpanlessEq::eq(&**self, &**other)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for Lrc<T> {
|
||||
impl<T: ?Sized + SpanlessEq> SpanlessEq for Lrc<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
SpanlessEq::eq(&**self, &**other)
|
||||
}
|
||||
@ -57,12 +64,18 @@ impl<T: SpanlessEq> SpanlessEq for Option<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for Vec<T> {
|
||||
impl<T: SpanlessEq> SpanlessEq for [T] {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.len() == other.len() && self.iter().zip(other).all(|(a, b)| SpanlessEq::eq(a, b))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for Vec<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
<[T] as SpanlessEq>::eq(self, other)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SpanlessEq> SpanlessEq for ThinVec<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.len() == other.len()
|
||||
@ -86,7 +99,7 @@ impl<A: SpanlessEq, B: SpanlessEq> SpanlessEq for (A, B) {
|
||||
}
|
||||
|
||||
macro_rules! spanless_eq_true {
|
||||
($name:ident) => {
|
||||
($name:ty) => {
|
||||
impl SpanlessEq for $name {
|
||||
fn eq(&self, _other: &Self) -> bool {
|
||||
true
|
||||
@ -100,9 +113,10 @@ spanless_eq_true!(DelimSpan);
|
||||
spanless_eq_true!(AttrId);
|
||||
spanless_eq_true!(NodeId);
|
||||
spanless_eq_true!(SyntaxContext);
|
||||
spanless_eq_true!(Spacing);
|
||||
|
||||
macro_rules! spanless_eq_partial_eq {
|
||||
($name:ident) => {
|
||||
($name:ty) => {
|
||||
impl SpanlessEq for $name {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
PartialEq::eq(self, other)
|
||||
@ -122,73 +136,74 @@ spanless_eq_partial_eq!(Symbol);
|
||||
spanless_eq_partial_eq!(CommentKind);
|
||||
spanless_eq_partial_eq!(DelimToken);
|
||||
spanless_eq_partial_eq!(InlineAsmOptions);
|
||||
spanless_eq_partial_eq!(token::LitKind);
|
||||
|
||||
macro_rules! spanless_eq_struct {
|
||||
{
|
||||
$name:ident $(<$param:ident>)?;
|
||||
$([$field:ident $other:ident])*
|
||||
$(![$ignore:ident])*
|
||||
$($name:ident)::+ $(<$param:ident>)?
|
||||
$([$field:tt $this:ident $other:ident])*
|
||||
$(![$ignore:tt])*;
|
||||
} => {
|
||||
impl $(<$param: SpanlessEq>)* SpanlessEq for $name $(<$param>)* {
|
||||
impl $(<$param: SpanlessEq>)* SpanlessEq for $($name)::+ $(<$param>)* {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
let $name { $($field,)* $($ignore: _,)* } = self;
|
||||
let $name { $($field: $other,)* $($ignore: _,)* } = other;
|
||||
$(SpanlessEq::eq($field, $other))&&*
|
||||
let $($name)::+ { $($field: $this,)* $($ignore: _,)* } = self;
|
||||
let $($name)::+ { $($field: $other,)* $($ignore: _,)* } = other;
|
||||
true $(&& SpanlessEq::eq($this, $other))*
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
{
|
||||
$name:ident $(<$param:ident>)?;
|
||||
$([$field:ident $other:ident])*
|
||||
$next:ident
|
||||
$($rest:ident)*
|
||||
$(!$ignore:ident)*
|
||||
$($name:ident)::+ $(<$param:ident>)?
|
||||
$([$field:tt $this:ident $other:ident])*
|
||||
$(![$ignore:tt])*;
|
||||
!$next:tt
|
||||
$($rest:tt)*
|
||||
} => {
|
||||
spanless_eq_struct! {
|
||||
$name $(<$param>)*;
|
||||
$([$field $other])*
|
||||
[$next other]
|
||||
$($name)::+ $(<$param>)*
|
||||
$([$field $this $other])*
|
||||
$(![$ignore])*
|
||||
![$next];
|
||||
$($rest)*
|
||||
$(!$ignore)*
|
||||
}
|
||||
};
|
||||
|
||||
{
|
||||
$name:ident $(<$param:ident>)?;
|
||||
$([$field:ident $other:ident])*
|
||||
$(![$ignore:ident])*
|
||||
!$next:ident
|
||||
$(!$rest:ident)*
|
||||
$($name:ident)::+ $(<$param:ident>)?
|
||||
$([$field:tt $this:ident $other:ident])*
|
||||
$(![$ignore:tt])*;
|
||||
$next:tt
|
||||
$($rest:tt)*
|
||||
} => {
|
||||
spanless_eq_struct! {
|
||||
$name $(<$param>)*;
|
||||
$([$field $other])*
|
||||
$(![$ignore])*
|
||||
![$next]
|
||||
$(!$rest)*
|
||||
$($name)::+ $(<$param>)*
|
||||
$([$field $this $other])*
|
||||
[$next this other]
|
||||
$(![$ignore])*;
|
||||
$($rest)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! spanless_eq_enum {
|
||||
{
|
||||
$name:ident;
|
||||
$([$variant:ident $([$field:tt $this:ident $other:ident])*])*
|
||||
$($name:ident)::+;
|
||||
$([$($variant:ident)::+; $([$field:tt $this:ident $other:ident])* $(![$ignore:tt])*])*
|
||||
} => {
|
||||
impl SpanlessEq for $name {
|
||||
impl SpanlessEq for $($name)::+ {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match self {
|
||||
$(
|
||||
$name::$variant { .. } => {}
|
||||
$($variant)::+ { .. } => {}
|
||||
)*
|
||||
}
|
||||
#[allow(unreachable_patterns)]
|
||||
match (self, other) {
|
||||
$(
|
||||
(
|
||||
$name::$variant { $($field: $this),* },
|
||||
$name::$variant { $($field: $other),* },
|
||||
$($variant)::+ { $($field: $this,)* $($ignore: _,)* },
|
||||
$($variant)::+ { $($field: $other,)* $($ignore: _,)* },
|
||||
) => {
|
||||
true $(&& SpanlessEq::eq($this, $other))*
|
||||
}
|
||||
@ -200,57 +215,71 @@ macro_rules! spanless_eq_enum {
|
||||
};
|
||||
|
||||
{
|
||||
$name:ident;
|
||||
$([$variant:ident $($fields:tt)*])*
|
||||
$next:ident [$($named:tt)*] ( $i:tt $($field:tt)* )
|
||||
$($name:ident)::+;
|
||||
$([$($variant:ident)::+; $($fields:tt)*])*
|
||||
$next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] (!$i:tt $($field:tt)*)
|
||||
$($rest:tt)*
|
||||
} => {
|
||||
spanless_eq_enum! {
|
||||
$name;
|
||||
$([$variant $($fields)*])*
|
||||
$next [$($named)* [$i this other]] ( $($field)* )
|
||||
$($name)::+;
|
||||
$([$($variant)::+; $($fields)*])*
|
||||
$next [$([$($named)*])* $(![$ignore])* ![$i]] ($($field)*)
|
||||
$($rest)*
|
||||
}
|
||||
};
|
||||
|
||||
{
|
||||
$name:ident;
|
||||
$([$variant:ident $($fields:tt)*])*
|
||||
$($name:ident)::+;
|
||||
$([$($variant:ident)::+; $($fields:tt)*])*
|
||||
$next:ident [$([$($named:tt)*])* $(![$ignore:tt])*] ($i:tt $($field:tt)*)
|
||||
$($rest:tt)*
|
||||
} => {
|
||||
spanless_eq_enum! {
|
||||
$($name)::+;
|
||||
$([$($variant)::+; $($fields)*])*
|
||||
$next [$([$($named)*])* [$i this other] $(![$ignore])*] ($($field)*)
|
||||
$($rest)*
|
||||
}
|
||||
};
|
||||
|
||||
{
|
||||
$($name:ident)::+;
|
||||
$([$($variant:ident)::+; $($fields:tt)*])*
|
||||
$next:ident [$($named:tt)*] ()
|
||||
$($rest:tt)*
|
||||
} => {
|
||||
spanless_eq_enum! {
|
||||
$name;
|
||||
$([$variant $($fields)*])*
|
||||
[$next $($named)*]
|
||||
$($name)::+;
|
||||
$([$($variant)::+; $($fields)*])*
|
||||
[$($name)::+::$next; $($named)*]
|
||||
$($rest)*
|
||||
}
|
||||
};
|
||||
|
||||
{
|
||||
$name:ident;
|
||||
$([$variant:ident $($fields:tt)*])*
|
||||
$next:ident ( $($field:tt)* )
|
||||
$($name:ident)::+;
|
||||
$([$($variant:ident)::+; $($fields:tt)*])*
|
||||
$next:ident ($($field:tt)*)
|
||||
$($rest:tt)*
|
||||
} => {
|
||||
spanless_eq_enum! {
|
||||
$name;
|
||||
$([$variant $($fields)*])*
|
||||
$next [] ( $($field)* )
|
||||
$($name)::+;
|
||||
$([$($variant)::+; $($fields)*])*
|
||||
$next [] ($($field)*)
|
||||
$($rest)*
|
||||
}
|
||||
};
|
||||
|
||||
{
|
||||
$name:ident;
|
||||
$([$variant:ident $($fields:tt)*])*
|
||||
$($name:ident)::+;
|
||||
$([$($variant:ident)::+; $($fields:tt)*])*
|
||||
$next:ident
|
||||
$($rest:tt)*
|
||||
} => {
|
||||
spanless_eq_enum! {
|
||||
$name;
|
||||
$([$variant $($fields)*])*
|
||||
[$next]
|
||||
$($name)::+;
|
||||
$([$($variant)::+; $($fields)*])*
|
||||
[$($name)::+::$next;]
|
||||
$($rest)*
|
||||
}
|
||||
};
|
||||
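To make the rewritten token-tree muncher above easier to follow, here is roughly what a simple invocation such as `spanless_eq_struct!(Label; ident);` boils down to, written out by hand against a stand-in trait. Everything below is a self-contained illustration with placeholder types, not the test suite's actual code:

```rust
// Stand-in for the trait defined in eq.rs.
trait SpanlessEq {
    fn eq(&self, other: &Self) -> bool;
}

impl SpanlessEq for String {
    fn eq(&self, other: &Self) -> bool {
        self == other
    }
}

struct Label {
    ident: String,
}

// Hand-written equivalent of the macro expansion: destructure both sides so
// that adding a field becomes a compile error here, then fold the per-field
// comparisons with `&&`, starting from `true`.
impl SpanlessEq for Label {
    fn eq(&self, other: &Self) -> bool {
        let Label { ident: this } = self;
        let Label { ident: other } = other;
        true && SpanlessEq::eq(this, other)
    }
}

fn main() {
    let a = Label { ident: "x".to_owned() };
    let b = Label { ident: "x".to_owned() };
    assert!(SpanlessEq::eq(&a, &b));
}
```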
@ -259,23 +288,26 @@ macro_rules! spanless_eq_enum {
|
||||
spanless_eq_struct!(AngleBracketedArgs; span args);
|
||||
spanless_eq_struct!(AnonConst; id value);
|
||||
spanless_eq_struct!(Arm; attrs pat guard body span id is_placeholder);
|
||||
spanless_eq_struct!(AssocTyConstraint; id ident kind span);
|
||||
spanless_eq_struct!(AttrItem; path args);
|
||||
spanless_eq_struct!(AssocTyConstraint; id ident gen_args kind span);
|
||||
spanless_eq_struct!(AttrAnnotatedTokenStream; 0);
|
||||
spanless_eq_struct!(AttrItem; path args tokens);
|
||||
spanless_eq_struct!(Attribute; kind id style span);
|
||||
spanless_eq_struct!(AttributesData; attrs tokens);
|
||||
spanless_eq_struct!(BareFnTy; unsafety ext generic_params decl);
|
||||
spanless_eq_struct!(Block; stmts id rules span);
|
||||
spanless_eq_struct!(Crate; module attrs span proc_macros);
|
||||
spanless_eq_struct!(Block; stmts id rules span tokens);
|
||||
spanless_eq_struct!(Crate; attrs items span proc_macros);
|
||||
spanless_eq_struct!(EnumDef; variants);
|
||||
spanless_eq_struct!(Expr; id kind span attrs !tokens);
|
||||
spanless_eq_struct!(Field; attrs id span ident expr is_shorthand is_placeholder);
|
||||
spanless_eq_struct!(FieldPat; ident pat is_shorthand attrs id span is_placeholder);
|
||||
spanless_eq_struct!(ExprField; attrs id span ident expr is_shorthand is_placeholder);
|
||||
spanless_eq_struct!(FieldDef; attrs id span vis ident ty is_placeholder);
|
||||
spanless_eq_struct!(FnDecl; inputs output);
|
||||
spanless_eq_struct!(FnHeader; constness asyncness unsafety ext);
|
||||
spanless_eq_struct!(FnKind; 0 1 2 3);
|
||||
spanless_eq_struct!(FnSig; header decl span);
|
||||
spanless_eq_struct!(ForeignMod; abi items);
|
||||
spanless_eq_struct!(ForeignMod; unsafety abi items);
|
||||
spanless_eq_struct!(GenericParam; id ident attrs bounds is_placeholder kind);
|
||||
spanless_eq_struct!(Generics; params where_clause span);
|
||||
spanless_eq_struct!(GlobalAsm; asm);
|
||||
spanless_eq_struct!(ImplKind; unsafety polarity defaultness constness generics of_trait self_ty items);
|
||||
spanless_eq_struct!(InlineAsm; template operands options line_spans);
|
||||
spanless_eq_struct!(Item<K>; attrs id span vis ident kind !tokens);
|
||||
spanless_eq_struct!(Label; ident);
|
||||
@ -283,36 +315,39 @@ spanless_eq_struct!(Lifetime; id ident);
|
||||
spanless_eq_struct!(Lit; token kind span);
|
||||
spanless_eq_struct!(LlvmInlineAsm; asm asm_str_style outputs inputs clobbers volatile alignstack dialect);
|
||||
spanless_eq_struct!(LlvmInlineAsmOutput; constraint expr is_rw is_indirect);
|
||||
spanless_eq_struct!(Local; pat ty init id span attrs);
|
||||
spanless_eq_struct!(Local; pat ty init id span attrs !tokens);
|
||||
spanless_eq_struct!(MacCall; path args prior_type_ascription);
|
||||
spanless_eq_struct!(MacCallStmt; mac style attrs);
|
||||
spanless_eq_struct!(MacCallStmt; mac style attrs tokens);
|
||||
spanless_eq_struct!(MacroDef; body macro_rules);
|
||||
spanless_eq_struct!(Mod; inner items inline);
|
||||
spanless_eq_struct!(MutTy; ty mutbl);
|
||||
spanless_eq_struct!(Param; attrs ty pat id span is_placeholder);
|
||||
spanless_eq_struct!(ParenthesizedArgs; span inputs output);
|
||||
spanless_eq_struct!(ParenthesizedArgs; span inputs inputs_span output);
|
||||
spanless_eq_struct!(Pat; id kind span tokens);
|
||||
spanless_eq_struct!(Path; span segments);
|
||||
spanless_eq_struct!(PatField; ident pat is_shorthand attrs id span is_placeholder);
|
||||
spanless_eq_struct!(Path; span segments tokens);
|
||||
spanless_eq_struct!(PathSegment; ident id args);
|
||||
spanless_eq_struct!(PolyTraitRef; bound_generic_params trait_ref span);
|
||||
spanless_eq_struct!(QSelf; ty path_span position);
|
||||
spanless_eq_struct!(Stmt; id kind span);
|
||||
spanless_eq_struct!(StrLit; style symbol suffix span symbol_unescaped);
|
||||
spanless_eq_struct!(StructField; attrs id span vis ident ty is_placeholder);
|
||||
spanless_eq_struct!(StructExpr; path fields rest);
|
||||
spanless_eq_struct!(Token; kind span);
|
||||
spanless_eq_struct!(TraitKind; 0 1 2 3 4);
|
||||
spanless_eq_struct!(TraitRef; path ref_id);
|
||||
spanless_eq_struct!(Ty; id kind span);
|
||||
spanless_eq_struct!(Ty; id kind span tokens);
|
||||
spanless_eq_struct!(TyAliasKind; 0 1 2 3);
|
||||
spanless_eq_struct!(UseTree; prefix kind span);
|
||||
spanless_eq_struct!(Variant; attrs id span vis ident data disr_expr is_placeholder);
|
||||
spanless_eq_struct!(Variant; attrs id span !vis ident data disr_expr is_placeholder);
|
||||
spanless_eq_struct!(Visibility; kind span tokens);
|
||||
spanless_eq_struct!(WhereBoundPredicate; span bound_generic_params bounded_ty bounds);
|
||||
spanless_eq_struct!(WhereClause; has_where_token predicates span);
|
||||
spanless_eq_struct!(WhereEqPredicate; id span lhs_ty rhs_ty);
|
||||
spanless_eq_struct!(WhereRegionPredicate; span lifetime bounds);
|
||||
spanless_eq_struct!(token::Lit; kind symbol suffix);
|
||||
spanless_eq_enum!(AngleBracketedArg; Arg(0) Constraint(0));
|
||||
spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
|
||||
spanless_eq_enum!(AssocItemKind; Const(0 1 2) Fn(0) TyAlias(0) MacCall(0));
|
||||
spanless_eq_enum!(AssocTyConstraintKind; Equality(ty) Bound(bounds));
|
||||
spanless_eq_enum!(Async; Yes(span closure_id return_impl_trait_id) No);
|
||||
spanless_eq_enum!(AttrKind; Normal(0) DocComment(0 1));
|
||||
spanless_eq_enum!(AttrAnnotatedTokenTree; Token(0) Delimited(0 1 2) Attributes(0));
|
||||
spanless_eq_enum!(AttrStyle; Outer Inner);
|
||||
spanless_eq_enum!(BinOpKind; Add Sub Mul Div Rem And Or BitXor BitAnd BitOr Shl Shr Eq Lt Le Ne Ge Gt);
|
||||
spanless_eq_enum!(BindingMode; ByRef(0) ByValue(0));
|
||||
@ -325,12 +360,13 @@ spanless_eq_enum!(Defaultness; Default(0) Final);
|
||||
spanless_eq_enum!(Extern; None Implicit Explicit(0));
|
||||
spanless_eq_enum!(FloatTy; F32 F64);
|
||||
spanless_eq_enum!(FnRetTy; Default(0) Ty(0));
|
||||
spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0 1 2 3) TyAlias(0 1 2 3) MacCall(0));
|
||||
spanless_eq_enum!(ForeignItemKind; Static(0 1 2) Fn(0) TyAlias(0) MacCall(0));
|
||||
spanless_eq_enum!(GenericArg; Lifetime(0) Type(0) Const(0));
|
||||
spanless_eq_enum!(GenericArgs; AngleBracketed(0) Parenthesized(0));
|
||||
spanless_eq_enum!(GenericBound; Trait(0 1) Outlives(0));
|
||||
spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span));
|
||||
spanless_eq_enum!(GenericParamKind; Lifetime Type(default) Const(ty kw_span default));
|
||||
spanless_eq_enum!(ImplPolarity; Positive Negative(0));
|
||||
spanless_eq_enum!(Inline; Yes No);
|
||||
spanless_eq_enum!(InlineAsmRegOrRegClass; Reg(0) RegClass(0));
|
||||
spanless_eq_enum!(InlineAsmTemplatePiece; String(0) Placeholder(operand_idx modifier span));
|
||||
spanless_eq_enum!(IntTy; Isize I8 I16 I32 I64 I128);
|
||||
@ -341,12 +377,14 @@ spanless_eq_enum!(LlvmAsmDialect; Att Intel);
|
||||
spanless_eq_enum!(MacArgs; Empty Delimited(0 1 2) Eq(0 1));
|
||||
spanless_eq_enum!(MacDelimiter; Parenthesis Bracket Brace);
|
||||
spanless_eq_enum!(MacStmtStyle; Semicolon Braces NoBraces);
|
||||
spanless_eq_enum!(ModKind; Loaded(0 1 2) Unloaded);
|
||||
spanless_eq_enum!(Movability; Static Movable);
|
||||
spanless_eq_enum!(Mutability; Mut Not);
|
||||
spanless_eq_enum!(RangeEnd; Included(0) Excluded);
|
||||
spanless_eq_enum!(RangeLimits; HalfOpen Closed);
|
||||
spanless_eq_enum!(StmtKind; Local(0) Item(0) Expr(0) Semi(0) Empty MacCall(0));
|
||||
spanless_eq_enum!(StrStyle; Cooked Raw(0));
|
||||
spanless_eq_enum!(StructRest; Base(0) Rest(0) None);
|
||||
spanless_eq_enum!(TokenTree; Token(0) Delimited(0 1 2));
|
||||
spanless_eq_enum!(TraitBoundModifier; None Maybe MaybeConst MaybeConstMaybe);
|
||||
spanless_eq_enum!(TraitObjectSyntax; Dyn None);
|
||||
@ -358,29 +396,29 @@ spanless_eq_enum!(UseTreeKind; Simple(0 1 2) Nested(0) Glob);
|
||||
spanless_eq_enum!(VariantData; Struct(0 1) Tuple(0 1) Unit(0));
|
||||
spanless_eq_enum!(VisibilityKind; Public Crate(0) Restricted(path id) Inherited);
|
||||
spanless_eq_enum!(WherePredicate; BoundPredicate(0) RegionPredicate(0) EqPredicate(0));
|
||||
spanless_eq_enum!(ExprKind; Box(0) Array(0) Call(0 1) MethodCall(0 1 2) Tup(0)
|
||||
Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1) Let(0 1) If(0 1 2)
|
||||
While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1) Closure(0 1 2 3 4 5)
|
||||
Block(0 1) Async(0 1 2) Await(0) TryBlock(0) Assign(0 1 2) AssignOp(0 1 2)
|
||||
Field(0 1) Index(0 1) Range(0 1 2) Path(0 1) AddrOf(0 1 2) Break(0 1)
|
||||
Continue(0) Ret(0) InlineAsm(0) LlvmInlineAsm(0) MacCall(0) Struct(0 1 2)
|
||||
Repeat(0 1) Paren(0) Try(0) Yield(0) Err);
|
||||
spanless_eq_enum!(ExprKind; Box(0) Array(0) ConstBlock(0) Call(0 1)
|
||||
MethodCall(0 1 2) Tup(0) Binary(0 1 2) Unary(0 1) Lit(0) Cast(0 1) Type(0 1)
|
||||
Let(0 1) If(0 1 2) While(0 1 2) ForLoop(0 1 2 3) Loop(0 1) Match(0 1)
|
||||
Closure(0 1 2 3 4 5) Block(0 1) Async(0 1 2) Await(0) TryBlock(0)
|
||||
Assign(0 1 2) AssignOp(0 1 2) Field(0 1) Index(0 1) Underscore Range(0 1 2)
|
||||
Path(0 1) AddrOf(0 1 2) Break(0 1) Continue(0) Ret(0) InlineAsm(0)
|
||||
LlvmInlineAsm(0) MacCall(0) Struct(0) Repeat(0 1) Paren(0) Try(0) Yield(0)
|
||||
Err);
|
||||
spanless_eq_enum!(InlineAsmOperand; In(reg expr) Out(reg late expr)
|
||||
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(expr)
|
||||
InOut(reg late expr) SplitInOut(reg late in_expr out_expr) Const(anon_const)
|
||||
Sym(expr));
|
||||
spanless_eq_enum!(ItemKind; ExternCrate(0) Use(0) Static(0 1 2) Const(0 1 2)
|
||||
Fn(0 1 2 3) Mod(0) ForeignMod(0) GlobalAsm(0) TyAlias(0 1 2 3) Enum(0 1)
|
||||
Struct(0 1) Union(0 1) Trait(0 1 2 3 4) TraitAlias(0 1)
|
||||
Impl(unsafety polarity defaultness constness generics of_trait self_ty items)
|
||||
MacCall(0) MacroDef(0));
|
||||
Fn(0) Mod(0 1) ForeignMod(0) GlobalAsm(0) TyAlias(0) Enum(0 1) Struct(0 1)
|
||||
Union(0 1) Trait(0) TraitAlias(0 1) Impl(0) MacCall(0) MacroDef(0));
|
||||
spanless_eq_enum!(LitKind; Str(0 1) ByteStr(0) Byte(0) Char(0) Int(0 1)
|
||||
Float(0 1) Bool(0) Err(0));
|
||||
spanless_eq_enum!(PatKind; Wild Ident(0 1 2) Struct(0 1 2) TupleStruct(0 1)
|
||||
Or(0) Path(0 1) Tuple(0) Box(0) Ref(0 1) Lit(0) Range(0 1 2) Slice(0) Rest
|
||||
Paren(0) MacCall(0));
|
||||
spanless_eq_enum!(TyKind; Slice(0) Array(0 1) Ptr(0) Rptr(0 1) BareFn(0) Never
|
||||
Tup(0) Path(0 1) TraitObject(0 1) ImplTrait(0 1) Paren(0) Typeof(0) Infer
|
||||
ImplicitSelf MacCall(0) Err CVarArgs);
|
||||
Tup(0) AnonymousStruct(0 1) AnonymousUnion(0 1) Path(0 1) TraitObject(0 1)
|
||||
ImplTrait(0 1) Paren(0) Typeof(0) Infer ImplicitSelf MacCall(0) Err
|
||||
CVarArgs);
|
||||
|
||||
impl SpanlessEq for Ident {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
@ -388,14 +426,6 @@ impl SpanlessEq for Ident {
|
||||
}
|
||||
}
|
||||
|
||||
// Give up on comparing literals inside of macros because there are so many
|
||||
// equivalent representations of the same literal; they are tested elsewhere
|
||||
impl SpanlessEq for token::Lit {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
mem::discriminant(self) == mem::discriminant(other)
|
||||
}
|
||||
}
|
||||
|
||||
impl SpanlessEq for RangeSyntax {
|
||||
fn eq(&self, _other: &Self) -> bool {
|
||||
match self {
|
||||
@ -404,6 +434,34 @@ impl SpanlessEq for RangeSyntax {
|
||||
}
|
||||
}
|
||||
|
||||
impl SpanlessEq for Param {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
let Param {
|
||||
attrs,
|
||||
ty,
|
||||
pat,
|
||||
id,
|
||||
span: _,
|
||||
is_placeholder,
|
||||
} = self;
|
||||
let Param {
|
||||
attrs: attrs2,
|
||||
ty: ty2,
|
||||
pat: pat2,
|
||||
id: id2,
|
||||
span: _,
|
||||
is_placeholder: is_placeholder2,
|
||||
} = other;
|
||||
SpanlessEq::eq(id, id2)
|
||||
&& SpanlessEq::eq(is_placeholder, is_placeholder2)
|
||||
&& (matches!(ty.kind, TyKind::Err)
|
||||
|| matches!(ty2.kind, TyKind::Err)
|
||||
|| SpanlessEq::eq(attrs, attrs2)
|
||||
&& SpanlessEq::eq(ty, ty2)
|
||||
&& SpanlessEq::eq(pat, pat2))
|
||||
}
|
||||
}
|
||||
|
||||
impl SpanlessEq for TokenKind {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
@ -412,6 +470,14 @@ impl SpanlessEq for TokenKind {
|
||||
TokenKind::DotDotEq | TokenKind::DotDotDot => true,
|
||||
_ => false,
|
||||
},
|
||||
(TokenKind::Interpolated(this), TokenKind::Interpolated(other)) => {
|
||||
match (this.as_ref(), other.as_ref()) {
|
||||
(Nonterminal::NtExpr(this), Nonterminal::NtExpr(other)) => {
|
||||
SpanlessEq::eq(this, other)
|
||||
}
|
||||
_ => this == other,
|
||||
}
|
||||
}
|
||||
_ => self == other,
|
||||
}
|
||||
}
|
||||
@ -419,20 +485,146 @@ impl SpanlessEq for TokenKind {
|
||||
|
||||
impl SpanlessEq for TokenStream {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
let mut this = self.clone().into_trees();
|
||||
let mut other = other.clone().into_trees();
|
||||
let mut this_trees = self.trees();
|
||||
let mut other_trees = other.trees();
|
||||
loop {
|
||||
let this = match this.next() {
|
||||
None => return other.next().is_none(),
|
||||
Some(val) => val,
|
||||
let this = match this_trees.next() {
|
||||
None => return other_trees.next().is_none(),
|
||||
Some(tree) => tree,
|
||||
};
|
||||
let other = match other.next() {
|
||||
let other = match other_trees.next() {
|
||||
None => return false,
|
||||
Some(val) => val,
|
||||
Some(tree) => tree,
|
||||
};
|
||||
if !SpanlessEq::eq(&this, &other) {
|
||||
return false;
|
||||
if SpanlessEq::eq(&this, &other) {
|
||||
continue;
|
||||
}
|
||||
if let (TokenTree::Token(this), TokenTree::Token(other)) = (this, other) {
|
||||
if match (&this.kind, &other.kind) {
|
||||
(TokenKind::Literal(this), TokenKind::Literal(other)) => {
|
||||
SpanlessEq::eq(this, other)
|
||||
}
|
||||
(TokenKind::DocComment(_kind, style, symbol), TokenKind::Pound) => {
|
||||
doc_comment(*style, *symbol, &mut other_trees)
|
||||
}
|
||||
(TokenKind::Pound, TokenKind::DocComment(_kind, style, symbol)) => {
|
||||
doc_comment(*style, *symbol, &mut this_trees)
|
||||
}
|
||||
_ => false,
|
||||
} {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn doc_comment<'a>(
|
||||
style: AttrStyle,
|
||||
unescaped: Symbol,
|
||||
trees: &mut impl Iterator<Item = TokenTree>,
|
||||
) -> bool {
|
||||
if match style {
|
||||
AttrStyle::Outer => false,
|
||||
AttrStyle::Inner => true,
|
||||
} {
|
||||
match trees.next() {
|
||||
Some(TokenTree::Token(Token {
|
||||
kind: TokenKind::Not,
|
||||
span: _,
|
||||
})) => {}
|
||||
_ => return false,
|
||||
}
|
||||
}
|
||||
let stream = match trees.next() {
|
||||
Some(TokenTree::Delimited(_span, DelimToken::Bracket, stream)) => stream,
|
||||
_ => return false,
|
||||
};
|
||||
let mut trees = stream.trees();
|
||||
match trees.next() {
|
||||
Some(TokenTree::Token(Token {
|
||||
kind: TokenKind::Ident(symbol, false),
|
||||
span: _,
|
||||
})) if symbol == sym::doc => {}
|
||||
_ => return false,
|
||||
}
|
||||
match trees.next() {
|
||||
Some(TokenTree::Token(Token {
|
||||
kind: TokenKind::Eq,
|
||||
span: _,
|
||||
})) => {}
|
||||
_ => return false,
|
||||
}
|
||||
match trees.next() {
|
||||
Some(TokenTree::Token(token)) => {
|
||||
is_escaped_literal(&token, unescaped) && trees.next().is_none()
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn is_escaped_literal(token: &Token, unescaped: Symbol) -> bool {
|
||||
match match token {
|
||||
Token {
|
||||
kind: TokenKind::Literal(lit),
|
||||
span: _,
|
||||
} => Lit::from_lit_token(*lit, DUMMY_SP),
|
||||
Token {
|
||||
kind: TokenKind::Interpolated(nonterminal),
|
||||
span: _,
|
||||
} => match nonterminal.as_ref() {
|
||||
Nonterminal::NtExpr(expr) => match &expr.kind {
|
||||
ExprKind::Lit(lit) => Ok(lit.clone()),
|
||||
_ => return false,
|
||||
},
|
||||
_ => return false,
|
||||
},
|
||||
_ => return false,
|
||||
} {
|
||||
Ok(Lit {
|
||||
token:
|
||||
token::Lit {
|
||||
kind: token::LitKind::Str,
|
||||
symbol: _,
|
||||
suffix: None,
|
||||
},
|
||||
kind: LitKind::Str(symbol, StrStyle::Cooked),
|
||||
span: _,
|
||||
}) => symbol.as_str().replace('\r', "") == unescaped.as_str().replace('\r', ""),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
impl SpanlessEq for LazyTokenStream {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
let this = self.create_token_stream();
|
||||
let other = other.create_token_stream();
|
||||
SpanlessEq::eq(&this, &other)
|
||||
}
|
||||
}
|
||||
|
||||
impl SpanlessEq for AttrKind {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
(AttrKind::Normal(item, tokens), AttrKind::Normal(item2, tokens2)) => {
|
||||
SpanlessEq::eq(item, item2) && SpanlessEq::eq(tokens, tokens2)
|
||||
}
|
||||
(AttrKind::DocComment(kind, symbol), AttrKind::DocComment(kind2, symbol2)) => {
|
||||
SpanlessEq::eq(kind, kind2) && SpanlessEq::eq(symbol, symbol2)
|
||||
}
|
||||
(AttrKind::DocComment(kind, unescaped), AttrKind::Normal(item2, _tokens)) => {
|
||||
match kind {
|
||||
CommentKind::Line | CommentKind::Block => {}
|
||||
}
|
||||
let path = Path::from_ident(Ident::with_dummy_span(sym::doc));
|
||||
SpanlessEq::eq(&path, &item2.path)
|
||||
&& match &item2.args {
|
||||
MacArgs::Empty | MacArgs::Delimited(..) => false,
|
||||
MacArgs::Eq(_span, token) => is_escaped_literal(token, *unescaped),
|
||||
}
|
||||
}
|
||||
(AttrKind::Normal(..), AttrKind::DocComment(..)) => SpanlessEq::eq(other, self),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
1
third_party/rust/syn/tests/common/parse.rs
vendored
@ -9,7 +9,6 @@ use rustc_ast::ptr::P;
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_span::source_map::FilePathMapping;
|
||||
use rustc_span::FileName;
|
||||
|
||||
use std::panic;
|
||||
|
||||
pub fn librustc_expr(input: &str) -> Option<P<ast::Expr>> {
|
||||
|
24
third_party/rust/syn/tests/debug/gen.rs
vendored
@ -3424,12 +3424,24 @@ impl Debug for Lite<syn::Lit> {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
let _val = &self.value;
|
||||
match _val {
|
||||
syn::Lit::Str(_val) => write!(formatter, "{:?}", _val.value()),
|
||||
syn::Lit::ByteStr(_val) => write!(formatter, "{:?}", _val.value()),
|
||||
syn::Lit::Byte(_val) => write!(formatter, "{:?}", _val.value()),
|
||||
syn::Lit::Char(_val) => write!(formatter, "{:?}", _val.value()),
|
||||
syn::Lit::Int(_val) => write!(formatter, "{}", _val),
|
||||
syn::Lit::Float(_val) => write!(formatter, "{}", _val),
|
||||
syn::Lit::Str(_val) => {
|
||||
write!(formatter, "{:?}", _val.value())
|
||||
}
|
||||
syn::Lit::ByteStr(_val) => {
|
||||
write!(formatter, "{:?}", _val.value())
|
||||
}
|
||||
syn::Lit::Byte(_val) => {
|
||||
write!(formatter, "{:?}", _val.value())
|
||||
}
|
||||
syn::Lit::Char(_val) => {
|
||||
write!(formatter, "{:?}", _val.value())
|
||||
}
|
||||
syn::Lit::Int(_val) => {
|
||||
write!(formatter, "{}", _val)
|
||||
}
|
||||
syn::Lit::Float(_val) => {
|
||||
write!(formatter, "{}", _val)
|
||||
}
|
||||
syn::Lit::Bool(_val) => {
|
||||
let mut formatter = formatter.debug_struct("Lit::Bool");
|
||||
formatter.field("value", Lite(&_val.value));
|
||||
|
1
third_party/rust/syn/tests/macros/mod.rs
vendored
@ -1,7 +1,6 @@
|
||||
#[path = "../debug/mod.rs"]
|
||||
pub mod debug;
|
||||
|
||||
use syn;
|
||||
use syn::parse::{Parse, Result};
|
||||
|
||||
#[macro_export]
|
||||
|
51
third_party/rust/syn/tests/repo/mod.rs
vendored
@ -8,29 +8,34 @@ use std::path::Path;
|
||||
use tar::Archive;
|
||||
use walkdir::DirEntry;
|
||||
|
||||
const REVISION: &str = "792c645ca7d11a8d254df307d019c5bf01445c37";
|
||||
const REVISION: &str = "716394d6581b60c75cfdd88b8e5b876f2db88b62";
|
||||
|
||||
#[rustfmt::skip]
|
||||
static EXCLUDE: &[&str] = &[
|
||||
// Rustc loses some attributes
|
||||
// https://github.com/rust-lang/rust/issues/84879
|
||||
"src/test/ui/proc-macro/issue-81555.rs",
|
||||
|
||||
// Compile-fail expr parameter in const generic position: f::<1 + 2>()
|
||||
"test/ui/const-generics/const-expression-parameter.rs",
|
||||
"src/test/ui/const-generics/closing-args-token.rs",
|
||||
"src/test/ui/const-generics/const-expression-parameter.rs",
|
||||
|
||||
// Deprecated anonymous parameter syntax in traits
|
||||
"test/ui/issues/issue-13105.rs",
|
||||
"test/ui/issues/issue-13775.rs",
|
||||
"test/ui/issues/issue-34074.rs",
|
||||
"test/ui/proc-macro/trait-fn-args-2015.rs",
|
||||
"src/test/ui/issues/issue-13105.rs",
|
||||
"src/test/ui/issues/issue-13775.rs",
|
||||
"src/test/ui/issues/issue-34074.rs",
|
||||
"src/test/ui/proc-macro/trait-fn-args-2015.rs",
|
||||
|
||||
// Not actually test cases
|
||||
"test/rustdoc-ui/test-compile-fail2.rs",
|
||||
"test/rustdoc-ui/test-compile-fail3.rs",
|
||||
"test/ui/include-single-expr-helper.rs",
|
||||
"test/ui/include-single-expr-helper-1.rs",
|
||||
"test/ui/issues/auxiliary/issue-21146-inc.rs",
|
||||
"test/ui/json-bom-plus-crlf-multifile-aux.rs",
|
||||
"test/ui/lint/expansion-time-include.rs",
|
||||
"test/ui/macros/auxiliary/macro-comma-support.rs",
|
||||
"test/ui/macros/auxiliary/macro-include-items-expr.rs",
|
||||
"src/test/rustdoc-ui/test-compile-fail2.rs",
|
||||
"src/test/rustdoc-ui/test-compile-fail3.rs",
|
||||
"src/test/ui/include-single-expr-helper.rs",
|
||||
"src/test/ui/include-single-expr-helper-1.rs",
|
||||
"src/test/ui/json-bom-plus-crlf-multifile-aux.rs",
|
||||
"src/test/ui/lint/expansion-time-include.rs",
|
||||
"src/test/ui/macros/auxiliary/macro-comma-support.rs",
|
||||
"src/test/ui/macros/auxiliary/macro-include-items-expr.rs",
|
||||
"src/test/ui/parser/auxiliary/issue-21146-inc.rs",
|
||||
];
|
||||
|
||||
pub fn base_dir_filter(entry: &DirEntry) -> bool {
|
||||
@ -46,23 +51,17 @@ pub fn base_dir_filter(entry: &DirEntry) -> bool {
|
||||
if cfg!(windows) {
|
||||
path_string = path_string.replace('\\', "/").into();
|
||||
}
|
||||
let path = if let Some(path) = path_string.strip_prefix("tests/rust/src/") {
|
||||
path
|
||||
} else if let Some(path) = path_string.strip_prefix("tests/rust/library/") {
|
||||
let path = if let Some(path) = path_string.strip_prefix("tests/rust/") {
|
||||
path
|
||||
} else {
|
||||
panic!("unexpected path in Rust dist: {}", path_string);
|
||||
};
|
||||
|
||||
// TODO assert that parsing fails on the parse-fail cases
|
||||
if path.starts_with("test/parse-fail")
|
||||
|| path.starts_with("test/compile-fail")
|
||||
|| path.starts_with("test/rustfix")
|
||||
{
|
||||
if path.starts_with("src/test/compile-fail") || path.starts_with("src/test/rustfix") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if path.starts_with("test/ui") {
|
||||
if path.starts_with("src/test/ui") {
|
||||
let stderr_path = entry.path().with_extension("stderr");
|
||||
if stderr_path.exists() {
|
||||
// Expected to fail in some way
|
||||
@ -91,10 +90,10 @@ pub fn clone_rust() {
|
||||
download_and_unpack().unwrap();
|
||||
}
|
||||
let mut missing = String::new();
|
||||
let test_src = Path::new("tests/rust/src");
|
||||
let test_src = Path::new("tests/rust");
|
||||
for exclude in EXCLUDE {
|
||||
if !test_src.join(exclude).exists() {
|
||||
missing += "\ntests/rust/src/";
|
||||
missing += "\ntests/rust/";
|
||||
missing += exclude;
|
||||
}
|
||||
}
|
||||
|
18
third_party/rust/syn/tests/test_expr.rs
vendored
@ -300,3 +300,21 @@ fn test_macro_variable_match_arm() {
}
"###);
}

// https://github.com/dtolnay/syn/issues/1019
#[test]
fn test_closure_vs_rangefull() {
#[rustfmt::skip] // rustfmt bug: https://github.com/rust-lang/rustfmt/issues/4808
let tokens = quote!(|| .. .method());
snapshot!(tokens as Expr, @r###"
Expr::MethodCall {
receiver: Expr::Closure {
output: Default,
body: Expr::Range {
limits: HalfOpen,
},
},
method: "method",
}
"###);
}
3
third_party/rust/syn/tests/test_grouping.rs
vendored
@ -2,9 +2,8 @@
mod macros;

use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, TokenStream, TokenTree};
use syn::Expr;

use std::iter::FromIterator;
use syn::Expr;

#[test]
fn test_grouping() {
258
third_party/rust/syn/tests/test_item.rs
vendored
@ -4,7 +4,7 @@ mod macros;
|
||||
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
|
||||
use quote::quote;
|
||||
use std::iter::FromIterator;
|
||||
use syn::Item;
|
||||
use syn::{Item, ItemTrait};
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_attr() {
|
||||
@ -43,3 +43,259 @@ fn test_macro_variable_attr() {
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_negative_impl() {
|
||||
// Rustc parses all of the following.
|
||||
|
||||
#[cfg(any())]
|
||||
impl ! {}
|
||||
let tokens = quote! {
|
||||
impl ! {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
self_ty: Type::Never,
|
||||
}
|
||||
"###);
|
||||
|
||||
#[cfg(any())]
|
||||
#[rustfmt::skip]
|
||||
impl !Trait {}
|
||||
let tokens = quote! {
|
||||
impl !Trait {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
self_ty: Verbatim(`! Trait`),
|
||||
}
|
||||
"###);
|
||||
|
||||
#[cfg(any())]
|
||||
impl !Trait for T {}
|
||||
let tokens = quote! {
|
||||
impl !Trait for T {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
trait_: Some((
|
||||
Some,
|
||||
Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Trait",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
)),
|
||||
self_ty: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "T",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
#[cfg(any())]
|
||||
#[rustfmt::skip]
|
||||
impl !! {}
|
||||
let tokens = quote! {
|
||||
impl !! {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
self_ty: Verbatim(`! !`),
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_macro_variable_impl() {
|
||||
// mimics the token stream corresponding to `impl $trait for $ty {}`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Ident(Ident::new("impl", Span::call_site())),
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote!(Trait))),
|
||||
TokenTree::Ident(Ident::new("for", Span::call_site())),
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote!(Type))),
|
||||
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics,
|
||||
trait_: Some((
|
||||
None,
|
||||
Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Trait",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
)),
|
||||
self_ty: Type::Group {
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Type",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_supertraits() {
|
||||
// Rustc parses all of the following.
|
||||
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote!(trait Trait where {});
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Inherited,
|
||||
ident: "Trait",
|
||||
generics: Generics {
|
||||
where_clause: Some(WhereClause),
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote!(trait Trait: where {});
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Inherited,
|
||||
ident: "Trait",
|
||||
generics: Generics {
|
||||
where_clause: Some(WhereClause),
|
||||
},
|
||||
colon_token: Some,
|
||||
}
|
||||
"###);
|
||||
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote!(trait Trait: Sized where {});
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Inherited,
|
||||
ident: "Trait",
|
||||
generics: Generics {
|
||||
where_clause: Some(WhereClause),
|
||||
},
|
||||
colon_token: Some,
|
||||
supertraits: [
|
||||
Trait(TraitBound {
|
||||
modifier: None,
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Sized",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
}
|
||||
"###);
|
||||
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote!(trait Trait: Sized + where {});
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Inherited,
|
||||
ident: "Trait",
|
||||
generics: Generics {
|
||||
where_clause: Some(WhereClause),
|
||||
},
|
||||
colon_token: Some,
|
||||
supertraits: [
|
||||
Trait(TraitBound {
|
||||
modifier: None,
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Sized",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_type_empty_bounds() {
|
||||
#[rustfmt::skip]
|
||||
let tokens = quote! {
|
||||
trait Foo {
|
||||
type Bar: ;
|
||||
}
|
||||
};
|
||||
|
||||
snapshot!(tokens as ItemTrait, @r###"
|
||||
ItemTrait {
|
||||
vis: Inherited,
|
||||
ident: "Foo",
|
||||
generics: Generics,
|
||||
items: [
|
||||
TraitItem::Type {
|
||||
ident: "Bar",
|
||||
generics: Generics,
|
||||
colon_token: Some,
|
||||
},
|
||||
],
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_impl_visibility() {
|
||||
let tokens = quote! {
|
||||
pub default unsafe impl union {}
|
||||
};
|
||||
|
||||
snapshot!(tokens as Item, @"Verbatim(`pub default unsafe impl union { }`)");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_impl_type_parameter_defaults() {
|
||||
#[cfg(any())]
|
||||
impl<T = ()> () {}
|
||||
let tokens = quote! {
|
||||
impl<T = ()> () {}
|
||||
};
|
||||
snapshot!(tokens as Item, @r###"
|
||||
Item::Impl {
|
||||
generics: Generics {
|
||||
lt_token: Some,
|
||||
params: [
|
||||
Type(TypeParam {
|
||||
ident: "T",
|
||||
eq_token: Some,
|
||||
default: Some(Type::Tuple),
|
||||
}),
|
||||
],
|
||||
gt_token: Some,
|
||||
},
|
||||
self_ty: Type::Tuple,
|
||||
}"###);
|
||||
}
|
||||
24
third_party/rust/syn/tests/test_lit.rs
vendored
@ -5,7 +5,7 @@ use proc_macro2::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};
use quote::ToTokens;
use std::iter::FromIterator;
use std::str::FromStr;
use syn::{Lit, LitFloat, LitInt};
use syn::{Lit, LitFloat, LitInt, LitStr};

fn lit(s: &str) -> Lit {
match TokenStream::from_str(s)
@ -43,6 +43,7 @@ fn strings() {
test_string("\"'\"", "'");
test_string("\"\"", "");
test_string("\"\\u{1F415}\"", "\u{1F415}");
test_string("\"\\u{1_2__3_}\"", "\u{123}");
test_string(
"\"contains\nnewlines\\\nescaped newlines\"",
"contains\nnewlinesescaped newlines",
@ -151,6 +152,9 @@ fn ints() {

test_int("5", 5, "");
test_int("5u32", 5, "u32");
test_int("0E", 0, "E");
test_int("0ECMA", 0, "ECMA");
test_int("0o0A", 0, "A");
test_int("5_0", 50, "");
test_int("5_____0_____", 50, "");
test_int("0x7f", 127, "");
@ -167,6 +171,7 @@ fn ints() {
test_int("0x_7F__u8", 127, "u8");
test_int("0b__10__0_1i8", 9, "i8");
test_int("0o__7__________________3u32", 59, "u32");
test_int("0e1\u{5c5}", 0, "e1\u{5c5}");
}

#[test]
@ -192,6 +197,8 @@ fn floats() {
test_float("1.0__3e-12", 1.03e-12, "");
test_float("1.03e+12", 1.03e12, "");
test_float("9e99e99", 9e99, "e99");
test_float("1e_0", 1.0, "");
test_float("0.0ECMA", 0.0, "ECMA");
}

#[test]
@ -207,6 +214,12 @@ fn negative() {
assert_eq!("-1.5f64", LitFloat::new("-1.5f64", span).to_string());
}

#[test]
fn negative_overflow() {
assert!(syn::parse_str::<LitFloat>("-1.0e99f64").is_ok());
assert!(syn::parse_str::<LitFloat>("-1.0e999f64").is_err());
}

#[test]
fn suffix() {
fn get_suffix(token: &str) -> String {
@ -247,3 +260,12 @@ fn test_deep_group_empty() {

snapshot!(tokens as Lit, @r#""hi""# );
}

#[test]
fn test_error() {
let err = syn::parse_str::<LitStr>("...").unwrap_err();
assert_eq!("expected string literal", err.to_string());

let err = syn::parse_str::<LitStr>("5").unwrap_err();
assert_eq!("expected string literal", err.to_string());
}
37
third_party/rust/syn/tests/test_meta.rs
vendored
@ -337,3 +337,40 @@ fn test_parse_nested_meta() {
})
"###);
}

#[test]
fn test_parse_path() {
let input = "::serde::Serialize";
snapshot!(input as Meta, @r###"
Path(Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "serde",
arguments: None,
},
PathSegment {
ident: "Serialize",
arguments: None,
},
],
})
"###);

let input = "::serde::Serialize";
snapshot!(input as NestedMeta, @r###"
Meta(Path(Path {
leading_colon: Some,
segments: [
PathSegment {
ident: "serde",
arguments: None,
},
PathSegment {
ident: "Serialize",
arguments: None,
},
],
}))
"###);
}
39
third_party/rust/syn/tests/test_pat.rs
vendored
@ -1,4 +1,9 @@
#[macro_use]
mod macros;

use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};
use quote::quote;
use std::iter::FromIterator;
use syn::{Item, Pat, Stmt};

#[test]
@ -26,13 +31,37 @@ fn test_leading_vert() {
syn::parse_str::<Item>("fn fun2(|| A: E) {}").unwrap_err();

syn::parse_str::<Stmt>("let | () = ();").unwrap();
syn::parse_str::<Stmt>("let (| A): E;").unwrap_err();
syn::parse_str::<Stmt>("let (| A): E;").unwrap();
syn::parse_str::<Stmt>("let (|| A): (E);").unwrap_err();
syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap_err();
syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap_err();
syn::parse_str::<Stmt>("let (| A,): (E,);").unwrap();
syn::parse_str::<Stmt>("let [| A]: [E; 1];").unwrap();
syn::parse_str::<Stmt>("let [|| A]: [E; 1];").unwrap_err();
syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap_err();
syn::parse_str::<Stmt>("let TS(| A): TS;").unwrap();
syn::parse_str::<Stmt>("let TS(|| A): TS;").unwrap_err();
syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap_err();
syn::parse_str::<Stmt>("let NS { f: | A }: NS;").unwrap();
syn::parse_str::<Stmt>("let NS { f: || A }: NS;").unwrap_err();
}

#[test]
fn test_group() {
let group = Group::new(Delimiter::None, quote!(Some(_)));
let tokens = TokenStream::from_iter(vec![TokenTree::Group(group)]);

snapshot!(tokens as Pat, @r###"
Pat::TupleStruct {
path: Path {
segments: [
PathSegment {
ident: "Some",
arguments: None,
},
],
},
pat: PatTuple {
elems: [
Pat::Wild,
],
},
}
"###);
}
56
third_party/rust/syn/tests/test_path.rs
vendored
@ -2,9 +2,9 @@
mod macros;

use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use quote::quote;
use quote::{quote, ToTokens};
use std::iter::FromIterator;
use syn::{Expr, Type};
use syn::{parse_quote, Expr, Type, TypePath};

#[test]
fn parse_interpolated_leading_component() {
@ -50,3 +50,55 @@ fn parse_interpolated_leading_component() {
}
"###);
}

#[test]
fn print_incomplete_qpath() {
// qpath with `as` token
let mut ty: TypePath = parse_quote!(<Self as A>::Q);
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self as A > :: Q`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self as A > ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self >`)
"###);
assert!(ty.path.segments.pop().is_none());

// qpath without `as` token
let mut ty: TypePath = parse_quote!(<Self>::A::B);
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self > :: A :: B`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self > :: A ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`< Self > ::`)
"###);
assert!(ty.path.segments.pop().is_none());

// normal path
let mut ty: TypePath = parse_quote!(Self::A::B);
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`Self :: A :: B`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`Self :: A ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(`Self ::`)
"###);
assert!(ty.path.segments.pop().is_some());
snapshot!(ty.to_token_stream(), @r###"
TokenStream(``)
"###);
assert!(ty.path.segments.pop().is_none());
}
46
third_party/rust/syn/tests/test_precedence.rs
vendored
@ -18,21 +18,18 @@ extern crate rustc_ast;
extern crate rustc_data_structures;
extern crate rustc_span;

use crate::common::eq::SpanlessEq;
use crate::common::parse;
use quote::quote;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use regex::Regex;
use rustc_ast::ast;
use rustc_ast::ptr::P;
use rustc_span::edition::Edition;
use walkdir::{DirEntry, WalkDir};

use std::fs::File;
use std::io::Read;
use std::fs;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};

use common::eq::SpanlessEq;
use common::parse;
use walkdir::{DirEntry, WalkDir};

#[macro_use]
mod macros;
@ -109,9 +106,7 @@ fn test_rustc_precedence() {
return;
}

let mut file = File::open(path).unwrap();
let mut content = String::new();
file.read_to_string(&mut content).unwrap();
let content = fs::read_to_string(path).unwrap();
let content = edition_regex.replace_all(&content, "_$0");

let (l_passed, l_failed) = match syn::parse_file(&content) {
@ -200,19 +195,21 @@ fn librustc_parse_and_rewrite(input: &str) -> Option<P<ast::Expr>> {
/// This method operates on librustc objects.
fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
use rustc_ast::ast::{
Block, BorrowKind, Expr, ExprKind, Field, GenericArg, MacCall, Pat, Stmt, StmtKind, Ty,
Block, BorrowKind, Expr, ExprField, ExprKind, GenericArg, Pat, Stmt, StmtKind, StructExpr,
StructRest, Ty,
};
use rustc_ast::mut_visit::{noop_visit_generic_arg, MutVisitor};
use rustc_data_structures::map_in_place::MapInPlace;
use rustc_data_structures::thin_vec::ThinVec;
use rustc_span::DUMMY_SP;
use std::mem;
use std::ops::DerefMut;

struct BracketsVisitor {
failed: bool,
};
}

fn flat_map_field<T: MutVisitor>(mut f: Field, vis: &mut T) -> Vec<Field> {
fn flat_map_field<T: MutVisitor>(mut f: ExprField, vis: &mut T) -> Vec<ExprField> {
if f.is_shorthand {
noop_visit_expr(&mut f.expr, vis);
} else {
@ -239,13 +236,16 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
}

fn noop_visit_expr<T: MutVisitor>(e: &mut Expr, vis: &mut T) {
use rustc_ast::mut_visit::{noop_visit_expr, visit_opt, visit_thin_attrs};
use rustc_ast::mut_visit::{noop_visit_expr, visit_thin_attrs};
match &mut e.kind {
ExprKind::AddrOf(BorrowKind::Raw, ..) => {}
ExprKind::Struct(path, fields, expr) => {
ExprKind::Struct(expr) => {
let StructExpr { path, fields, rest } = expr.deref_mut();
vis.visit_path(path);
fields.flat_map_in_place(|field| flat_map_field(field, vis));
visit_opt(expr, |expr| vis.visit_expr(expr));
if let StructRest::Base(rest) = rest {
vis.visit_expr(rest);
}
vis.visit_id(&mut e.id);
vis.visit_span(&mut e.span);
visit_thin_attrs(&mut e.attrs, vis);
@ -256,7 +256,10 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {

impl MutVisitor for BracketsVisitor {
fn visit_expr(&mut self, e: &mut P<Expr>) {
noop_visit_expr(e, self);
match e.kind {
ExprKind::ConstBlock(..) => {}
_ => noop_visit_expr(e, self),
}
match e.kind {
ExprKind::If(..) | ExprKind::Block(..) | ExprKind::Let(..) => {}
_ => {
@ -301,15 +304,6 @@ fn librustc_brackets(mut librustc_expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
fn visit_ty(&mut self, ty: &mut P<Ty>) {
let _ = ty;
}

fn visit_mac(&mut self, mac: &mut MacCall) {
// By default when folding over macros, librustc panics. This is
// because it's usually not what you want, you want to run after
// macro expansion. We do want to do that (syn doesn't do macro
// expansion), so we implement visit_mac to just return the macro
// unchanged.
let _ = mac;
}
}

let mut folder = BracketsVisitor { failed: false };
230
third_party/rust/syn/tests/test_round_trip.rs
vendored
@ -9,21 +9,24 @@ extern crate rustc_parse as parse;
|
||||
extern crate rustc_session;
|
||||
extern crate rustc_span;
|
||||
|
||||
use crate::common::eq::SpanlessEq;
|
||||
use quote::quote;
|
||||
use rayon::iter::{IntoParallelIterator, ParallelIterator};
|
||||
use rustc_ast::ast;
|
||||
use rustc_ast::ast::{
|
||||
AngleBracketedArg, AngleBracketedArgs, Crate, GenericArg, GenericParamKind, Generics,
|
||||
};
|
||||
use rustc_ast::mut_visit::{self, MutVisitor};
|
||||
use rustc_errors::PResult;
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_span::source_map::FilePathMapping;
|
||||
use rustc_span::FileName;
|
||||
use walkdir::{DirEntry, WalkDir};
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
use std::fs;
|
||||
use std::panic;
|
||||
use std::path::Path;
|
||||
use std::process;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
use std::time::Instant;
|
||||
use walkdir::{DirEntry, WalkDir};
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
@ -33,8 +36,6 @@ mod common;
|
||||
|
||||
mod repo;
|
||||
|
||||
use common::eq::SpanlessEq;
|
||||
|
||||
#[test]
|
||||
fn test_round_trip() {
|
||||
common::rayon_init();
|
||||
@ -55,88 +56,8 @@ fn test_round_trip() {
|
||||
.into_par_iter()
|
||||
.for_each(|entry| {
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut file = File::open(path).unwrap();
|
||||
let mut content = String::new();
|
||||
file.read_to_string(&mut content).unwrap();
|
||||
|
||||
let start = Instant::now();
|
||||
let (krate, elapsed) = match syn::parse_file(&content) {
|
||||
Ok(krate) => (krate, start.elapsed()),
|
||||
Err(msg) => {
|
||||
errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg);
|
||||
let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
|
||||
if prev_failed + 1 >= abort_after {
|
||||
process::exit(1);
|
||||
}
|
||||
return;
|
||||
}
|
||||
};
|
||||
let back = quote!(#krate).to_string();
|
||||
let edition = repo::edition(path).parse().unwrap();
|
||||
|
||||
let equal = panic::catch_unwind(|| {
|
||||
rustc_span::with_session_globals(edition, || {
|
||||
let sess = ParseSess::new(FilePathMapping::empty());
|
||||
let before = match librustc_parse(content, &sess) {
|
||||
Ok(before) => before,
|
||||
Err(mut diagnostic) => {
|
||||
diagnostic.cancel();
|
||||
if diagnostic
|
||||
.message()
|
||||
.starts_with("file not found for module")
|
||||
{
|
||||
errorf!("=== {}: ignore\n", path.display());
|
||||
} else {
|
||||
errorf!(
|
||||
"=== {}: ignore - librustc failed to parse original content: {}\n",
|
||||
path.display(),
|
||||
diagnostic.message()
|
||||
);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
};
|
||||
let after = match librustc_parse(back, &sess) {
|
||||
Ok(after) => after,
|
||||
Err(mut diagnostic) => {
|
||||
errorf!("=== {}: librustc failed to parse", path.display());
|
||||
diagnostic.emit();
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
if SpanlessEq::eq(&before, &after) {
|
||||
errorf!(
|
||||
"=== {}: pass in {}ms\n",
|
||||
path.display(),
|
||||
elapsed.as_secs() * 1000
|
||||
+ u64::from(elapsed.subsec_nanos()) / 1_000_000
|
||||
);
|
||||
true
|
||||
} else {
|
||||
errorf!(
|
||||
"=== {}: FAIL\nbefore: {:#?}\nafter: {:#?}\n",
|
||||
path.display(),
|
||||
before,
|
||||
after,
|
||||
);
|
||||
false
|
||||
}
|
||||
})
|
||||
});
|
||||
match equal {
|
||||
Err(_) => errorf!("=== {}: ignoring librustc panic\n", path.display()),
|
||||
Ok(true) => {}
|
||||
Ok(false) => {
|
||||
let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
|
||||
if prev_failed + 1 >= abort_after {
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
if !path.is_dir() {
|
||||
test(path, &failed, abort_after);
|
||||
}
|
||||
});
|
||||
|
||||
@ -146,7 +67,134 @@ fn test_round_trip() {
|
||||
}
|
||||
}
|
||||
|
||||
fn librustc_parse(content: String, sess: &ParseSess) -> PResult<ast::Crate> {
|
||||
let name = FileName::Custom("test_round_trip".to_string());
|
||||
fn test(path: &Path, failed: &AtomicUsize, abort_after: usize) {
|
||||
let content = fs::read_to_string(path).unwrap();
|
||||
|
||||
let start = Instant::now();
|
||||
let (krate, elapsed) = match syn::parse_file(&content) {
|
||||
Ok(krate) => (krate, start.elapsed()),
|
||||
Err(msg) => {
|
||||
errorf!("=== {}: syn failed to parse\n{:?}\n", path.display(), msg);
|
||||
let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
|
||||
if prev_failed + 1 >= abort_after {
|
||||
process::exit(1);
|
||||
}
|
||||
return;
|
||||
}
|
||||
};
|
||||
let back = quote!(#krate).to_string();
|
||||
let edition = repo::edition(path).parse().unwrap();
|
||||
|
||||
rustc_span::with_session_globals(edition, || {
|
||||
let equal = match panic::catch_unwind(|| {
|
||||
let sess = ParseSess::new(FilePathMapping::empty());
|
||||
let before = match librustc_parse(content, &sess) {
|
||||
Ok(before) => before,
|
||||
Err(mut diagnostic) => {
|
||||
diagnostic.cancel();
|
||||
if diagnostic
|
||||
.message()
|
||||
.starts_with("file not found for module")
|
||||
{
|
||||
errorf!("=== {}: ignore\n", path.display());
|
||||
} else {
|
||||
errorf!(
|
||||
"=== {}: ignore - librustc failed to parse original content: {}\n",
|
||||
path.display(),
|
||||
diagnostic.message(),
|
||||
);
|
||||
}
|
||||
return Err(true);
|
||||
}
|
||||
};
|
||||
let after = match librustc_parse(back, &sess) {
|
||||
Ok(after) => after,
|
||||
Err(mut diagnostic) => {
|
||||
errorf!("=== {}: librustc failed to parse", path.display());
|
||||
diagnostic.emit();
|
||||
return Err(false);
|
||||
}
|
||||
};
|
||||
Ok((before, after))
|
||||
}) {
|
||||
Err(_) => {
|
||||
errorf!("=== {}: ignoring librustc panic\n", path.display());
|
||||
true
|
||||
}
|
||||
Ok(Err(equal)) => equal,
|
||||
Ok(Ok((mut before, mut after))) => {
|
||||
normalize(&mut before);
|
||||
normalize(&mut after);
|
||||
if SpanlessEq::eq(&before, &after) {
|
||||
errorf!(
|
||||
"=== {}: pass in {}ms\n",
|
||||
path.display(),
|
||||
elapsed.as_secs() * 1000 + u64::from(elapsed.subsec_nanos()) / 1_000_000
|
||||
);
|
||||
true
|
||||
} else {
|
||||
errorf!(
|
||||
"=== {}: FAIL\nbefore: {:#?}\nafter: {:#?}\n",
|
||||
path.display(),
|
||||
before,
|
||||
after,
|
||||
);
|
||||
false
|
||||
}
|
||||
}
|
||||
};
|
||||
if !equal {
|
||||
let prev_failed = failed.fetch_add(1, Ordering::SeqCst);
|
||||
if prev_failed + 1 >= abort_after {
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn librustc_parse(content: String, sess: &ParseSess) -> PResult<Crate> {
|
||||
static COUNTER: AtomicUsize = AtomicUsize::new(0);
|
||||
let counter = COUNTER.fetch_add(1, Ordering::Relaxed);
|
||||
let name = FileName::Custom(format!("test_round_trip{}", counter));
|
||||
parse::parse_crate_from_source_str(name, content, sess)
|
||||
}
|
||||
|
||||
fn normalize(krate: &mut Crate) {
|
||||
struct NormalizeVisitor;
|
||||
|
||||
impl MutVisitor for NormalizeVisitor {
|
||||
fn visit_angle_bracketed_parameter_data(&mut self, e: &mut AngleBracketedArgs) {
|
||||
#[derive(Ord, PartialOrd, Eq, PartialEq)]
|
||||
enum Group {
|
||||
Lifetimes,
|
||||
TypesAndConsts,
|
||||
Constraints,
|
||||
}
|
||||
e.args.sort_by_key(|arg| match arg {
|
||||
AngleBracketedArg::Arg(arg) => match arg {
|
||||
GenericArg::Lifetime(_) => Group::Lifetimes,
|
||||
GenericArg::Type(_) | GenericArg::Const(_) => Group::TypesAndConsts,
|
||||
},
|
||||
AngleBracketedArg::Constraint(_) => Group::Constraints,
|
||||
});
|
||||
mut_visit::noop_visit_angle_bracketed_parameter_data(e, self);
|
||||
}
|
||||
|
||||
fn visit_generics(&mut self, e: &mut Generics) {
|
||||
#[derive(Ord, PartialOrd, Eq, PartialEq)]
|
||||
enum Group {
|
||||
Lifetimes,
|
||||
TypesAndConsts,
|
||||
}
|
||||
e.params.sort_by_key(|param| match param.kind {
|
||||
GenericParamKind::Lifetime => Group::Lifetimes,
|
||||
GenericParamKind::Type { .. } | GenericParamKind::Const { .. } => {
|
||||
Group::TypesAndConsts
|
||||
}
|
||||
});
|
||||
mut_visit::noop_visit_generics(e, self);
|
||||
}
|
||||
}
|
||||
|
||||
NormalizeVisitor.visit_crate(krate);
|
||||
}
|
||||
30
third_party/rust/syn/tests/test_stmt.rs
vendored
@ -1,6 +1,8 @@
#[macro_use]
mod macros;

use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use std::iter::FromIterator;
use syn::Stmt;

#[test]
@ -42,3 +44,31 @@ fn test_raw_variable() {
fn test_raw_invalid() {
assert!(syn::parse_str::<Stmt>("let _ = &raw x;").is_err());
}

#[test]
fn test_none_group() {
// <Ø async fn f() {} Ø>
let tokens = TokenStream::from_iter(vec![TokenTree::Group(Group::new(
Delimiter::None,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("async", Span::call_site())),
TokenTree::Ident(Ident::new("fn", Span::call_site())),
TokenTree::Ident(Ident::new("f", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::new())),
TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new())),
]),
))]);

snapshot!(tokens as Stmt, @r###"
Item(Item::Fn {
vis: Inherited,
sig: Signature {
asyncness: Some,
ident: "f",
generics: Generics,
output: Default,
},
block: Block,
})
"###);
}
166
third_party/rust/syn/tests/test_ty.rs
vendored
@ -50,4 +50,170 @@ fn test_macro_variable_type() {
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
// mimics the token stream corresponding to `$ty::<T>`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote! { ty })),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
|
||||
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
|
||||
TokenTree::Ident(Ident::new("T", Span::call_site())),
|
||||
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Type, @r###"
|
||||
Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "ty",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
colon2_token: Some,
|
||||
args: [
|
||||
Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "T",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_group_angle_brackets() {
|
||||
// mimics the token stream corresponding to `Option<$ty>`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Ident(Ident::new("Option", Span::call_site())),
|
||||
TokenTree::Punct(Punct::new('<', Spacing::Alone)),
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
|
||||
TokenTree::Punct(Punct::new('>', Spacing::Alone)),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Type, @r###"
|
||||
Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Option",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
Type(Type::Group {
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Vec",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "u8",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_group_colons() {
|
||||
// mimics the token stream corresponding to `$ty::Item`
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote! { Vec<u8> })),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
|
||||
TokenTree::Ident(Ident::new("Item", Span::call_site())),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Type, @r###"
|
||||
Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Vec",
|
||||
arguments: PathArguments::AngleBracketed {
|
||||
args: [
|
||||
Type(Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "u8",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
],
|
||||
},
|
||||
},
|
||||
PathSegment {
|
||||
ident: "Item",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
|
||||
let tokens = TokenStream::from_iter(vec![
|
||||
TokenTree::Group(Group::new(Delimiter::None, quote! { [T] })),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
|
||||
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
|
||||
TokenTree::Ident(Ident::new("Element", Span::call_site())),
|
||||
]);
|
||||
|
||||
snapshot!(tokens as Type, @r###"
|
||||
Type::Path {
|
||||
qself: Some(QSelf {
|
||||
ty: Type::Slice {
|
||||
elem: Type::Path {
|
||||
path: Path {
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "T",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
position: 0,
|
||||
}),
|
||||
path: Path {
|
||||
leading_colon: Some,
|
||||
segments: [
|
||||
PathSegment {
|
||||
ident: "Element",
|
||||
arguments: None,
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@ -92,7 +92,10 @@ fn test_missing_in_path() {

#[test]
fn test_crate_path() {
assert_vis_parse!("pub(crate::A, crate::B)", Ok(Visibility::Public(_)) + "(crate::A, crate::B)");
assert_vis_parse!(
"pub(crate::A, crate::B)",
Ok(Visibility::Public(_)) + "(crate::A, crate::B)"
);
}

#[test]