Bug 1929483 - build(rust): update syn 2.0.86 → 2.0.87 r=supply-chain-reviewers

Differential Revision: https://phabricator.services.mozilla.com/D228139
parent b8dd1c1bb1
commit 215c87c0dc

Cargo.lock (generated, 4 changed lines)

@@ -5996,9 +5996,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "2.0.86"
+version = "2.0.87"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e89275301d38033efb81a6e60e3497e734dfcc62571f2854bf4b16690398824c"
+checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
 dependencies = [
  "proc-macro2",
  "quote",

supply-chain/imports.lock (4 changed lines)

@@ -613,8 +613,8 @@ user-login = "mbrubeck"
 user-name = "Matt Brubeck"
 
 [[publisher.syn]]
-version = "2.0.86"
-when = "2024-10-31"
+version = "2.0.87"
+when = "2024-11-02"
 user-id = 3618
 user-login = "dtolnay"
 user-name = "David Tolnay"

third_party/rust/syn/.cargo-checksum.json (vendored, 2 changed lines)

File diff suppressed because one or more lines are too long

third_party/rust/syn/Cargo.toml (vendored, 2 changed lines)

@@ -13,7 +13,7 @@
 edition = "2021"
 rust-version = "1.61"
 name = "syn"
-version = "2.0.86"
+version = "2.0.87"
 authors = ["David Tolnay <dtolnay@gmail.com>"]
 build = false
 include = [

third_party/rust/syn/src/buffer.rs (vendored, 94 changed lines)

@@ -183,52 +183,6 @@ impl<'a> Cursor<'a> {
         self.ptr == self.scope
     }
 
-    /// If the cursor is pointing at a `Group` with the given delimiter, returns
-    /// a cursor into that group and one pointing to the next `TokenTree`.
-    pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
-        // If we're not trying to enter a none-delimited group, we want to
-        // ignore them. We have to make sure to _not_ ignore them when we want
-        // to enter them, of course. For obvious reasons.
-        if delim != Delimiter::None {
-            self.ignore_none();
-        }
-
-        if let Entry::Group(group, end_offset) = self.entry() {
-            if group.delimiter() == delim {
-                let span = group.delim_span();
-                let end_of_group = unsafe { self.ptr.add(*end_offset) };
-                let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
-                let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
-                return Some((inside_of_group, span, after_group));
-            }
-        }
-
-        None
-    }
-
-    pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
-        if let Entry::Group(group, end_offset) = self.entry() {
-            let delimiter = group.delimiter();
-            let span = group.delim_span();
-            let end_of_group = unsafe { self.ptr.add(*end_offset) };
-            let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
-            let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
-            return Some((inside_of_group, delimiter, span, after_group));
-        }
-
-        None
-    }
-
-    pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
-        if let Entry::Group(group, end_offset) = self.entry() {
-            let end_of_group = unsafe { self.ptr.add(*end_offset) };
-            let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
-            return Some((group.clone(), after_group));
-        }
-
-        None
-    }
-
     /// If the cursor is pointing at a `Ident`, returns it along with a cursor
     /// pointing at the next `TokenTree`.
     pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {

@@ -279,6 +233,54 @@ impl<'a> Cursor<'a> {
         }
     }
 
+    /// If the cursor is pointing at a `Group` with the given delimiter, returns
+    /// a cursor into that group and one pointing to the next `TokenTree`.
+    pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
+        // If we're not trying to enter a none-delimited group, we want to
+        // ignore them. We have to make sure to _not_ ignore them when we want
+        // to enter them, of course. For obvious reasons.
+        if delim != Delimiter::None {
+            self.ignore_none();
+        }
+
+        if let Entry::Group(group, end_offset) = self.entry() {
+            if group.delimiter() == delim {
+                let span = group.delim_span();
+                let end_of_group = unsafe { self.ptr.add(*end_offset) };
+                let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
+                let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
+                return Some((inside_of_group, span, after_group));
+            }
+        }
+
+        None
+    }
+
+    /// If the cursor is pointing at a `Group`, returns a cursor into the group
+    /// and one pointing to the next `TokenTree`.
+    pub fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
+        if let Entry::Group(group, end_offset) = self.entry() {
+            let delimiter = group.delimiter();
+            let span = group.delim_span();
+            let end_of_group = unsafe { self.ptr.add(*end_offset) };
+            let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
+            let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
+            return Some((inside_of_group, delimiter, span, after_group));
+        }
+
+        None
+    }
+
+    pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
+        if let Entry::Group(group, end_offset) = self.entry() {
+            let end_of_group = unsafe { self.ptr.add(*end_offset) };
+            let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
+            return Some((group.clone(), after_group));
+        }
+
+        None
+    }
+
     /// Copies all remaining tokens visible from this cursor into a
     /// `TokenStream`.
     pub fn token_stream(self) -> TokenStream {

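Net effect of the two buffer.rs hunks: `group`, `any_group`, and `any_group_token` move below `ident`, and `any_group` picks up doc comments and goes from `pub(crate)` to `pub`. Below is a minimal sketch of how this `Cursor` API is typically driven through `ParseStream::step`, the same pattern the new scan_expr module uses; the helper name `skip_braced_group` is invented for illustration and is not part of syn.

    use proc_macro2::Delimiter;
    use syn::parse::{ParseStream, Result};

    // Consume one `{ ... }` group if it is the next token tree, using
    // Cursor::group as shown in the hunk above, and report whether it was there.
    fn skip_braced_group(input: ParseStream) -> Result<bool> {
        input.step(|cursor| match cursor.group(Delimiter::Brace) {
            // group() yields (contents, delimiter span, cursor past the group).
            Some((_inside, _span, rest)) => Ok((true, rest)),
            None => Ok((false, *cursor)),
        })
    }
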
third_party/rust/syn/src/data.rs (vendored, 83 changed lines)

@@ -248,6 +248,8 @@ pub(crate) mod parsing {
     use crate::parse::discouraged::Speculative as _;
     use crate::parse::{Parse, ParseStream};
     use crate::restriction::{FieldMutability, Visibility};
+    #[cfg(not(feature = "full"))]
+    use crate::scan_expr::scan_expr;
     use crate::token;
     use crate::ty::Type;
     use crate::verbatim;

@@ -276,7 +278,7 @@ pub(crate) mod parsing {
                     let mut discriminant: Result<Expr> = ahead.parse();
                     if discriminant.is_ok() {
                         input.advance_to(&ahead);
-                    } else if scan_lenient_discriminant(input).is_ok() {
+                    } else if scan_expr(input).is_ok() {
                         discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input)));
                     }
                     discriminant?

@@ -294,85 +296,6 @@ pub(crate) mod parsing {
         }
     }
 
-    #[cfg(not(feature = "full"))]
-    pub(crate) fn scan_lenient_discriminant(input: ParseStream) -> Result<()> {
-        use crate::expr::Member;
-        use crate::lifetime::Lifetime;
-        use crate::lit::Lit;
-        use crate::lit::LitFloat;
-        use crate::op::{BinOp, UnOp};
-        use crate::path::{self, AngleBracketedGenericArguments};
-        use proc_macro2::Delimiter::{self, Brace, Bracket, Parenthesis};
-
-        let consume = |delimiter: Delimiter| {
-            Result::unwrap(input.step(|cursor| match cursor.group(delimiter) {
-                Some((_inside, _span, rest)) => Ok((true, rest)),
-                None => Ok((false, *cursor)),
-            }))
-        };
-
-        macro_rules! consume {
-            [$token:tt] => {
-                input.parse::<Option<Token![$token]>>().unwrap().is_some()
-            };
-        }
-
-        let mut initial = true;
-        let mut depth = 0usize;
-        loop {
-            if initial {
-                if consume![&] {
-                    input.parse::<Option<Token![mut]>>()?;
-                } else if consume![if] || consume![match] || consume![while] {
-                    depth += 1;
-                } else if input.parse::<Option<Lit>>()?.is_some()
-                    || (consume(Brace) || consume(Bracket) || consume(Parenthesis))
-                    || (consume![async] || consume![const] || consume![loop] || consume![unsafe])
-                        && (consume(Brace) || break)
-                {
-                    initial = false;
-                } else if consume![let] {
-                    while !consume![=] {
-                        if !((consume![|] || consume![ref] || consume![mut] || consume![@])
-                            || (consume![!] || input.parse::<Option<Lit>>()?.is_some())
-                            || (consume![..=] || consume![..] || consume![&] || consume![_])
-                            || (consume(Brace) || consume(Bracket) || consume(Parenthesis)))
-                        {
-                            path::parsing::qpath(input, true)?;
-                        }
-                    }
-                } else if input.parse::<Option<Lifetime>>()?.is_some() && !consume![:] {
-                    break;
-                } else if input.parse::<UnOp>().is_err() {
-                    path::parsing::qpath(input, true)?;
-                    initial = consume![!] || depth == 0 && input.peek(token::Brace);
-                }
-            } else if input.is_empty() || input.peek(Token![,]) {
-                return Ok(());
-            } else if depth > 0 && consume(Brace) {
-                if consume![else] && !consume(Brace) {
-                    initial = consume![if] || break;
-                } else {
-                    depth -= 1;
-                }
-            } else if input.parse::<BinOp>().is_ok() || (consume![..] | consume![=]) {
-                initial = true;
-            } else if consume![.] {
-                if input.parse::<Option<LitFloat>>()?.is_none()
-                    && (input.parse::<Member>()?.is_named() && consume![::])
-                {
-                    AngleBracketedGenericArguments::do_parse(None, input)?;
-                }
-            } else if consume![as] {
-                input.parse::<Type>()?;
-            } else if !(consume(Brace) || consume(Bracket) || consume(Parenthesis)) {
-                break;
-            }
-        }
-
-        Err(input.error("unsupported expression"))
-    }
-
     #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
     impl Parse for FieldsNamed {
         fn parse(input: ParseStream) -> Result<Self> {

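Net effect in data.rs: with the `full` feature off, an enum discriminant that cannot be parsed as a derive-mode `Expr` is now skipped by the shared `scan_expr` scanner instead of the bespoke `scan_lenient_discriminant` above, and the skipped tokens are kept as `Expr::Verbatim`. A hedged illustration of the kind of input this path exists for; the enum is invented and this assumes a syn build with the `derive` and `parsing` features but without `full`.

    // Discriminants like the `if` expression below are not expressible as a
    // derive-mode Expr, but the lenient scan lets DeriveInput parsing succeed
    // and stores the discriminant tokens as Expr::Verbatim.
    let ast: syn::DeriveInput = syn::parse_str(
        "enum Flags { A = 1 << 0, B = if cfg!(unix) { 2 } else { 4 } }",
    ).unwrap();
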
third_party/rust/syn/src/expr.rs (vendored, 63 changed lines)

@@ -1,15 +1,17 @@
 use crate::attr::Attribute;
 #[cfg(all(feature = "parsing", feature = "full"))]
 use crate::error::Result;
+#[cfg(feature = "parsing")]
+use crate::ext::IdentExt as _;
 #[cfg(feature = "full")]
 use crate::generics::BoundLifetimes;
 use crate::ident::Ident;
-#[cfg(feature = "full")]
+#[cfg(any(feature = "parsing", feature = "full"))]
 use crate::lifetime::Lifetime;
 use crate::lit::Lit;
 use crate::mac::Macro;
 use crate::op::{BinOp, UnOp};
-#[cfg(all(feature = "parsing", feature = "full"))]
+#[cfg(feature = "parsing")]
 use crate::parse::ParseStream;
 #[cfg(feature = "full")]
 use crate::pat::Pat;

@@ -889,6 +891,36 @@ impl Expr {
         parsing::parse_with_earlier_boundary_rule(input)
     }
 
+    /// Returns whether the next token in the parse stream is one that might
+    /// possibly form the beginning of an expr.
+    ///
+    /// This classification is a load-bearing part of the grammar of some Rust
+    /// expressions, notably `return` and `break`. For example `return < …` will
+    /// never parse `<` as a binary operator regardless of what comes after,
+    /// because `<` is a legal starting token for an expression and so it's
+    /// required to be continued as a return value, such as `return <Struct as
+    /// Trait>::CONST`. Meanwhile `return > …` treats the `>` as a binary
+    /// operator because it cannot be a starting token for any Rust expression.
+    #[cfg(feature = "parsing")]
+    #[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
+    pub fn peek(input: ParseStream) -> bool {
+        input.peek(Ident::peek_any) // value name or keyword
+            || input.peek(token::Paren) // tuple
+            || input.peek(token::Bracket) // array
+            || input.peek(token::Brace) // block
+            || input.peek(Lit) // literal
+            || input.peek(Token![!]) && !input.peek(Token![!=]) // operator not
+            || input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus
+            || input.peek(Token![*]) && !input.peek(Token![*=]) // dereference
+            || input.peek(Token![|]) && !input.peek(Token![|=]) // closure
+            || input.peek(Token![&]) && !input.peek(Token![&=]) // reference
+            || input.peek(Token![..]) // range
+            || input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path
+            || input.peek(Token![::]) // absolute path
+            || input.peek(Lifetime) // labeled loop
+            || input.peek(Token![#]) // expression attributes
+    }
+
     #[cfg(all(feature = "parsing", feature = "full"))]
     pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
         match self {

@@ -1147,8 +1179,6 @@ pub(crate) mod parsing {
         FieldValue, Index, Member,
     };
     #[cfg(feature = "full")]
-    use crate::ext::IdentExt as _;
-    #[cfg(feature = "full")]
     use crate::generics::BoundLifetimes;
     use crate::ident::Ident;
     #[cfg(feature = "full")]

@@ -1266,25 +1296,6 @@ pub(crate) mod parsing {
         }
     }
 
-    #[cfg(feature = "full")]
-    fn can_begin_expr(input: ParseStream) -> bool {
-        input.peek(Ident::peek_any) // value name or keyword
-            || input.peek(token::Paren) // tuple
-            || input.peek(token::Bracket) // array
-            || input.peek(token::Brace) // block
-            || input.peek(Lit) // literal
-            || input.peek(Token![!]) && !input.peek(Token![!=]) // operator not
-            || input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus
-            || input.peek(Token![*]) && !input.peek(Token![*=]) // dereference
-            || input.peek(Token![|]) && !input.peek(Token![|=]) // closure
-            || input.peek(Token![&]) && !input.peek(Token![&=]) // reference
-            || input.peek(Token![..]) // range notation
-            || input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path
-            || input.peek(Token![::]) // global path
-            || input.peek(Lifetime) // labeled loop
-            || input.peek(Token![#]) // expression attributes
-    }
-
     #[cfg(feature = "full")]
     fn parse_expr(
         input: ParseStream,

@@ -2439,7 +2450,7 @@ pub(crate) mod parsing {
             attrs: Vec::new(),
             return_token: input.parse()?,
             expr: {
-                if can_begin_expr(input) {
+                if Expr::peek(input) {
                    Some(input.parse()?)
                 } else {
                     None

@@ -2477,7 +2488,7 @@ pub(crate) mod parsing {
             attrs: Vec::new(),
             yield_token: input.parse()?,
             expr: {
-                if can_begin_expr(input) {
+                if Expr::peek(input) {
                     Some(input.parse()?)
                 } else {
                     None

@@ -2690,7 +2701,7 @@ pub(crate) mod parsing {
         }
 
         input.advance_to(&ahead);
-        let expr = if can_begin_expr(input) && (allow_struct.0 || !input.peek(token::Brace)) {
+        let expr = if Expr::peek(input) && (allow_struct.0 || !input.peek(token::Brace)) {
            Some(input.parse()?)
         } else {
             None

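Net effect in expr.rs: the private `can_begin_expr` helper becomes the public, documented `Expr::peek`, and the three call sites shown above switch over to it. A small hedged sketch of using the new method from a custom `Parse` impl with syn 2.0.87; the `ReturnValue` type is invented.

    use syn::parse::{Parse, ParseStream};
    use syn::{Expr, Result, Token};

    // Parse `return` optionally followed by a value, using Expr::peek to decide
    // whether the next token can begin an expression (mirroring ExprReturn).
    struct ReturnValue {
        return_token: Token![return],
        value: Option<Expr>,
    }

    impl Parse for ReturnValue {
        fn parse(input: ParseStream) -> Result<Self> {
            Ok(ReturnValue {
                return_token: input.parse()?,
                value: if Expr::peek(input) {
                    Some(input.parse()?)
                } else {
                    None
                },
            })
        }
    }
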
third_party/rust/syn/src/lib.rs (vendored, 8 changed lines)

@@ -249,7 +249,7 @@
 //! dynamic library libproc_macro from rustc toolchain.
 
 // Syn types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/syn/2.0.86")]
+#![doc(html_root_url = "https://docs.rs/syn/2.0.87")]
 #![cfg_attr(docsrs, feature(doc_cfg))]
 #![deny(unsafe_op_in_unsafe_fn)]
 #![allow(non_camel_case_types)]

@@ -264,6 +264,7 @@
     clippy::derivable_impls,
     clippy::diverging_sub_expression,
     clippy::doc_markdown,
+    clippy::enum_glob_use,
     clippy::expl_impl_clone_on_copy,
     clippy::explicit_auto_deref,
     clippy::if_not_else,

@@ -307,6 +308,8 @@
     clippy::wildcard_imports,
 )]
 
+extern crate self as syn;
+
 #[cfg(feature = "proc-macro")]
 extern crate proc_macro;
 

@@ -509,6 +512,9 @@ pub use crate::restriction::{FieldMutability, VisRestricted, Visibility};
 
 mod sealed;
 
+#[cfg(all(feature = "parsing", feature = "derive", not(feature = "full")))]
+mod scan_expr;
+
 mod span;
 
 #[cfg(all(feature = "parsing", feature = "printing"))]

third_party/rust/syn/src/scan_expr.rs (vendored, new file, 264 lines)

@@ -0,0 +1,264 @@
+use self::{Action::*, Input::*};
+use proc_macro2::{Delimiter, Ident, Spacing, TokenTree};
+use syn::parse::{ParseStream, Result};
+use syn::{AngleBracketedGenericArguments, BinOp, Expr, ExprPath, Lifetime, Lit, Token, Type};
+
+enum Input {
+    Keyword(&'static str),
+    Punct(&'static str),
+    ConsumeAny,
+    ConsumeBinOp,
+    ConsumeBrace,
+    ConsumeDelimiter,
+    ConsumeIdent,
+    ConsumeLifetime,
+    ConsumeLiteral,
+    ConsumeNestedBrace,
+    ExpectPath,
+    ExpectTurbofish,
+    ExpectType,
+    CanBeginExpr,
+    Otherwise,
+    Empty,
+}
+
+enum Action {
+    SetState(&'static [(Input, Action)]),
+    IncDepth,
+    DecDepth,
+    Finish,
+}
+
+static INIT: [(Input, Action); 28] = [
+    (ConsumeDelimiter, SetState(&POSTFIX)),
+    (Keyword("async"), SetState(&ASYNC)),
+    (Keyword("break"), SetState(&BREAK_LABEL)),
+    (Keyword("const"), SetState(&CONST)),
+    (Keyword("continue"), SetState(&CONTINUE)),
+    (Keyword("for"), SetState(&FOR)),
+    (Keyword("if"), IncDepth),
+    (Keyword("let"), SetState(&PATTERN)),
+    (Keyword("loop"), SetState(&BLOCK)),
+    (Keyword("match"), IncDepth),
+    (Keyword("move"), SetState(&CLOSURE)),
+    (Keyword("return"), SetState(&RETURN)),
+    (Keyword("static"), SetState(&CLOSURE)),
+    (Keyword("unsafe"), SetState(&BLOCK)),
+    (Keyword("while"), IncDepth),
+    (Keyword("yield"), SetState(&RETURN)),
+    (Keyword("_"), SetState(&POSTFIX)),
+    (Punct("!"), SetState(&INIT)),
+    (Punct("#"), SetState(&[(ConsumeDelimiter, SetState(&INIT))])),
+    (Punct("&"), SetState(&REFERENCE)),
+    (Punct("*"), SetState(&INIT)),
+    (Punct("-"), SetState(&INIT)),
+    (Punct("..="), SetState(&INIT)),
+    (Punct(".."), SetState(&RANGE)),
+    (Punct("|"), SetState(&CLOSURE_ARGS)),
+    (ConsumeLifetime, SetState(&[(Punct(":"), SetState(&INIT))])),
+    (ConsumeLiteral, SetState(&POSTFIX)),
+    (ExpectPath, SetState(&PATH)),
+];
+
+static POSTFIX: [(Input, Action); 10] = [
+    (Keyword("as"), SetState(&[(ExpectType, SetState(&POSTFIX))])),
+    (Punct("..="), SetState(&INIT)),
+    (Punct(".."), SetState(&RANGE)),
+    (Punct("."), SetState(&DOT)),
+    (Punct("?"), SetState(&POSTFIX)),
+    (ConsumeBinOp, SetState(&INIT)),
+    (Punct("="), SetState(&INIT)),
+    (ConsumeNestedBrace, SetState(&IF_THEN)),
+    (ConsumeDelimiter, SetState(&POSTFIX)),
+    (Empty, Finish),
+];
+
+static ASYNC: [(Input, Action); 3] = [
+    (Keyword("move"), SetState(&ASYNC)),
+    (Punct("|"), SetState(&CLOSURE_ARGS)),
+    (ConsumeBrace, SetState(&POSTFIX)),
+];
+
+static BLOCK: [(Input, Action); 1] = [(ConsumeBrace, SetState(&POSTFIX))];
+
+static BREAK_LABEL: [(Input, Action); 2] = [
+    (ConsumeLifetime, SetState(&BREAK_VALUE)),
+    (Otherwise, SetState(&BREAK_VALUE)),
+];
+
+static BREAK_VALUE: [(Input, Action); 3] = [
+    (ConsumeNestedBrace, SetState(&IF_THEN)),
+    (CanBeginExpr, SetState(&INIT)),
+    (Otherwise, SetState(&POSTFIX)),
+];
+
+static CLOSURE: [(Input, Action); 6] = [
+    (Keyword("async"), SetState(&CLOSURE)),
+    (Keyword("move"), SetState(&CLOSURE)),
+    (Punct(","), SetState(&CLOSURE)),
+    (Punct(">"), SetState(&CLOSURE)),
+    (Punct("|"), SetState(&CLOSURE_ARGS)),
+    (ConsumeLifetime, SetState(&CLOSURE)),
+];
+
+static CLOSURE_ARGS: [(Input, Action); 2] = [
+    (Punct("|"), SetState(&CLOSURE_RET)),
+    (ConsumeAny, SetState(&CLOSURE_ARGS)),
+];
+
+static CLOSURE_RET: [(Input, Action); 2] = [
+    (Punct("->"), SetState(&[(ExpectType, SetState(&BLOCK))])),
+    (Otherwise, SetState(&INIT)),
+];
+
+static CONST: [(Input, Action); 2] = [
+    (Punct("|"), SetState(&CLOSURE_ARGS)),
+    (ConsumeBrace, SetState(&POSTFIX)),
+];
+
+static CONTINUE: [(Input, Action); 2] = [
+    (ConsumeLifetime, SetState(&POSTFIX)),
+    (Otherwise, SetState(&POSTFIX)),
+];
+
+static DOT: [(Input, Action); 3] = [
+    (Keyword("await"), SetState(&POSTFIX)),
+    (ConsumeIdent, SetState(&METHOD)),
+    (ConsumeLiteral, SetState(&POSTFIX)),
+];
+
+static FOR: [(Input, Action); 2] = [
+    (Punct("<"), SetState(&CLOSURE)),
+    (Otherwise, SetState(&PATTERN)),
+];
+
+static IF_ELSE: [(Input, Action); 2] = [(Keyword("if"), SetState(&INIT)), (ConsumeBrace, DecDepth)];
+static IF_THEN: [(Input, Action); 2] =
+    [(Keyword("else"), SetState(&IF_ELSE)), (Otherwise, DecDepth)];
+
+static METHOD: [(Input, Action); 1] = [(ExpectTurbofish, SetState(&POSTFIX))];
+
+static PATH: [(Input, Action); 4] = [
+    (Punct("!="), SetState(&INIT)),
+    (Punct("!"), SetState(&INIT)),
+    (ConsumeNestedBrace, SetState(&IF_THEN)),
+    (Otherwise, SetState(&POSTFIX)),
+];
+
+static PATTERN: [(Input, Action); 15] = [
+    (ConsumeDelimiter, SetState(&PATTERN)),
+    (Keyword("box"), SetState(&PATTERN)),
+    (Keyword("in"), IncDepth),
+    (Keyword("mut"), SetState(&PATTERN)),
+    (Keyword("ref"), SetState(&PATTERN)),
+    (Keyword("_"), SetState(&PATTERN)),
+    (Punct("!"), SetState(&PATTERN)),
+    (Punct("&"), SetState(&PATTERN)),
+    (Punct("..="), SetState(&PATTERN)),
+    (Punct(".."), SetState(&PATTERN)),
+    (Punct("="), SetState(&INIT)),
+    (Punct("@"), SetState(&PATTERN)),
+    (Punct("|"), SetState(&PATTERN)),
+    (ConsumeLiteral, SetState(&PATTERN)),
+    (ExpectPath, SetState(&PATTERN)),
+];
+
+static RANGE: [(Input, Action); 6] = [
+    (Punct("..="), SetState(&INIT)),
+    (Punct(".."), SetState(&RANGE)),
+    (Punct("."), SetState(&DOT)),
+    (ConsumeNestedBrace, SetState(&IF_THEN)),
+    (Empty, Finish),
+    (Otherwise, SetState(&INIT)),
+];
+
+static RAW: [(Input, Action); 3] = [
+    (Keyword("const"), SetState(&INIT)),
+    (Keyword("mut"), SetState(&INIT)),
+    (Otherwise, SetState(&POSTFIX)),
+];
+
+static REFERENCE: [(Input, Action); 3] = [
+    (Keyword("mut"), SetState(&INIT)),
+    (Keyword("raw"), SetState(&RAW)),
+    (Otherwise, SetState(&INIT)),
+];
+
+static RETURN: [(Input, Action); 2] = [
+    (CanBeginExpr, SetState(&INIT)),
+    (Otherwise, SetState(&POSTFIX)),
+];
+
+pub(crate) fn scan_expr(input: ParseStream) -> Result<()> {
+    let mut state = INIT.as_slice();
+    let mut depth = 0usize;
+    'table: loop {
+        for rule in state {
+            if match rule.0 {
+                Input::Keyword(expected) => input.step(|cursor| match cursor.ident() {
+                    Some((ident, rest)) if ident == expected => Ok((true, rest)),
+                    _ => Ok((false, *cursor)),
+                })?,
+                Input::Punct(expected) => input.step(|cursor| {
+                    let begin = *cursor;
+                    let mut cursor = begin;
+                    for (i, ch) in expected.chars().enumerate() {
+                        match cursor.punct() {
+                            Some((punct, _)) if punct.as_char() != ch => break,
+                            Some((_, rest)) if i == expected.len() - 1 => {
+                                return Ok((true, rest));
+                            }
+                            Some((punct, rest)) if punct.spacing() == Spacing::Joint => {
+                                cursor = rest;
+                            }
+                            _ => break,
+                        }
+                    }
+                    Ok((false, begin))
+                })?,
+                Input::ConsumeAny => input.parse::<Option<TokenTree>>()?.is_some(),
+                Input::ConsumeBinOp => input.parse::<BinOp>().is_ok(),
+                Input::ConsumeBrace | Input::ConsumeNestedBrace => {
+                    (matches!(rule.0, Input::ConsumeBrace) || depth > 0)
+                        && input.step(|cursor| match cursor.group(Delimiter::Brace) {
+                            Some((_inside, _span, rest)) => Ok((true, rest)),
+                            None => Ok((false, *cursor)),
+                        })?
+                }
+                Input::ConsumeDelimiter => input.step(|cursor| match cursor.any_group() {
+                    Some((_inside, _delimiter, _span, rest)) => Ok((true, rest)),
+                    None => Ok((false, *cursor)),
+                })?,
+                Input::ConsumeIdent => input.parse::<Option<Ident>>()?.is_some(),
+                Input::ConsumeLifetime => input.parse::<Option<Lifetime>>()?.is_some(),
+                Input::ConsumeLiteral => input.parse::<Option<Lit>>()?.is_some(),
+                Input::ExpectPath => {
+                    input.parse::<ExprPath>()?;
+                    true
+                }
+                Input::ExpectTurbofish => {
+                    if input.peek(Token![::]) {
+                        input.parse::<AngleBracketedGenericArguments>()?;
+                    }
+                    true
+                }
+                Input::ExpectType => {
+                    Type::without_plus(input)?;
+                    true
+                }
+                Input::CanBeginExpr => Expr::peek(input),
+                Input::Otherwise => true,
+                Input::Empty => input.is_empty() || input.peek(Token![,]),
+            } {
+                state = match rule.1 {
+                    Action::SetState(next) => next,
+                    Action::IncDepth => (depth += 1, &INIT).1,
+                    Action::DecDepth => (depth -= 1, &POSTFIX).1,
+                    Action::Finish => return if depth == 0 { Ok(()) } else { break },
+                };
+                continue 'table;
+            }
+        }
+        return Err(input.error("unsupported expression"));
+    }
+}

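The new module is a table-driven scanner: each `static` above is a state, each `(Input, Action)` pair is a transition, and `IncDepth`/`DecDepth` count enclosing `if`/`match`/`while` (and `for ... in`) headers so their brace-delimited bodies and `else` branches are consumed as part of that construct. The function is `pub(crate)`, so outside the crate it is only reachable the way the precedence test below reaches it, by compiling the source file in with `#[path]`. A hedged sketch of that invocation; the token stream and the exact `#[path]` value are illustrative.

    use quote::quote;
    use syn::parse::Parser as _;

    // Compile syn's scanner source directly into this crate, as the
    // test_precedence.rs hunk below does with "../src/scan_expr.rs".
    #[path = "scan_expr.rs"]
    mod scan_expr;

    fn main() {
        // scan_expr has the shape fn(ParseStream) -> Result<()>, so the Parser
        // trait can run it over any proc_macro2::TokenStream.
        let tokens = quote!(match x { 0 => a, _ => b } + 1);
        assert!(scan_expr::scan_expr.parse2(tokens).is_ok());
    }
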
third_party/rust/syn/tests/common/eq.rs (vendored, 2 changed lines)

@@ -498,7 +498,7 @@ spanless_eq_struct!(Fn; defaultness generics sig body);
 spanless_eq_struct!(FnDecl; inputs output);
 spanless_eq_struct!(FnHeader; constness coroutine_kind safety ext);
 spanless_eq_struct!(FnSig; header decl span);
-spanless_eq_struct!(ForeignMod; safety abi items);
+spanless_eq_struct!(ForeignMod; extern_span safety abi items);
 spanless_eq_struct!(FormatArgPosition; index kind span);
 spanless_eq_struct!(FormatArgs; span template arguments);
 spanless_eq_struct!(FormatArgument; kind expr);

third_party/rust/syn/tests/test_precedence.rs (vendored, 17 changed lines)

@@ -49,6 +49,7 @@ use std::fs;
 use std::path::Path;
 use std::process;
 use std::sync::atomic::{AtomicUsize, Ordering};
+use syn::parse::Parser as _;
 
 #[macro_use]
 mod macros;

@@ -56,6 +57,9 @@ mod macros;
 mod common;
 mod repo;
 
+#[path = "../src/scan_expr.rs"]
+mod scan_expr;
+
 #[test]
 fn test_rustc_precedence() {
     repo::rayon_init();

@@ -115,7 +119,8 @@ fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (us
 
     rustc_span::create_session_if_not_set_then(edition, |_| {
         for expr in exprs {
-            let source_code = expr.to_token_stream().to_string();
+            let expr_tokens = expr.to_token_stream();
+            let source_code = expr_tokens.to_string();
             let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&source_code) {
                 e
             } else {

@@ -173,6 +178,16 @@ fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (us
                 continue;
             }
 
+            if scan_expr::scan_expr.parse2(expr_tokens).is_err() {
+                failed += 1;
+                errorf!(
+                    "\nFAIL {} - failed to scan expr\n{}\n",
+                    path.display(),
+                    source_code,
+                );
+                continue;
+            }
+
             passed += 1;
         }
     });