Mirror of https://gitee.com/openharmony/third_party_rust_syn, synced 2024-11-23 16:00:10 +00:00.

Update to the next version of proc-macro2

Depends on dtolnay/quote#73. Depends on alexcrichton/proc-macro2#90. Depends on a new nightly.

parent 1df4ef073b
commit a74a1c89fd
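Most of the diff below is mechanical fallout from the dependency bump: proc-macro2 0.4 renames `Term` to `Ident` and `Op` to `Punct`, `Ident` no longer implements `Copy` (hence the many added `.clone()` calls), and quote 0.6 replaces `quote::Tokens` with `proc_macro2::TokenStream` in `ToTokens` signatures. A minimal sketch of the renamed API, assuming the proc-macro2 0.4 / quote 0.6 versions pinned below; the helper names are illustrative, not taken from this repository:

extern crate proc_macro2;

use proc_macro2::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};

// proc-macro2 0.3 spelled these types `Term` and `Op`; 0.4 renames them to
// `Ident` and `Punct`, and `Ident` now owns its data, so call sites clone it
// instead of copying.
fn cfg_ident() -> TokenTree {
    Ident::new("cfg", Span::call_site()).into()
}

fn eq_punct() -> TokenTree {
    Punct::new('=', Spacing::Alone).into()
}

fn main() {
    // quote 0.6 emits proc_macro2::TokenStream where quote 0.5 used quote::Tokens.
    let stream: TokenStream = vec![cfg_ident(), eq_punct()].into_iter().collect();
    println!("{}", stream);
}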
@@ -26,11 +26,11 @@ visit-mut = []
 fold = []
 clone-impls = []
 extra-traits = []
-proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
+proc-macro = ["proc-macro2/proc-macro"]

 [dependencies]
-proc-macro2 = { version = "0.3", default-features = false }
-quote = { version = "0.5", optional = true, default-features = false }
+proc-macro2 = { version = "0.4", default-features = false }
+quote = { version = "0.6", optional = true, default-features = false }
 unicode-xid = "0.1"

 [dev-dependencies]
@@ -42,3 +42,6 @@ all-features = true

 [package.metadata.playground]
 all-features = true
+
+[patch.crates-io]
+quote = { git = 'https://github.com/alexcrichton/quote', branch = 'next' }
@@ -7,7 +7,10 @@ publish = false # this is an internal crate which should never be published

 [dependencies]
 syn = { path = "..", features = ["full", "extra-traits"] }
-quote = "0.5"
+quote = "0.6"
 failure = "0.1"
 inflections = "1.1"
-proc-macro2 = "0.3"
+proc-macro2 = "0.4"
+
+[patch.crates-io]
+quote = { git = 'https://github.com/alexcrichton/quote', branch = 'next' }
@@ -22,8 +22,9 @@ extern crate quote;
 extern crate syn;

 use quote::{ToTokens, Tokens};
-use syn::{Attribute, Data, DataStruct, DeriveInput, Ident, Item};
+use syn::{Attribute, Data, DataStruct, DeriveInput, Item};
 use failure::{err_msg, Error};
+use proc_macro2::{Ident, Span};

 use std::io::{Read, Write};
 use std::fmt::{self, Debug};
@@ -43,19 +44,19 @@ const EXTRA_TYPES: &[&str] = &["Ident", "Lifetime"];
 const TERMINAL_TYPES: &[&str] = &["Span"];

-fn path_eq(a: &syn::Path, b: &syn::Path) -> bool {
-    if a.global() != b.global() || a.segments.len() != b.segments.len() {
-        return false;
+fn path_eq(a: &syn::Path, b: &str) -> bool {
+    if a.global() {
+        return false
     }
-    a.segments
-        .iter()
-        .zip(b.segments.iter())
-        .all(|(a, b)| a.ident == b.ident)
+    if a.segments.len() != 1 {
+        return false
+    }
+    a.segments[0].ident.to_string() == b
 }

 fn get_features(attrs: &[Attribute], mut features: Tokens) -> Tokens {
     for attr in attrs {
-        if path_eq(&attr.path, &"cfg".into()) {
+        if path_eq(&attr.path, "cfg") {
             attr.to_tokens(&mut features);
         }
     }
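The reworked helper above compares the path against a plain string instead of building a second `syn::Path`, which sidesteps the `Ident` conversions removed elsewhere in this commit. A standalone sketch of the same check, assuming the syn APIs of this era (a `Path` with public `leading_colon` and `segments` fields); the function name and the use of `leading_colon` instead of the crate's `global()` helper are illustrative, not copied from the repository:

extern crate syn;

/// True when `path` is a bare single-segment path spelling exactly `word`,
/// e.g. the `cfg` in `#[cfg(feature = "full")]`.
fn path_is_word(path: &syn::Path, word: &str) -> bool {
    // A leading `::` or more than one segment disqualifies the path.
    path.leading_colon.is_none()
        && path.segments.len() == 1
        && path
            .segments
            .iter()
            .next()
            .map(|segment| segment.ident.to_string() == word)
            .unwrap_or(false)
}

fn main() {
    let path: syn::Path = syn::parse_str("cfg").unwrap();
    assert!(path_is_word(&path, "cfg"));
}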
@@ -106,7 +107,7 @@ fn load_file<P: AsRef<Path>>(name: P, features: &Tokens, lookup: &mut Lookup) ->
                 // We don't want to try to load the generated rust files and
                 // parse them, so we ignore them here.
                 for name in IGNORED_MODS {
-                    if item.ident == name {
+                    if item.ident.to_string() == *name {
                         continue 'items;
                     }
                 }
@@ -116,7 +117,7 @@ fn load_file<P: AsRef<Path>>(name: P, features: &Tokens, lookup: &mut Lookup) ->
                 //
                 // The derive module is weird because it is built with either
                 // `full` or `derive` but exported only under `derive`.
-                let features = if item.ident == "derive" {
+                let features = if item.ident.to_string() == "derive" {
                     quote!(#[cfg(feature = "derive")])
                 } else {
                     get_features(&item.attrs, features.clone())
@@ -124,7 +125,7 @@ fn load_file<P: AsRef<Path>>(name: P, features: &Tokens, lookup: &mut Lookup) ->

                 // Look up the submodule file, and recursively parse it.
                 // XXX: Only handles same-directory .rs file submodules.
-                let path = parent.join(&format!("{}.rs", item.ident.as_ref()));
+                let path = parent.join(&format!("{}.rs", item.ident));
                 load_file(path, &features, lookup)?;
             }
             Item::Macro(item) => {
@ -134,15 +135,15 @@ fn load_file<P: AsRef<Path>>(name: P, features: &Tokens, lookup: &mut Lookup) ->
|
||||
|
||||
// Try to parse the AstItem declaration out of the item.
|
||||
let tts = &item.mac.tts;
|
||||
let found = if path_eq(&item.mac.path, &"ast_struct".into()) {
|
||||
let found = if path_eq(&item.mac.path, "ast_struct") {
|
||||
syn::parse_str::<parsing::AstStruct>("e!(#tts).to_string())
|
||||
.map_err(|_| err_msg("failed to parse ast_struct"))?
|
||||
.0
|
||||
} else if path_eq(&item.mac.path, &"ast_enum".into()) {
|
||||
} else if path_eq(&item.mac.path, "ast_enum") {
|
||||
syn::parse_str::<parsing::AstEnum>("e!(#tts).to_string())
|
||||
.map_err(|_| err_msg("failed to parse ast_enum"))?
|
||||
.0
|
||||
} else if path_eq(&item.mac.path, &"ast_enum_of_structs".into()) {
|
||||
} else if path_eq(&item.mac.path, "ast_enum_of_structs") {
|
||||
syn::parse_str::<parsing::AstEnumOfStructs>("e!(#tts).to_string())
|
||||
.map_err(|_| err_msg("failed to parse ast_enum_of_structs"))?
|
||||
.0
|
||||
@ -153,13 +154,13 @@ fn load_file<P: AsRef<Path>>(name: P, features: &Tokens, lookup: &mut Lookup) ->
|
||||
// Record our features on the parsed AstItems.
|
||||
for mut item in found {
|
||||
features.to_tokens(&mut item.features);
|
||||
lookup.insert(item.ast.ident, item);
|
||||
lookup.insert(item.ast.ident.clone(), item);
|
||||
}
|
||||
}
|
||||
Item::Struct(item) => {
|
||||
let ident = item.ident;
|
||||
if EXTRA_TYPES.contains(&ident.as_ref()) {
|
||||
lookup.insert(ident, AstItem {
|
||||
if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
|
||||
lookup.insert(ident.clone(), AstItem {
|
||||
ast: DeriveInput {
|
||||
ident: ident,
|
||||
vis: item.vis,
|
||||
@ -189,14 +190,14 @@ mod parsing {
|
||||
use syn::synom::*;
|
||||
use syn::*;
|
||||
use quote::Tokens;
|
||||
use proc_macro2::TokenStream;
|
||||
use proc_macro2::{TokenStream, Ident};
|
||||
|
||||
// Parses #full - returns #[cfg(feature = "full")] if it is present, and
|
||||
// nothing otherwise.
|
||||
named!(full -> (Tokens, bool), map!(option!(do_parse!(
|
||||
punct!(#) >>
|
||||
id: syn!(Ident) >>
|
||||
cond_reduce!(id == "full") >>
|
||||
cond_reduce!(id.to_string() == "full") >>
|
||||
()
|
||||
)), |s| if s.is_some() {
|
||||
(quote!(#[cfg(feature = "full")]), true)
|
||||
@ -207,7 +208,7 @@ mod parsing {
|
||||
named!(manual_extra_traits -> (), do_parse!(
|
||||
punct!(#) >>
|
||||
id: syn!(Ident) >>
|
||||
cond_reduce!(id == "manual_extra_traits") >>
|
||||
cond_reduce!(id.to_string() == "manual_extra_traits") >>
|
||||
()
|
||||
));
|
||||
|
||||
@ -241,7 +242,7 @@ mod parsing {
|
||||
named!(no_visit -> (), do_parse!(
|
||||
punct!(#) >>
|
||||
id: syn!(Ident) >>
|
||||
cond_reduce!(id == "no_visit") >>
|
||||
cond_reduce!(id.to_string() == "no_visit") >>
|
||||
()
|
||||
));
|
||||
|
||||
@ -280,7 +281,7 @@ mod parsing {
|
||||
keyword!(pub) >>
|
||||
variant: syn!(Ident) >>
|
||||
member: option!(map!(parens!(alt!(
|
||||
call!(ast_struct_inner) => { |x: AstItem| (Path::from(x.ast.ident), Some(x)) }
|
||||
call!(ast_struct_inner) => { |x: AstItem| (Path::from(x.ast.ident.clone()), Some(x)) }
|
||||
|
|
||||
syn!(Path) => { |x| (x, None) }
|
||||
)), |x| x.1)) >>
|
||||
@ -307,7 +308,7 @@ mod parsing {
|
||||
// tokens to strings to re-parse them.
|
||||
let enum_item = {
|
||||
let variants = variants.1.iter().map(|v| {
|
||||
let name = v.name;
|
||||
let name = v.name.clone();
|
||||
match v.member {
|
||||
Some(ref member) => quote!(#name(#member)),
|
||||
None => quote!(#name),
|
||||
@ -335,6 +336,7 @@ mod codegen {
|
||||
use syn::punctuated::Punctuated;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use std::fmt::{self, Display};
|
||||
use proc_macro2::{Ident, Span};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct State {
|
||||
@ -348,7 +350,7 @@ mod codegen {
|
||||
|
||||
fn under_name(name: Ident) -> Ident {
|
||||
use inflections::Inflect;
|
||||
name.as_ref().to_snake_case().into()
|
||||
Ident::new(&name.to_string().to_snake_case(), Span::call_site())
|
||||
}
|
||||
|
||||
enum RelevantType<'a> {
|
||||
@ -366,13 +368,13 @@ mod codegen {
|
||||
match *ty {
|
||||
Type::Path(TypePath { qself: None, ref path }) => {
|
||||
let last = path.segments.last().unwrap().into_value();
|
||||
match last.ident.as_ref() {
|
||||
match &last.ident.to_string()[..] {
|
||||
"Box" => RelevantType::Box(first_arg(&last.arguments)),
|
||||
"Vec" => RelevantType::Vec(first_arg(&last.arguments)),
|
||||
"Punctuated" => RelevantType::Punctuated(first_arg(&last.arguments)),
|
||||
"Option" => RelevantType::Option(first_arg(&last.arguments)),
|
||||
"Brace" | "Bracket" | "Paren" | "Group" => {
|
||||
RelevantType::Token(last.ident.into_tokens())
|
||||
RelevantType::Token(last.ident.clone().into_tokens())
|
||||
}
|
||||
_ => {
|
||||
if let Some(item) = lookup.get(&last.ident) {
|
||||
@ -386,7 +388,7 @@ mod codegen {
|
||||
Type::Tuple(TypeTuple { ref elems, .. }) => {
|
||||
RelevantType::Tuple(elems)
|
||||
}
|
||||
Type::Macro(TypeMacro { ref mac }) if mac.path.segments.last().unwrap().into_value().ident == "Token" => {
|
||||
Type::Macro(TypeMacro { ref mac }) if mac.path.segments.last().unwrap().into_value().ident.to_string() == "Token" => {
|
||||
RelevantType::Token(mac.into_tokens())
|
||||
}
|
||||
_ => RelevantType::Pass,
|
||||
@ -467,17 +469,17 @@ mod codegen {
|
||||
match kind {
|
||||
Visit => format!(
|
||||
"_visitor.visit_{under_name}({name})",
|
||||
under_name = under_name(item.ast.ident),
|
||||
under_name = under_name(item.ast.ident.clone()),
|
||||
name = name.ref_tokens(),
|
||||
),
|
||||
VisitMut => format!(
|
||||
"_visitor.visit_{under_name}_mut({name})",
|
||||
under_name = under_name(item.ast.ident),
|
||||
under_name = under_name(item.ast.ident.clone()),
|
||||
name = name.ref_mut_tokens(),
|
||||
),
|
||||
Fold => format!(
|
||||
"_visitor.fold_{under_name}({name})",
|
||||
under_name = under_name(item.ast.ident),
|
||||
under_name = under_name(item.ast.ident.clone()),
|
||||
name = name.owned_tokens(),
|
||||
),
|
||||
}
|
||||
@ -695,7 +697,7 @@ mod codegen {
|
||||
}
|
||||
|
||||
pub fn generate(state: &mut State, lookup: &Lookup, s: &AstItem) {
|
||||
let under_name = under_name(s.ast.ident);
|
||||
let under_name = under_name(s.ast.ident.clone());
|
||||
|
||||
state.visit_trait.push_str(&format!(
|
||||
"{features}\n\
|
||||
@ -783,7 +785,7 @@ mod codegen {
|
||||
state.fold_impl.push_str(", ");
|
||||
|
||||
let mut tokens = quote!();
|
||||
Ident::from(name).to_tokens(&mut tokens);
|
||||
Ident::new(&name, Span::call_site()).to_tokens(&mut tokens);
|
||||
|
||||
(el, tokens)
|
||||
})
|
||||
@ -872,7 +874,7 @@ mod codegen {
|
||||
fields.named
|
||||
.iter()
|
||||
.map(|el| {
|
||||
let id = el.ident;
|
||||
let id = el.ident.clone();
|
||||
(el, quote!(_i.#id))
|
||||
})
|
||||
.collect()
|
||||
@ -962,10 +964,10 @@ fn main() {
|
||||
for &tt in TERMINAL_TYPES {
|
||||
use syn::*;
|
||||
lookup.insert(
|
||||
Ident::from(tt),
|
||||
Ident::new(&tt, Span::call_site()),
|
||||
AstItem {
|
||||
ast: DeriveInput {
|
||||
ident: Ident::from(tt),
|
||||
ident: Ident::new(tt, Span::call_site()),
|
||||
vis: Visibility::Public(VisPublic {
|
||||
pub_token: Default::default(),
|
||||
}),
|
||||
@ -1015,7 +1017,7 @@ macro_rules! full {
|
||||
use *;
|
||||
#[cfg(any(feature = \"full\", feature = \"derive\"))]
|
||||
use token::{{Brace, Bracket, Paren, Group}};
|
||||
use proc_macro2::Span;
|
||||
use proc_macro2::{{Span, Ident}};
|
||||
#[cfg(any(feature = \"full\", feature = \"derive\"))]
|
||||
use gen::helper::fold::*;
|
||||
|
||||
|
src/attr.rs (46 changed lines)
@ -11,7 +11,7 @@ use punctuated::Punctuated;
|
||||
|
||||
use std::iter;
|
||||
|
||||
use proc_macro2::{Delimiter, Spacing, TokenStream, TokenTree};
|
||||
use proc_macro2::{Delimiter, Spacing, TokenStream, TokenTree, Ident};
|
||||
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::hash::{Hash, Hasher};
|
||||
@ -107,19 +107,19 @@ impl Attribute {
|
||||
};
|
||||
|
||||
if self.tts.is_empty() {
|
||||
return Some(Meta::Word(*name));
|
||||
return Some(Meta::Word(name.clone()));
|
||||
}
|
||||
|
||||
let tts = self.tts.clone().into_iter().collect::<Vec<_>>();
|
||||
|
||||
if tts.len() == 1 {
|
||||
if let Some(meta) = Attribute::extract_meta_list(*name, &tts[0]) {
|
||||
if let Some(meta) = Attribute::extract_meta_list(name.clone(), &tts[0]) {
|
||||
return Some(meta);
|
||||
}
|
||||
}
|
||||
|
||||
if tts.len() == 2 {
|
||||
if let Some(meta) = Attribute::extract_name_value(*name, &tts[0], &tts[1]) {
|
||||
if let Some(meta) = Attribute::extract_name_value(name.clone(), &tts[0], &tts[1]) {
|
||||
return Some(meta);
|
||||
}
|
||||
}
|
||||
@ -149,13 +149,13 @@ impl Attribute {
|
||||
|
||||
fn extract_name_value(ident: Ident, a: &TokenTree, b: &TokenTree) -> Option<Meta> {
|
||||
let a = match *a {
|
||||
TokenTree::Op(ref o) => o,
|
||||
TokenTree::Punct(ref o) => o,
|
||||
_ => return None,
|
||||
};
|
||||
if a.spacing() != Spacing::Alone {
|
||||
return None;
|
||||
}
|
||||
if a.op() != '=' {
|
||||
if a.as_char() != '=' {
|
||||
return None;
|
||||
}
|
||||
|
||||
@ -167,7 +167,7 @@ impl Attribute {
|
||||
lit: Lit::new(l.clone()),
|
||||
}))
|
||||
}
|
||||
TokenTree::Term(ref term) => match term.as_str() {
|
||||
TokenTree::Ident(ref term) => match &term.to_string()[..] {
|
||||
v @ "true" | v @ "false" => Some(Meta::NameValue(MetaNameValue {
|
||||
ident: ident,
|
||||
eq_token: Token![=]([a.span()]),
|
||||
@ -196,21 +196,20 @@ fn nested_meta_item_from_tokens(tts: &[TokenTree]) -> Option<(NestedMeta, &[Toke
|
||||
}
|
||||
}
|
||||
|
||||
TokenTree::Term(sym) => {
|
||||
let ident = Ident::new(sym.as_str(), sym.span());
|
||||
TokenTree::Ident(ref ident) => {
|
||||
if tts.len() >= 3 {
|
||||
if let Some(meta) = Attribute::extract_name_value(ident, &tts[1], &tts[2]) {
|
||||
if let Some(meta) = Attribute::extract_name_value(ident.clone(), &tts[1], &tts[2]) {
|
||||
return Some((NestedMeta::Meta(meta), &tts[3..]));
|
||||
}
|
||||
}
|
||||
|
||||
if tts.len() >= 2 {
|
||||
if let Some(meta) = Attribute::extract_meta_list(ident, &tts[1]) {
|
||||
if let Some(meta) = Attribute::extract_meta_list(ident.clone(), &tts[1]) {
|
||||
return Some((NestedMeta::Meta(meta), &tts[2..]));
|
||||
}
|
||||
}
|
||||
|
||||
Some((Meta::Word(ident).into(), &tts[1..]))
|
||||
Some((Meta::Word(ident.clone()).into(), &tts[1..]))
|
||||
}
|
||||
|
||||
_ => None,
|
||||
@ -227,11 +226,11 @@ fn list_of_nested_meta_items_from_tokens(
|
||||
let prev_comma = if first {
|
||||
first = false;
|
||||
None
|
||||
} else if let TokenTree::Op(ref op) = tts[0] {
|
||||
} else if let TokenTree::Punct(ref op) = tts[0] {
|
||||
if op.spacing() != Spacing::Alone {
|
||||
return None;
|
||||
}
|
||||
if op.op() != ',' {
|
||||
if op.as_char() != ',' {
|
||||
return None;
|
||||
}
|
||||
let tok = Token![,]([op.span()]);
|
||||
@ -336,9 +335,9 @@ impl Meta {
|
||||
/// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
|
||||
pub fn name(&self) -> Ident {
|
||||
match *self {
|
||||
Meta::Word(ref meta) => *meta,
|
||||
Meta::List(ref meta) => meta.ident,
|
||||
Meta::NameValue(ref meta) => meta.ident,
|
||||
Meta::Word(ref meta) => meta.clone(),
|
||||
Meta::List(ref meta) => meta.ident.clone(),
|
||||
Meta::NameValue(ref meta) => meta.ident.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -397,11 +396,11 @@ pub mod parsing {
|
||||
use super::*;
|
||||
use buffer::Cursor;
|
||||
use parse_error;
|
||||
use proc_macro2::{Literal, Op, Spacing, Span, TokenTree};
|
||||
use proc_macro2::{Literal, Punct, Spacing, Span, TokenTree};
|
||||
use synom::PResult;
|
||||
|
||||
fn eq(span: Span) -> TokenTree {
|
||||
let mut op = Op::new('=', Spacing::Alone);
|
||||
let mut op = Punct::new('=', Spacing::Alone);
|
||||
op.set_span(span);
|
||||
op.into()
|
||||
}
|
||||
@ -518,10 +517,11 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::ToTokens;
|
||||
|
||||
impl ToTokens for Attribute {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.pound_token.to_tokens(tokens);
|
||||
if let AttrStyle::Inner(ref b) = self.style {
|
||||
b.to_tokens(tokens);
|
||||
@ -534,7 +534,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for MetaList {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.nested.to_tokens(tokens);
|
||||
@ -543,7 +543,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for MetaNameValue {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
self.eq_token.to_tokens(tokens);
|
||||
self.lit.to_tokens(tokens);
|
||||
|
@ -129,8 +129,8 @@
|
||||
|
||||
#[cfg(feature = "proc-macro")]
|
||||
use proc_macro as pm;
|
||||
use proc_macro2::{Delimiter, Literal, Span, Term, TokenStream};
|
||||
use proc_macro2::{Group, Op, TokenTree};
|
||||
use proc_macro2::{Delimiter, Literal, Span, Ident, TokenStream};
|
||||
use proc_macro2::{Group, Punct, TokenTree};
|
||||
|
||||
use std::marker::PhantomData;
|
||||
use std::ptr;
|
||||
@ -143,8 +143,8 @@ use std::fmt::{self, Debug};
|
||||
enum Entry {
|
||||
// Mimicking types from proc-macro.
|
||||
Group(Span, Delimiter, TokenBuffer),
|
||||
Term(Term),
|
||||
Op(Op),
|
||||
Ident(Ident),
|
||||
Punct(Punct),
|
||||
Literal(Literal),
|
||||
// End entries contain a raw pointer to the entry from the containing
|
||||
// token tree, or null if this is the outermost level.
|
||||
@ -177,11 +177,11 @@ impl TokenBuffer {
|
||||
let mut seqs = Vec::new();
|
||||
for tt in stream {
|
||||
match tt {
|
||||
TokenTree::Term(sym) => {
|
||||
entries.push(Entry::Term(sym));
|
||||
TokenTree::Ident(sym) => {
|
||||
entries.push(Entry::Ident(sym));
|
||||
}
|
||||
TokenTree::Op(op) => {
|
||||
entries.push(Entry::Op(op));
|
||||
TokenTree::Punct(op) => {
|
||||
entries.push(Entry::Punct(op));
|
||||
}
|
||||
TokenTree::Literal(l) => {
|
||||
entries.push(Entry::Literal(l));
|
||||
@ -275,8 +275,8 @@ impl<'a> Cursor<'a> {
|
||||
pub fn empty() -> Self {
|
||||
// It's safe in this situation for us to put an `Entry` object in global
|
||||
// storage, despite it not actually being safe to send across threads
|
||||
// (`Term` is a reference into a thread-local table). This is because
|
||||
// this entry never includes a `Term` object.
|
||||
// (`Ident` is a reference into a thread-local table). This is because
|
||||
// this entry never includes a `Ident` object.
|
||||
//
|
||||
// This wrapper struct allows us to break the rules and put a `Sync`
|
||||
// object in global storage.
|
||||
@ -368,22 +368,22 @@ impl<'a> Cursor<'a> {
|
||||
None
|
||||
}
|
||||
|
||||
/// If the cursor is pointing at a `Term`, returns it along with a cursor
|
||||
/// If the cursor is pointing at a `Ident`, returns it along with a cursor
|
||||
/// pointing at the next `TokenTree`.
|
||||
pub fn term(mut self) -> Option<(Term, Cursor<'a>)> {
|
||||
pub fn term(mut self) -> Option<(Ident, Cursor<'a>)> {
|
||||
self.ignore_none();
|
||||
match *self.entry() {
|
||||
Entry::Term(term) => Some((term, unsafe { self.bump() })),
|
||||
Entry::Ident(ref term) => Some((term.clone(), unsafe { self.bump() })),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// If the cursor is pointing at an `Op`, returns it along with a cursor
|
||||
/// If the cursor is pointing at an `Punct`, returns it along with a cursor
|
||||
/// pointing at the next `TokenTree`.
|
||||
pub fn op(mut self) -> Option<(Op, Cursor<'a>)> {
|
||||
pub fn op(mut self) -> Option<(Punct, Cursor<'a>)> {
|
||||
self.ignore_none();
|
||||
match *self.entry() {
|
||||
Entry::Op(op) => Some((op, unsafe { self.bump() })),
|
||||
Entry::Punct(ref op) => Some((op.clone(), unsafe { self.bump() })),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@ -426,8 +426,8 @@ impl<'a> Cursor<'a> {
|
||||
TokenTree::from(g)
|
||||
}
|
||||
Entry::Literal(ref lit) => lit.clone().into(),
|
||||
Entry::Term(term) => term.into(),
|
||||
Entry::Op(op) => op.into(),
|
||||
Entry::Ident(ref term) => term.clone().into(),
|
||||
Entry::Punct(ref op) => op.clone().into(),
|
||||
Entry::End(..) => {
|
||||
return None;
|
||||
}
|
||||
@ -442,8 +442,8 @@ impl<'a> Cursor<'a> {
|
||||
match *self.entry() {
|
||||
Entry::Group(span, ..) => span,
|
||||
Entry::Literal(ref l) => l.span(),
|
||||
Entry::Term(t) => t.span(),
|
||||
Entry::Op(o) => o.span(),
|
||||
Entry::Ident(ref t) => t.span(),
|
||||
Entry::Punct(ref o) => o.span(),
|
||||
Entry::End(..) => Span::call_site(),
|
||||
}
|
||||
}
|
||||
|
src/data.rs (19 changed lines)
@ -6,6 +6,8 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use proc_macro2::Ident;
|
||||
|
||||
use super::*;
|
||||
use punctuated::Punctuated;
|
||||
|
||||
@ -322,10 +324,11 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
impl ToTokens for Variant {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(&self.attrs);
|
||||
self.ident.to_tokens(tokens);
|
||||
self.fields.to_tokens(tokens);
|
||||
@ -337,7 +340,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for FieldsNamed {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.brace_token.surround(tokens, |tokens| {
|
||||
self.named.to_tokens(tokens);
|
||||
});
|
||||
@ -345,7 +348,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for FieldsUnnamed {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.unnamed.to_tokens(tokens);
|
||||
});
|
||||
@ -353,7 +356,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for Field {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(&self.attrs);
|
||||
self.vis.to_tokens(tokens);
|
||||
if let Some(ref ident) = self.ident {
|
||||
@ -365,19 +368,19 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for VisPublic {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.pub_token.to_tokens(tokens)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for VisCrate {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.crate_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for VisRestricted {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.pub_token.to_tokens(tokens);
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
// XXX: If we have a path which is not "self" or "super" or
|
||||
|
@ -7,6 +7,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
use super::*;
|
||||
use proc_macro2::Ident;
|
||||
use punctuated::Punctuated;
|
||||
|
||||
ast_struct! {
|
||||
@ -163,10 +164,11 @@ pub mod parsing {
|
||||
mod printing {
|
||||
use super::*;
|
||||
use attr::FilterAttrs;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::ToTokens;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
impl ToTokens for DeriveInput {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
for attr in self.attrs.outer() {
|
||||
attr.to_tokens(tokens);
|
||||
}
|
||||
|
src/expr.rs (148 changed lines)
@ -7,7 +7,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
use super::*;
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use proc_macro2::{Span, TokenStream, Ident};
|
||||
use punctuated::Punctuated;
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::hash::{Hash, Hasher};
|
||||
@ -2178,7 +2178,7 @@ pub mod parsing {
|
||||
tuple!(syn!(Member), map!(punct!(:), Some), syn!(Expr))
|
||||
|
|
||||
map!(syn!(Ident), |name| (
|
||||
Member::Named(name),
|
||||
Member::Named(name.clone()),
|
||||
None,
|
||||
Expr::Path(ExprPath {
|
||||
attrs: Vec::new(),
|
||||
@ -2583,7 +2583,7 @@ pub mod parsing {
|
||||
let mut pat: Pat = PatIdent {
|
||||
by_ref: by_ref,
|
||||
mutability: mutability,
|
||||
ident: ident,
|
||||
ident: ident.clone(),
|
||||
subpat: None,
|
||||
}.into();
|
||||
if let Some(boxed) = boxed {
|
||||
@ -2821,13 +2821,13 @@ mod printing {
|
||||
use super::*;
|
||||
#[cfg(feature = "full")]
|
||||
use attr::FilterAttrs;
|
||||
use proc_macro2::Literal;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use proc_macro2::{Literal, TokenStream};
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
|
||||
// If the given expression is a bare `ExprStruct`, wraps it in parenthesis
|
||||
// before appending it to `Tokens`.
|
||||
#[cfg(feature = "full")]
|
||||
fn wrap_bare_struct(tokens: &mut Tokens, e: &Expr) {
|
||||
fn wrap_bare_struct(tokens: &mut TokenStream, e: &Expr) {
|
||||
if let Expr::Struct(_) = *e {
|
||||
token::Paren::default().surround(tokens, |tokens| {
|
||||
e.to_tokens(tokens);
|
||||
@ -2838,16 +2838,16 @@ mod printing {
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
fn attrs_to_tokens(attrs: &[Attribute], tokens: &mut Tokens) {
|
||||
fn attrs_to_tokens(attrs: &[Attribute], tokens: &mut TokenStream) {
|
||||
tokens.append_all(attrs.outer());
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "full"))]
|
||||
fn attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut Tokens) {}
|
||||
fn attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprBox {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.box_token.to_tokens(tokens);
|
||||
self.expr.to_tokens(tokens);
|
||||
@ -2856,7 +2856,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprInPlace {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.place.to_tokens(tokens);
|
||||
self.arrow_token.to_tokens(tokens);
|
||||
@ -2866,7 +2866,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprArray {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
self.elems.to_tokens(tokens);
|
||||
@ -2875,7 +2875,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ExprCall {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
self.func.to_tokens(tokens);
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
@ -2886,7 +2886,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprMethodCall {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.receiver.to_tokens(tokens);
|
||||
self.dot_token.to_tokens(tokens);
|
||||
@ -2900,7 +2900,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for MethodTurbofish {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.colon2_token.to_tokens(tokens);
|
||||
self.lt_token.to_tokens(tokens);
|
||||
self.args.to_tokens(tokens);
|
||||
@ -2910,7 +2910,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for GenericMethodArgument {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
GenericMethodArgument::Type(ref t) => t.to_tokens(tokens),
|
||||
GenericMethodArgument::Const(ref c) => c.to_tokens(tokens),
|
||||
@ -2920,7 +2920,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprTuple {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.elems.to_tokens(tokens);
|
||||
@ -2934,7 +2934,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ExprBinary {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
self.left.to_tokens(tokens);
|
||||
self.op.to_tokens(tokens);
|
||||
@ -2943,7 +2943,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ExprUnary {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
self.op.to_tokens(tokens);
|
||||
self.expr.to_tokens(tokens);
|
||||
@ -2951,14 +2951,14 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ExprLit {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
self.lit.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ExprCast {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
self.expr.to_tokens(tokens);
|
||||
self.as_token.to_tokens(tokens);
|
||||
@ -2968,7 +2968,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprType {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
self.expr.to_tokens(tokens);
|
||||
self.colon_token.to_tokens(tokens);
|
||||
@ -2977,7 +2977,7 @@ mod printing {
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
fn maybe_wrap_else(tokens: &mut Tokens, else_: &Option<(Token![else], Box<Expr>)>) {
|
||||
fn maybe_wrap_else(tokens: &mut TokenStream, else_: &Option<(Token![else], Box<Expr>)>) {
|
||||
if let Some((ref else_token, ref else_)) = *else_ {
|
||||
else_token.to_tokens(tokens);
|
||||
|
||||
@ -2998,7 +2998,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprIf {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.if_token.to_tokens(tokens);
|
||||
wrap_bare_struct(tokens, &self.cond);
|
||||
@ -3009,7 +3009,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprIfLet {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.if_token.to_tokens(tokens);
|
||||
self.let_token.to_tokens(tokens);
|
||||
@ -3023,7 +3023,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprWhile {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.label.to_tokens(tokens);
|
||||
self.while_token.to_tokens(tokens);
|
||||
@ -3034,7 +3034,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprWhileLet {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.label.to_tokens(tokens);
|
||||
self.while_token.to_tokens(tokens);
|
||||
@ -3048,7 +3048,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprForLoop {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.label.to_tokens(tokens);
|
||||
self.for_token.to_tokens(tokens);
|
||||
@ -3061,7 +3061,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprLoop {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.label.to_tokens(tokens);
|
||||
self.loop_token.to_tokens(tokens);
|
||||
@ -3071,7 +3071,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprMatch {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.match_token.to_tokens(tokens);
|
||||
wrap_bare_struct(tokens, &self.expr);
|
||||
@ -3091,7 +3091,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprCatch {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.do_token.to_tokens(tokens);
|
||||
self.catch_token.to_tokens(tokens);
|
||||
@ -3101,7 +3101,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprYield {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.yield_token.to_tokens(tokens);
|
||||
self.expr.to_tokens(tokens);
|
||||
@ -3110,7 +3110,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprClosure {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.movability.to_tokens(tokens);
|
||||
self.capture.to_tokens(tokens);
|
||||
@ -3136,7 +3136,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprUnsafe {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.unsafe_token.to_tokens(tokens);
|
||||
self.block.to_tokens(tokens);
|
||||
@ -3145,7 +3145,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprBlock {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.block.to_tokens(tokens);
|
||||
}
|
||||
@ -3153,7 +3153,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprAssign {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.left.to_tokens(tokens);
|
||||
self.eq_token.to_tokens(tokens);
|
||||
@ -3163,7 +3163,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprAssignOp {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.left.to_tokens(tokens);
|
||||
self.op.to_tokens(tokens);
|
||||
@ -3173,7 +3173,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprField {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.base.to_tokens(tokens);
|
||||
self.dot_token.to_tokens(tokens);
|
||||
@ -3182,16 +3182,16 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for Member {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
Member::Named(ident) => ident.to_tokens(tokens),
|
||||
Member::Named(ref ident) => ident.to_tokens(tokens),
|
||||
Member::Unnamed(ref index) => index.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Index {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let mut lit = Literal::i64_unsuffixed(i64::from(self.index));
|
||||
lit.set_span(self.span);
|
||||
tokens.append(lit);
|
||||
@ -3199,7 +3199,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ExprIndex {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
self.expr.to_tokens(tokens);
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
@ -3210,7 +3210,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprRange {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.from.to_tokens(tokens);
|
||||
match self.limits {
|
||||
@ -3222,7 +3222,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ExprPath {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
::PathTokens(&self.qself, &self.path).to_tokens(tokens)
|
||||
}
|
||||
@ -3230,7 +3230,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprReference {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.and_token.to_tokens(tokens);
|
||||
self.mutability.to_tokens(tokens);
|
||||
@ -3240,7 +3240,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprBreak {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.break_token.to_tokens(tokens);
|
||||
self.label.to_tokens(tokens);
|
||||
@ -3250,7 +3250,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprContinue {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.continue_token.to_tokens(tokens);
|
||||
self.label.to_tokens(tokens);
|
||||
@ -3259,7 +3259,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprReturn {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.return_token.to_tokens(tokens);
|
||||
self.expr.to_tokens(tokens);
|
||||
@ -3268,7 +3268,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprMacro {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.mac.to_tokens(tokens);
|
||||
}
|
||||
@ -3276,7 +3276,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprStruct {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.path.to_tokens(tokens);
|
||||
self.brace_token.surround(tokens, |tokens| {
|
||||
@ -3291,7 +3291,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprRepeat {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
self.expr.to_tokens(tokens);
|
||||
@ -3303,7 +3303,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprGroup {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
self.group_token.surround(tokens, |tokens| {
|
||||
self.expr.to_tokens(tokens);
|
||||
@ -3312,7 +3312,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ExprParen {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
attrs_to_tokens(&self.attrs, tokens);
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.expr.to_tokens(tokens);
|
||||
@ -3322,7 +3322,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for ExprTry {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.expr.to_tokens(tokens);
|
||||
self.question_token.to_tokens(tokens);
|
||||
@ -3330,14 +3330,14 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ExprVerbatim {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.tts.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for Label {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.name.to_tokens(tokens);
|
||||
self.colon_token.to_tokens(tokens);
|
||||
}
|
||||
@ -3345,7 +3345,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for FieldValue {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.member.to_tokens(tokens);
|
||||
if let Some(ref colon_token) = self.colon_token {
|
||||
@ -3357,7 +3357,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for Arm {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(&self.attrs);
|
||||
self.leading_vert.to_tokens(tokens);
|
||||
self.pats.to_tokens(tokens);
|
||||
@ -3373,14 +3373,14 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatWild {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.underscore_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatIdent {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.by_ref.to_tokens(tokens);
|
||||
self.mutability.to_tokens(tokens);
|
||||
self.ident.to_tokens(tokens);
|
||||
@ -3393,7 +3393,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatStruct {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.path.to_tokens(tokens);
|
||||
self.brace_token.surround(tokens, |tokens| {
|
||||
self.fields.to_tokens(tokens);
|
||||
@ -3408,7 +3408,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatTupleStruct {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.path.to_tokens(tokens);
|
||||
self.pat.to_tokens(tokens);
|
||||
}
|
||||
@ -3416,14 +3416,14 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatPath {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
::PathTokens(&self.qself, &self.path).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatTuple {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.front.to_tokens(tokens);
|
||||
if let Some(ref dot2_token) = self.dot2_token {
|
||||
@ -3445,7 +3445,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatBox {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.box_token.to_tokens(tokens);
|
||||
self.pat.to_tokens(tokens);
|
||||
}
|
||||
@ -3453,7 +3453,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatRef {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.and_token.to_tokens(tokens);
|
||||
self.mutability.to_tokens(tokens);
|
||||
self.pat.to_tokens(tokens);
|
||||
@ -3462,14 +3462,14 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatLit {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.expr.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatRange {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.lo.to_tokens(tokens);
|
||||
match self.limits {
|
||||
RangeLimits::HalfOpen(ref t) => t.to_tokens(tokens),
|
||||
@ -3481,7 +3481,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatSlice {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
// XXX: This is a mess, and it will be so easy to screw it up. How
|
||||
// do we make this correct itself better?
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
@ -3516,21 +3516,21 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatMacro {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.mac.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for PatVerbatim {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.tts.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for FieldPat {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
if let Some(ref colon_token) = self.colon_token {
|
||||
self.member.to_tokens(tokens);
|
||||
colon_token.to_tokens(tokens);
|
||||
@ -3541,7 +3541,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for Block {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.brace_token.surround(tokens, |tokens| {
|
||||
tokens.append_all(&self.stmts);
|
||||
});
|
||||
@ -3550,7 +3550,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for Stmt {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
Stmt::Local(ref local) => local.to_tokens(tokens),
|
||||
Stmt::Item(ref item) => item.to_tokens(tokens),
|
||||
@ -3565,7 +3565,7 @@ mod printing {
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
impl ToTokens for Local {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.let_token.to_tokens(tokens);
|
||||
self.pats.to_tokens(tokens);
|
||||
|
@ -111,10 +111,11 @@ pub mod parsing {
|
||||
mod printing {
|
||||
use super::*;
|
||||
use attr::FilterAttrs;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
impl ToTokens for File {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.inner());
|
||||
tokens.append_all(&self.items);
|
||||
}
|
||||
|
@ -9,7 +9,7 @@
|
||||
use *;
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
use token::{Brace, Bracket, Paren, Group};
|
||||
use proc_macro2::Span;
|
||||
use proc_macro2::{Span, Ident};
|
||||
#[cfg(any(feature = "full", feature = "derive"))]
|
||||
use gen::helper::fold::*;
|
||||
|
||||
@ -191,8 +191,6 @@ fn fold_generic_method_argument(&mut self, i: GenericMethodArgument) -> GenericM
|
||||
fn fold_generic_param(&mut self, i: GenericParam) -> GenericParam { fold_generic_param(self, i) }
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
fn fold_generics(&mut self, i: Generics) -> Generics { fold_generics(self, i) }
|
||||
|
||||
fn fold_ident(&mut self, i: Ident) -> Ident { fold_ident(self, i) }
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
fn fold_impl_item(&mut self, i: ImplItem) -> ImplItem { fold_impl_item(self, i) }
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
@ -538,7 +536,7 @@ pub fn fold_bare_fn_arg_name<V: Fold + ?Sized>(_visitor: &mut V, _i: BareFnArgNa
|
||||
match _i {
|
||||
BareFnArgName::Named(_binding_0, ) => {
|
||||
BareFnArgName::Named (
|
||||
_visitor.fold_ident(_binding_0),
|
||||
_binding_0,
|
||||
)
|
||||
}
|
||||
BareFnArgName::Wild(_binding_0, ) => {
|
||||
@ -696,7 +694,7 @@ pub fn fold_bin_op<V: Fold + ?Sized>(_visitor: &mut V, _i: BinOp) -> BinOp {
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn fold_binding<V: Fold + ?Sized>(_visitor: &mut V, _i: Binding) -> Binding {
|
||||
Binding {
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
eq_token: Token ! [ = ](tokens_helper(_visitor, &(_i . eq_token).0)),
|
||||
ty: _visitor.fold_type(_i . ty),
|
||||
}
|
||||
@ -722,7 +720,7 @@ pub fn fold_const_param<V: Fold + ?Sized>(_visitor: &mut V, _i: ConstParam) -> C
|
||||
ConstParam {
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
const_token: Token ! [ const ](tokens_helper(_visitor, &(_i . const_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
colon_token: Token ! [ : ](tokens_helper(_visitor, &(_i . colon_token).0)),
|
||||
ty: _visitor.fold_type(_i . ty),
|
||||
eq_token: (_i . eq_token).map(|it| { Token ! [ = ](tokens_helper(_visitor, &(it).0)) }),
|
||||
@ -777,7 +775,7 @@ pub fn fold_derive_input<V: Fold + ?Sized>(_visitor: &mut V, _i: DeriveInput) ->
|
||||
DeriveInput {
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
generics: _visitor.fold_generics(_i . generics),
|
||||
data: _visitor.fold_data(_i . data),
|
||||
}
|
||||
@ -1209,7 +1207,7 @@ pub fn fold_expr_method_call<V: Fold + ?Sized>(_visitor: &mut V, _i: ExprMethodC
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
receiver: Box::new(_visitor.fold_expr(* _i . receiver)),
|
||||
dot_token: Token ! [ . ](tokens_helper(_visitor, &(_i . dot_token).0)),
|
||||
method: _visitor.fold_ident(_i . method),
|
||||
method: _i . method,
|
||||
turbofish: (_i . turbofish).map(|it| { _visitor.fold_method_turbofish(it) }),
|
||||
paren_token: Paren(tokens_helper(_visitor, &(_i . paren_token).0)),
|
||||
args: FoldHelper::lift(_i . args, |it| { _visitor.fold_expr(it) }),
|
||||
@ -1361,7 +1359,7 @@ pub fn fold_field<V: Fold + ?Sized>(_visitor: &mut V, _i: Field) -> Field {
|
||||
Field {
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
ident: (_i . ident).map(|it| { _visitor.fold_ident(it) }),
|
||||
ident: _i . ident,
|
||||
colon_token: (_i . colon_token).map(|it| { Token ! [ : ](tokens_helper(_visitor, &(it).0)) }),
|
||||
ty: _visitor.fold_type(_i . ty),
|
||||
}
|
||||
@ -1493,7 +1491,7 @@ pub fn fold_foreign_item_fn<V: Fold + ?Sized>(_visitor: &mut V, _i: ForeignItemF
|
||||
ForeignItemFn {
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
decl: Box::new(_visitor.fold_fn_decl(* _i . decl)),
|
||||
semi_token: Token ! [ ; ](tokens_helper(_visitor, &(_i . semi_token).0)),
|
||||
}
|
||||
@ -1505,7 +1503,7 @@ pub fn fold_foreign_item_static<V: Fold + ?Sized>(_visitor: &mut V, _i: ForeignI
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
static_token: Token ! [ static ](tokens_helper(_visitor, &(_i . static_token).0)),
|
||||
mutability: (_i . mutability).map(|it| { Token ! [ mut ](tokens_helper(_visitor, &(it).0)) }),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
colon_token: Token ! [ : ](tokens_helper(_visitor, &(_i . colon_token).0)),
|
||||
ty: Box::new(_visitor.fold_type(* _i . ty)),
|
||||
semi_token: Token ! [ ; ](tokens_helper(_visitor, &(_i . semi_token).0)),
|
||||
@ -1517,7 +1515,7 @@ pub fn fold_foreign_item_type<V: Fold + ?Sized>(_visitor: &mut V, _i: ForeignIte
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
type_token: Token ! [ type ](tokens_helper(_visitor, &(_i . type_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
semi_token: Token ! [ ; ](tokens_helper(_visitor, &(_i . semi_token).0)),
|
||||
}
|
||||
}
|
||||
@ -1633,7 +1631,7 @@ pub fn fold_impl_item_const<V: Fold + ?Sized>(_visitor: &mut V, _i: ImplItemCons
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
defaultness: (_i . defaultness).map(|it| { Token ! [ default ](tokens_helper(_visitor, &(it).0)) }),
|
||||
const_token: Token ! [ const ](tokens_helper(_visitor, &(_i . const_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
colon_token: Token ! [ : ](tokens_helper(_visitor, &(_i . colon_token).0)),
|
||||
ty: _visitor.fold_type(_i . ty),
|
||||
eq_token: Token ! [ = ](tokens_helper(_visitor, &(_i . eq_token).0)),
|
||||
@ -1666,7 +1664,7 @@ pub fn fold_impl_item_type<V: Fold + ?Sized>(_visitor: &mut V, _i: ImplItemType)
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
defaultness: (_i . defaultness).map(|it| { Token ! [ default ](tokens_helper(_visitor, &(it).0)) }),
|
||||
type_token: Token ! [ type ](tokens_helper(_visitor, &(_i . type_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
generics: _visitor.fold_generics(_i . generics),
|
||||
eq_token: Token ! [ = ](tokens_helper(_visitor, &(_i . eq_token).0)),
|
||||
ty: _visitor.fold_type(_i . ty),
|
||||
@ -1777,7 +1775,7 @@ pub fn fold_item_const<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemConst) -> Ite
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
const_token: Token ! [ const ](tokens_helper(_visitor, &(_i . const_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
colon_token: Token ! [ : ](tokens_helper(_visitor, &(_i . colon_token).0)),
|
||||
ty: Box::new(_visitor.fold_type(* _i . ty)),
|
||||
eq_token: Token ! [ = ](tokens_helper(_visitor, &(_i . eq_token).0)),
|
||||
@ -1791,7 +1789,7 @@ pub fn fold_item_enum<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemEnum) -> ItemE
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
enum_token: Token ! [ enum ](tokens_helper(_visitor, &(_i . enum_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
generics: _visitor.fold_generics(_i . generics),
|
||||
brace_token: Brace(tokens_helper(_visitor, &(_i . brace_token).0)),
|
||||
variants: FoldHelper::lift(_i . variants, |it| { _visitor.fold_variant(it) }),
|
||||
@ -1804,10 +1802,10 @@ pub fn fold_item_extern_crate<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemExtern
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
extern_token: Token ! [ extern ](tokens_helper(_visitor, &(_i . extern_token).0)),
|
||||
crate_token: Token ! [ crate ](tokens_helper(_visitor, &(_i . crate_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
rename: (_i . rename).map(|it| { (
|
||||
Token ! [ as ](tokens_helper(_visitor, &(( it ) . 0).0)),
|
||||
_visitor.fold_ident(( it ) . 1),
|
||||
( it ) . 1,
|
||||
) }),
|
||||
semi_token: Token ! [ ; ](tokens_helper(_visitor, &(_i . semi_token).0)),
|
||||
}
|
||||
@ -1820,7 +1818,7 @@ pub fn fold_item_fn<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemFn) -> ItemFn {
|
||||
constness: (_i . constness).map(|it| { Token ! [ const ](tokens_helper(_visitor, &(it).0)) }),
|
||||
unsafety: (_i . unsafety).map(|it| { Token ! [ unsafe ](tokens_helper(_visitor, &(it).0)) }),
|
||||
abi: (_i . abi).map(|it| { _visitor.fold_abi(it) }),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
decl: Box::new(_visitor.fold_fn_decl(* _i . decl)),
|
||||
block: Box::new(_visitor.fold_block(* _i . block)),
|
||||
}
|
||||
@ -1856,7 +1854,7 @@ pub fn fold_item_impl<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemImpl) -> ItemI
|
||||
pub fn fold_item_macro<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemMacro) -> ItemMacro {
|
||||
ItemMacro {
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
ident: (_i . ident).map(|it| { _visitor.fold_ident(it) }),
|
||||
ident: _i . ident,
|
||||
mac: _visitor.fold_macro(_i . mac),
|
||||
semi_token: (_i . semi_token).map(|it| { Token ! [ ; ](tokens_helper(_visitor, &(it).0)) }),
|
||||
}
|
||||
@ -1867,7 +1865,7 @@ pub fn fold_item_macro2<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemMacro2) -> I
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
macro_token: Token ! [ macro ](tokens_helper(_visitor, &(_i . macro_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
paren_token: Paren(tokens_helper(_visitor, &(_i . paren_token).0)),
|
||||
args: _i . args,
|
||||
brace_token: Brace(tokens_helper(_visitor, &(_i . brace_token).0)),
|
||||
@ -1880,7 +1878,7 @@ pub fn fold_item_mod<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemMod) -> ItemMod
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
mod_token: Token ! [ mod ](tokens_helper(_visitor, &(_i . mod_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
content: (_i . content).map(|it| { (
|
||||
Brace(tokens_helper(_visitor, &(( it ) . 0).0)),
|
||||
FoldHelper::lift(( it ) . 1, |it| { _visitor.fold_item(it) }),
|
||||
@ -1895,7 +1893,7 @@ pub fn fold_item_static<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemStatic) -> I
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
static_token: Token ! [ static ](tokens_helper(_visitor, &(_i . static_token).0)),
|
||||
mutability: (_i . mutability).map(|it| { Token ! [ mut ](tokens_helper(_visitor, &(it).0)) }),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
colon_token: Token ! [ : ](tokens_helper(_visitor, &(_i . colon_token).0)),
|
||||
ty: Box::new(_visitor.fold_type(* _i . ty)),
|
||||
eq_token: Token ! [ = ](tokens_helper(_visitor, &(_i . eq_token).0)),
|
||||
@ -1909,7 +1907,7 @@ pub fn fold_item_struct<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemStruct) -> I
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
struct_token: Token ! [ struct ](tokens_helper(_visitor, &(_i . struct_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
generics: _visitor.fold_generics(_i . generics),
|
||||
fields: _visitor.fold_fields(_i . fields),
|
||||
semi_token: (_i . semi_token).map(|it| { Token ! [ ; ](tokens_helper(_visitor, &(it).0)) }),
|
||||
@ -1923,7 +1921,7 @@ pub fn fold_item_trait<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemTrait) -> Ite
|
||||
unsafety: (_i . unsafety).map(|it| { Token ! [ unsafe ](tokens_helper(_visitor, &(it).0)) }),
|
||||
auto_token: (_i . auto_token).map(|it| { Token ! [ auto ](tokens_helper(_visitor, &(it).0)) }),
|
||||
trait_token: Token ! [ trait ](tokens_helper(_visitor, &(_i . trait_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
generics: _visitor.fold_generics(_i . generics),
|
||||
colon_token: (_i . colon_token).map(|it| { Token ! [ : ](tokens_helper(_visitor, &(it).0)) }),
|
||||
supertraits: FoldHelper::lift(_i . supertraits, |it| { _visitor.fold_type_param_bound(it) }),
|
||||
@ -1937,7 +1935,7 @@ pub fn fold_item_type<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemType) -> ItemT
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
type_token: Token ! [ type ](tokens_helper(_visitor, &(_i . type_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
generics: _visitor.fold_generics(_i . generics),
|
||||
eq_token: Token ! [ = ](tokens_helper(_visitor, &(_i . eq_token).0)),
|
||||
ty: Box::new(_visitor.fold_type(* _i . ty)),
|
||||
@ -1950,7 +1948,7 @@ pub fn fold_item_union<V: Fold + ?Sized>(_visitor: &mut V, _i: ItemUnion) -> Ite
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
vis: _visitor.fold_visibility(_i . vis),
|
||||
union_token: Token ! [ union ](tokens_helper(_visitor, &(_i . union_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
generics: _visitor.fold_generics(_i . generics),
|
||||
fields: _visitor.fold_fields_named(_i . fields),
|
||||
}
|
||||
@ -2097,7 +2095,7 @@ pub fn fold_member<V: Fold + ?Sized>(_visitor: &mut V, _i: Member) -> Member {
|
||||
match _i {
|
||||
Member::Named(_binding_0, ) => {
|
||||
Member::Named (
|
||||
_visitor.fold_ident(_binding_0),
|
||||
_binding_0,
|
||||
)
|
||||
}
|
||||
Member::Unnamed(_binding_0, ) => {
|
||||
@ -2112,7 +2110,7 @@ pub fn fold_meta<V: Fold + ?Sized>(_visitor: &mut V, _i: Meta) -> Meta {
|
||||
match _i {
|
||||
Meta::Word(_binding_0, ) => {
|
||||
Meta::Word (
|
||||
_visitor.fold_ident(_binding_0),
|
||||
_binding_0,
|
||||
)
|
||||
}
|
||||
Meta::List(_binding_0, ) => {
|
||||
@ -2130,7 +2128,7 @@ pub fn fold_meta<V: Fold + ?Sized>(_visitor: &mut V, _i: Meta) -> Meta {
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn fold_meta_list<V: Fold + ?Sized>(_visitor: &mut V, _i: MetaList) -> MetaList {
|
||||
MetaList {
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
paren_token: Paren(tokens_helper(_visitor, &(_i . paren_token).0)),
|
||||
nested: FoldHelper::lift(_i . nested, |it| { _visitor.fold_nested_meta(it) }),
|
||||
}
|
||||
@ -2138,7 +2136,7 @@ pub fn fold_meta_list<V: Fold + ?Sized>(_visitor: &mut V, _i: MetaList) -> MetaL
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn fold_meta_name_value<V: Fold + ?Sized>(_visitor: &mut V, _i: MetaNameValue) -> MetaNameValue {
|
||||
MetaNameValue {
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
eq_token: Token ! [ = ](tokens_helper(_visitor, &(_i . eq_token).0)),
|
||||
lit: _visitor.fold_lit(_i . lit),
|
||||
}
|
||||
@ -2149,7 +2147,7 @@ pub fn fold_method_sig<V: Fold + ?Sized>(_visitor: &mut V, _i: MethodSig) -> Met
|
||||
constness: (_i . constness).map(|it| { Token ! [ const ](tokens_helper(_visitor, &(it).0)) }),
|
||||
unsafety: (_i . unsafety).map(|it| { Token ! [ unsafe ](tokens_helper(_visitor, &(it).0)) }),
|
||||
abi: (_i . abi).map(|it| { _visitor.fold_abi(it) }),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
decl: _visitor.fold_fn_decl(_i . decl),
|
||||
}
|
||||
}
|
||||
@ -2267,7 +2265,7 @@ pub fn fold_pat_ident<V: Fold + ?Sized>(_visitor: &mut V, _i: PatIdent) -> PatId
|
||||
PatIdent {
|
||||
by_ref: (_i . by_ref).map(|it| { Token ! [ ref ](tokens_helper(_visitor, &(it).0)) }),
|
||||
mutability: (_i . mutability).map(|it| { Token ! [ mut ](tokens_helper(_visitor, &(it).0)) }),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
subpat: (_i . subpat).map(|it| { (
|
||||
Token ! [ @ ](tokens_helper(_visitor, &(( it ) . 0).0)),
|
||||
Box::new(_visitor.fold_pat(* ( it ) . 1)),
|
||||
@ -2384,7 +2382,7 @@ pub fn fold_path_arguments<V: Fold + ?Sized>(_visitor: &mut V, _i: PathArguments
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn fold_path_segment<V: Fold + ?Sized>(_visitor: &mut V, _i: PathSegment) -> PathSegment {
|
||||
PathSegment {
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
arguments: _visitor.fold_path_arguments(_i . arguments),
|
||||
}
|
||||
}
|
||||
@ -2535,7 +2533,7 @@ pub fn fold_trait_item_const<V: Fold + ?Sized>(_visitor: &mut V, _i: TraitItemCo
|
||||
TraitItemConst {
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
const_token: Token ! [ const ](tokens_helper(_visitor, &(_i . const_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
colon_token: Token ! [ : ](tokens_helper(_visitor, &(_i . colon_token).0)),
|
||||
ty: _visitor.fold_type(_i . ty),
|
||||
default: (_i . default).map(|it| { (
|
||||
@ -2567,7 +2565,7 @@ pub fn fold_trait_item_type<V: Fold + ?Sized>(_visitor: &mut V, _i: TraitItemTyp
|
||||
TraitItemType {
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
type_token: Token ! [ type ](tokens_helper(_visitor, &(_i . type_token).0)),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
generics: _visitor.fold_generics(_i . generics),
|
||||
colon_token: (_i . colon_token).map(|it| { Token ! [ : ](tokens_helper(_visitor, &(it).0)) }),
|
||||
bounds: FoldHelper::lift(_i . bounds, |it| { _visitor.fold_type_param_bound(it) }),
|
||||
@ -2722,7 +2720,7 @@ pub fn fold_type_never<V: Fold + ?Sized>(_visitor: &mut V, _i: TypeNever) -> Typ
|
||||
pub fn fold_type_param<V: Fold + ?Sized>(_visitor: &mut V, _i: TypeParam) -> TypeParam {
|
||||
TypeParam {
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
colon_token: (_i . colon_token).map(|it| { Token ! [ : ](tokens_helper(_visitor, &(it).0)) }),
|
||||
bounds: FoldHelper::lift(_i . bounds, |it| { _visitor.fold_type_param_bound(it) }),
|
||||
eq_token: (_i . eq_token).map(|it| { Token ! [ = ](tokens_helper(_visitor, &(it).0)) }),
|
||||
@ -2839,13 +2837,13 @@ pub fn fold_use_group<V: Fold + ?Sized>(_visitor: &mut V, _i: UseGroup) -> UseGr
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn fold_use_name<V: Fold + ?Sized>(_visitor: &mut V, _i: UseName) -> UseName {
|
||||
UseName {
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
}
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn fold_use_path<V: Fold + ?Sized>(_visitor: &mut V, _i: UsePath) -> UsePath {
|
||||
UsePath {
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
colon2_token: Token ! [ :: ](tokens_helper(_visitor, &(_i . colon2_token).0)),
|
||||
tree: Box::new(_visitor.fold_use_tree(* _i . tree)),
|
||||
}
|
||||
@ -2853,9 +2851,9 @@ pub fn fold_use_path<V: Fold + ?Sized>(_visitor: &mut V, _i: UsePath) -> UsePath
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn fold_use_rename<V: Fold + ?Sized>(_visitor: &mut V, _i: UseRename) -> UseRename {
|
||||
UseRename {
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
as_token: Token ! [ as ](tokens_helper(_visitor, &(_i . as_token).0)),
|
||||
rename: _visitor.fold_ident(_i . rename),
|
||||
rename: _i . rename,
|
||||
}
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
@ -2892,7 +2890,7 @@ pub fn fold_use_tree<V: Fold + ?Sized>(_visitor: &mut V, _i: UseTree) -> UseTree
|
||||
pub fn fold_variant<V: Fold + ?Sized>(_visitor: &mut V, _i: Variant) -> Variant {
|
||||
Variant {
|
||||
attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
|
||||
ident: _visitor.fold_ident(_i . ident),
|
||||
ident: _i . ident,
|
||||
fields: _visitor.fold_fields(_i . fields),
|
||||
discriminant: (_i . discriminant).map(|it| { (
|
||||
Token ! [ = ](tokens_helper(_visitor, &(( it ) . 0).0)),
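The fold hunks above all follow one pattern: the generated Fold pass used to route every name through `_visitor.fold_ident(...)`, and after this change it moves the `proc_macro2::Ident` through unchanged (`ident: _i . ident`). Below is a minimal sketch of how a downstream Fold implementation could still rewrite names under the new generated code, by overriding an item-level method instead of an ident hook. It is not part of this commit; it assumes the syn build under review with its `full` and `fold` features enabled plus the proc-macro2 0.4 / quote 0.6 pins from the manifest, and `AddSuffix` is a made-up example type.

extern crate proc_macro2;
extern crate syn;

use proc_macro2::Ident;
use syn::fold::{self, Fold};
use syn::ItemFn;

// Appends a suffix to every free function name. Because the generated fold
// now passes `ident` through untouched, the rename happens in this item-level
// override rather than in a `fold_ident` hook.
struct AddSuffix<'a>(&'a str);

impl<'a> Fold for AddSuffix<'a> {
    fn fold_item_fn(&mut self, mut i: ItemFn) -> ItemFn {
        let new_name = format!("{}{}", i.ident, self.0);
        let span = i.ident.span();
        i.ident = Ident::new(&new_name, span);
        // Delegate to the generated free function for the remaining fields.
        fold::fold_item_fn(self, i)
    }
}

fn main() {
    let file = syn::parse_file("fn demo() {}").expect("parse");
    let mut folder = AddSuffix("_v2");
    let folded = folder.fold_file(file);
    match folded.items[0] {
        syn::Item::Fn(ref f) => println!("renamed to `{}`", f.ident),
        _ => unreachable!(),
    }
}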
@ -189,8 +189,6 @@ fn visit_generic_method_argument(&mut self, i: &'ast GenericMethodArgument) { vi
fn visit_generic_param(&mut self, i: &'ast GenericParam) { visit_generic_param(self, i) }
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
fn visit_generics(&mut self, i: &'ast Generics) { visit_generics(self, i) }

fn visit_ident(&mut self, i: &'ast Ident) { visit_ident(self, i) }
# [ cfg ( feature = "full" ) ]
fn visit_impl_item(&mut self, i: &'ast ImplItem) { visit_impl_item(self, i) }
# [ cfg ( feature = "full" ) ]
@ -491,7 +489,7 @@ pub fn visit_bare_fn_arg<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'
|
||||
pub fn visit_bare_fn_arg_name<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast BareFnArgName) {
|
||||
match *_i {
|
||||
BareFnArgName::Named(ref _binding_0, ) => {
|
||||
_visitor.visit_ident(_binding_0);
|
||||
// Skipped field _binding_0;
|
||||
}
|
||||
BareFnArgName::Wild(ref _binding_0, ) => {
|
||||
tokens_helper(_visitor, &(_binding_0).0);
|
||||
@ -589,7 +587,7 @@ pub fn visit_bin_op<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast B
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_binding<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Binding) {
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . eq_token).0);
|
||||
_visitor.visit_type(& _i . ty);
|
||||
}
|
||||
@ -609,7 +607,7 @@ pub fn visit_bound_lifetimes<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i
|
||||
pub fn visit_const_param<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast ConstParam) {
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
tokens_helper(_visitor, &(& _i . const_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . colon_token).0);
|
||||
_visitor.visit_type(& _i . ty);
|
||||
if let Some(ref it) = _i . eq_token { tokens_helper(_visitor, &(it).0) };
|
||||
@ -650,7 +648,7 @@ pub fn visit_data_union<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'a
|
||||
pub fn visit_derive_input<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast DeriveInput) {
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics(& _i . generics);
|
||||
_visitor.visit_data(& _i . data);
|
||||
}
|
||||
@ -954,7 +952,7 @@ pub fn visit_expr_method_call<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_expr(& * _i . receiver);
|
||||
tokens_helper(_visitor, &(& _i . dot_token).0);
|
||||
_visitor.visit_ident(& _i . method);
|
||||
// Skipped field _i . method;
|
||||
if let Some(ref it) = _i . turbofish { _visitor.visit_method_turbofish(it) };
|
||||
tokens_helper(_visitor, &(& _i . paren_token).0);
|
||||
for el in Punctuated::pairs(& _i . args) { let it = el.value(); _visitor.visit_expr(it) };
|
||||
@ -1072,7 +1070,7 @@ pub fn visit_expr_yield<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'a
|
||||
pub fn visit_field<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Field) {
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
if let Some(ref it) = _i . ident { _visitor.visit_ident(it) };
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref it) = _i . colon_token { tokens_helper(_visitor, &(it).0) };
|
||||
_visitor.visit_type(& _i . ty);
|
||||
}
|
||||
@ -1168,7 +1166,7 @@ pub fn visit_foreign_item<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &
|
||||
pub fn visit_foreign_item_fn<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast ForeignItemFn) {
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_fn_decl(& * _i . decl);
|
||||
tokens_helper(_visitor, &(& _i . semi_token).0);
|
||||
}
|
||||
@ -1178,7 +1176,7 @@ pub fn visit_foreign_item_static<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . static_token).0);
|
||||
if let Some(ref it) = _i . mutability { tokens_helper(_visitor, &(it).0) };
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . colon_token).0);
|
||||
_visitor.visit_type(& * _i . ty);
|
||||
tokens_helper(_visitor, &(& _i . semi_token).0);
|
||||
@ -1188,7 +1186,7 @@ pub fn visit_foreign_item_type<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V,
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . type_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . semi_token).0);
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
@ -1244,10 +1242,6 @@ pub fn visit_generics<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast
if let Some(ref it) = _i . gt_token { tokens_helper(_visitor, &(it).0) };
if let Some(ref it) = _i . where_clause { _visitor.visit_where_clause(it) };
}

pub fn visit_ident<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Ident) {
// Skipped field _i . term;
}
# [ cfg ( feature = "full" ) ]
pub fn visit_impl_item<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast ImplItem) {
match *_i {
@ -1274,7 +1268,7 @@ pub fn visit_impl_item_const<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
if let Some(ref it) = _i . defaultness { tokens_helper(_visitor, &(it).0) };
|
||||
tokens_helper(_visitor, &(& _i . const_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . colon_token).0);
|
||||
_visitor.visit_type(& _i . ty);
|
||||
tokens_helper(_visitor, &(& _i . eq_token).0);
|
||||
@ -1301,7 +1295,7 @@ pub fn visit_impl_item_type<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i:
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
if let Some(ref it) = _i . defaultness { tokens_helper(_visitor, &(it).0) };
|
||||
tokens_helper(_visitor, &(& _i . type_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics(& _i . generics);
|
||||
tokens_helper(_visitor, &(& _i . eq_token).0);
|
||||
_visitor.visit_type(& _i . ty);
|
||||
@ -1374,7 +1368,7 @@ pub fn visit_item_const<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'a
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . const_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . colon_token).0);
|
||||
_visitor.visit_type(& * _i . ty);
|
||||
tokens_helper(_visitor, &(& _i . eq_token).0);
|
||||
@ -1386,7 +1380,7 @@ pub fn visit_item_enum<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'as
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . enum_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics(& _i . generics);
|
||||
tokens_helper(_visitor, &(& _i . brace_token).0);
|
||||
for el in Punctuated::pairs(& _i . variants) { let it = el.value(); _visitor.visit_variant(it) };
|
||||
@ -1397,10 +1391,10 @@ pub fn visit_item_extern_crate<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V,
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . extern_token).0);
|
||||
tokens_helper(_visitor, &(& _i . crate_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref it) = _i . rename {
|
||||
tokens_helper(_visitor, &(& ( it ) . 0).0);
|
||||
_visitor.visit_ident(& ( it ) . 1);
|
||||
// Skipped field ( it ) . 1;
|
||||
};
|
||||
tokens_helper(_visitor, &(& _i . semi_token).0);
|
||||
}
|
||||
@ -1411,7 +1405,7 @@ pub fn visit_item_fn<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast
|
||||
if let Some(ref it) = _i . constness { tokens_helper(_visitor, &(it).0) };
|
||||
if let Some(ref it) = _i . unsafety { tokens_helper(_visitor, &(it).0) };
|
||||
if let Some(ref it) = _i . abi { _visitor.visit_abi(it) };
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_fn_decl(& * _i . decl);
|
||||
_visitor.visit_block(& * _i . block);
|
||||
}
|
||||
@ -1441,7 +1435,7 @@ pub fn visit_item_impl<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'as
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_item_macro<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast ItemMacro) {
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
if let Some(ref it) = _i . ident { _visitor.visit_ident(it) };
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_macro(& _i . mac);
|
||||
if let Some(ref it) = _i . semi_token { tokens_helper(_visitor, &(it).0) };
|
||||
}
|
||||
@ -1450,7 +1444,7 @@ pub fn visit_item_macro2<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . macro_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . paren_token).0);
|
||||
// Skipped field _i . args;
|
||||
tokens_helper(_visitor, &(& _i . brace_token).0);
|
||||
@ -1461,7 +1455,7 @@ pub fn visit_item_mod<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . mod_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref it) = _i . content {
|
||||
tokens_helper(_visitor, &(& ( it ) . 0).0);
|
||||
for it in & ( it ) . 1 { _visitor.visit_item(it) };
|
||||
@ -1474,7 +1468,7 @@ pub fn visit_item_static<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . static_token).0);
|
||||
if let Some(ref it) = _i . mutability { tokens_helper(_visitor, &(it).0) };
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . colon_token).0);
|
||||
_visitor.visit_type(& * _i . ty);
|
||||
tokens_helper(_visitor, &(& _i . eq_token).0);
|
||||
@ -1486,7 +1480,7 @@ pub fn visit_item_struct<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . struct_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics(& _i . generics);
|
||||
_visitor.visit_fields(& _i . fields);
|
||||
if let Some(ref it) = _i . semi_token { tokens_helper(_visitor, &(it).0) };
|
||||
@ -1498,7 +1492,7 @@ pub fn visit_item_trait<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'a
|
||||
if let Some(ref it) = _i . unsafety { tokens_helper(_visitor, &(it).0) };
|
||||
if let Some(ref it) = _i . auto_token { tokens_helper(_visitor, &(it).0) };
|
||||
tokens_helper(_visitor, &(& _i . trait_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics(& _i . generics);
|
||||
if let Some(ref it) = _i . colon_token { tokens_helper(_visitor, &(it).0) };
|
||||
for el in Punctuated::pairs(& _i . supertraits) { let it = el.value(); _visitor.visit_type_param_bound(it) };
|
||||
@ -1510,7 +1504,7 @@ pub fn visit_item_type<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'as
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . type_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics(& _i . generics);
|
||||
tokens_helper(_visitor, &(& _i . eq_token).0);
|
||||
_visitor.visit_type(& * _i . ty);
|
||||
@ -1521,7 +1515,7 @@ pub fn visit_item_union<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'a
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_visibility(& _i . vis);
|
||||
tokens_helper(_visitor, &(& _i . union_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics(& _i . generics);
|
||||
_visitor.visit_fields_named(& _i . fields);
|
||||
}
|
||||
@ -1545,7 +1539,7 @@ pub fn visit_label<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast La
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_lifetime<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Lifetime) {
|
||||
// Skipped field _i . term;
|
||||
// Skipped field _i . ident;
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_lifetime_def<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast LifetimeDef) {
|
||||
@ -1656,7 +1650,7 @@ pub fn visit_macro_delimiter<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i
|
||||
pub fn visit_member<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Member) {
|
||||
match *_i {
|
||||
Member::Named(ref _binding_0, ) => {
|
||||
_visitor.visit_ident(_binding_0);
|
||||
// Skipped field _binding_0;
|
||||
}
|
||||
Member::Unnamed(ref _binding_0, ) => {
|
||||
_visitor.visit_index(_binding_0);
|
||||
@ -1667,7 +1661,7 @@ pub fn visit_member<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast M
|
||||
pub fn visit_meta<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Meta) {
|
||||
match *_i {
|
||||
Meta::Word(ref _binding_0, ) => {
|
||||
_visitor.visit_ident(_binding_0);
|
||||
// Skipped field _binding_0;
|
||||
}
|
||||
Meta::List(ref _binding_0, ) => {
|
||||
_visitor.visit_meta_list(_binding_0);
|
||||
@ -1679,13 +1673,13 @@ pub fn visit_meta<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Met
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_meta_list<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast MetaList) {
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . paren_token).0);
|
||||
for el in Punctuated::pairs(& _i . nested) { let it = el.value(); _visitor.visit_nested_meta(it) };
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_meta_name_value<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast MetaNameValue) {
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . eq_token).0);
|
||||
_visitor.visit_lit(& _i . lit);
|
||||
}
|
||||
@ -1694,7 +1688,7 @@ pub fn visit_method_sig<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'a
|
||||
if let Some(ref it) = _i . constness { tokens_helper(_visitor, &(it).0) };
|
||||
if let Some(ref it) = _i . unsafety { tokens_helper(_visitor, &(it).0) };
|
||||
if let Some(ref it) = _i . abi { _visitor.visit_abi(it) };
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_fn_decl(& _i . decl);
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ] # [ cfg ( feature = "full" ) ]
|
||||
@ -1774,7 +1768,7 @@ pub fn visit_pat_box<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast
|
||||
pub fn visit_pat_ident<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast PatIdent) {
|
||||
if let Some(ref it) = _i . by_ref { tokens_helper(_visitor, &(it).0) };
|
||||
if let Some(ref it) = _i . mutability { tokens_helper(_visitor, &(it).0) };
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref it) = _i . subpat {
|
||||
tokens_helper(_visitor, &(& ( it ) . 0).0);
|
||||
_visitor.visit_pat(& * ( it ) . 1);
|
||||
@ -1861,7 +1855,7 @@ pub fn visit_path_arguments<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i:
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_path_segment<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast PathSegment) {
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_path_arguments(& _i . arguments);
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
@ -1973,7 +1967,7 @@ pub fn visit_trait_item<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'a
|
||||
pub fn visit_trait_item_const<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast TraitItemConst) {
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
tokens_helper(_visitor, &(& _i . const_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . colon_token).0);
|
||||
_visitor.visit_type(& _i . ty);
|
||||
if let Some(ref it) = _i . default {
|
||||
@ -1999,7 +1993,7 @@ pub fn visit_trait_item_method<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V,
|
||||
pub fn visit_trait_item_type<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast TraitItemType) {
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
tokens_helper(_visitor, &(& _i . type_token).0);
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics(& _i . generics);
|
||||
if let Some(ref it) = _i . colon_token { tokens_helper(_visitor, &(it).0) };
|
||||
for el in Punctuated::pairs(& _i . bounds) { let it = el.value(); _visitor.visit_type_param_bound(it) };
|
||||
@ -2106,7 +2100,7 @@ pub fn visit_type_never<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'a
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_type_param<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast TypeParam) {
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref it) = _i . colon_token { tokens_helper(_visitor, &(it).0) };
|
||||
for el in Punctuated::pairs(& _i . bounds) { let it = el.value(); _visitor.visit_type_param_bound(it) };
|
||||
if let Some(ref it) = _i . eq_token { tokens_helper(_visitor, &(it).0) };
|
||||
@ -2191,19 +2185,19 @@ pub fn visit_use_group<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'as
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_use_name<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast UseName) {
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_use_path<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast UsePath) {
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . colon2_token).0);
|
||||
_visitor.visit_use_tree(& * _i . tree);
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_use_rename<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast UseRename) {
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &(& _i . as_token).0);
|
||||
_visitor.visit_ident(& _i . rename);
|
||||
// Skipped field _i . rename;
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_use_tree<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast UseTree) {
|
||||
@ -2228,7 +2222,7 @@ pub fn visit_use_tree<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_variant<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Variant) {
|
||||
for it in & _i . attrs { _visitor.visit_attribute(it) };
|
||||
_visitor.visit_ident(& _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_fields(& _i . fields);
|
||||
if let Some(ref it) = _i . discriminant {
|
||||
tokens_helper(_visitor, &(& ( it ) . 0).0);
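The read-only visitor changes the same way: every `_visitor.visit_ident(& _i . ident)` call becomes `// Skipped field _i . ident`, so a Visit implementation that cares about names now reads the `ident` field off the node directly. A small sketch under the same assumptions as above (syn with `full` and `visit` enabled, versions as pinned); `StructNames` is illustrative only.

extern crate syn;

use syn::visit::{self, Visit};
use syn::ItemStruct;

// Collects the name of every struct in a file. The generated visitor no
// longer calls `visit_ident` for struct names, so `ident` is read directly.
struct StructNames(Vec<String>);

impl<'ast> Visit<'ast> for StructNames {
    fn visit_item_struct(&mut self, i: &'ast ItemStruct) {
        self.0.push(i.ident.to_string());
        // Keep walking nested items, fields, attributes, etc.
        visit::visit_item_struct(self, i);
    }
}

fn main() {
    let file = syn::parse_file("struct A; mod m { struct B(u8); }").expect("parse");
    let mut names = StructNames(Vec::new());
    names.visit_file(&file);
    assert_eq!(names.0, ["A", "B"]);
}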
@ -190,8 +190,6 @@ fn visit_generic_method_argument_mut(&mut self, i: &mut GenericMethodArgument) {
|
||||
fn visit_generic_param_mut(&mut self, i: &mut GenericParam) { visit_generic_param_mut(self, i) }
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
fn visit_generics_mut(&mut self, i: &mut Generics) { visit_generics_mut(self, i) }
|
||||
|
||||
fn visit_ident_mut(&mut self, i: &mut Ident) { visit_ident_mut(self, i) }
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
fn visit_impl_item_mut(&mut self, i: &mut ImplItem) { visit_impl_item_mut(self, i) }
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
@ -492,7 +490,7 @@ pub fn visit_bare_fn_arg_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Ba
|
||||
pub fn visit_bare_fn_arg_name_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut BareFnArgName) {
|
||||
match *_i {
|
||||
BareFnArgName::Named(ref mut _binding_0, ) => {
|
||||
_visitor.visit_ident_mut(_binding_0);
|
||||
// Skipped field _binding_0;
|
||||
}
|
||||
BareFnArgName::Wild(ref mut _binding_0, ) => {
|
||||
tokens_helper(_visitor, &mut (_binding_0).0);
|
||||
@ -590,7 +588,7 @@ pub fn visit_bin_op_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut BinOp)
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_binding_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Binding) {
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . eq_token).0);
|
||||
_visitor.visit_type_mut(& mut _i . ty);
|
||||
}
|
||||
@ -610,7 +608,7 @@ pub fn visit_bound_lifetimes_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mu
|
||||
pub fn visit_const_param_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut ConstParam) {
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
tokens_helper(_visitor, &mut (& mut _i . const_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . colon_token).0);
|
||||
_visitor.visit_type_mut(& mut _i . ty);
|
||||
if let Some(ref mut it) = _i . eq_token { tokens_helper(_visitor, &mut (it).0) };
|
||||
@ -651,7 +649,7 @@ pub fn visit_data_union_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Dat
|
||||
pub fn visit_derive_input_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut DeriveInput) {
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics_mut(& mut _i . generics);
|
||||
_visitor.visit_data_mut(& mut _i . data);
|
||||
}
|
||||
@ -955,7 +953,7 @@ pub fn visit_expr_method_call_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &m
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_expr_mut(& mut * _i . receiver);
|
||||
tokens_helper(_visitor, &mut (& mut _i . dot_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . method);
|
||||
// Skipped field _i . method;
|
||||
if let Some(ref mut it) = _i . turbofish { _visitor.visit_method_turbofish_mut(it) };
|
||||
tokens_helper(_visitor, &mut (& mut _i . paren_token).0);
|
||||
for mut el in Punctuated::pairs_mut(& mut _i . args) { let it = el.value_mut(); _visitor.visit_expr_mut(it) };
|
||||
@ -1073,7 +1071,7 @@ pub fn visit_expr_yield_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Exp
|
||||
pub fn visit_field_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Field) {
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
if let Some(ref mut it) = _i . ident { _visitor.visit_ident_mut(it) };
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref mut it) = _i . colon_token { tokens_helper(_visitor, &mut (it).0) };
|
||||
_visitor.visit_type_mut(& mut _i . ty);
|
||||
}
|
||||
@ -1169,7 +1167,7 @@ pub fn visit_foreign_item_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut F
|
||||
pub fn visit_foreign_item_fn_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut ForeignItemFn) {
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_fn_decl_mut(& mut * _i . decl);
|
||||
tokens_helper(_visitor, &mut (& mut _i . semi_token).0);
|
||||
}
|
||||
@ -1179,7 +1177,7 @@ pub fn visit_foreign_item_static_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i:
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . static_token).0);
|
||||
if let Some(ref mut it) = _i . mutability { tokens_helper(_visitor, &mut (it).0) };
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . colon_token).0);
|
||||
_visitor.visit_type_mut(& mut * _i . ty);
|
||||
tokens_helper(_visitor, &mut (& mut _i . semi_token).0);
|
||||
@ -1189,7 +1187,7 @@ pub fn visit_foreign_item_type_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . type_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . semi_token).0);
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
@ -1245,10 +1243,6 @@ pub fn visit_generics_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Gener
if let Some(ref mut it) = _i . gt_token { tokens_helper(_visitor, &mut (it).0) };
if let Some(ref mut it) = _i . where_clause { _visitor.visit_where_clause_mut(it) };
}

pub fn visit_ident_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Ident) {
// Skipped field _i . term;
}
# [ cfg ( feature = "full" ) ]
pub fn visit_impl_item_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut ImplItem) {
match *_i {
@ -1275,7 +1269,7 @@ pub fn visit_impl_item_const_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mu
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
if let Some(ref mut it) = _i . defaultness { tokens_helper(_visitor, &mut (it).0) };
|
||||
tokens_helper(_visitor, &mut (& mut _i . const_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . colon_token).0);
|
||||
_visitor.visit_type_mut(& mut _i . ty);
|
||||
tokens_helper(_visitor, &mut (& mut _i . eq_token).0);
|
||||
@ -1302,7 +1296,7 @@ pub fn visit_impl_item_type_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
if let Some(ref mut it) = _i . defaultness { tokens_helper(_visitor, &mut (it).0) };
|
||||
tokens_helper(_visitor, &mut (& mut _i . type_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics_mut(& mut _i . generics);
|
||||
tokens_helper(_visitor, &mut (& mut _i . eq_token).0);
|
||||
_visitor.visit_type_mut(& mut _i . ty);
|
||||
@ -1375,7 +1369,7 @@ pub fn visit_item_const_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Ite
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . const_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . colon_token).0);
|
||||
_visitor.visit_type_mut(& mut * _i . ty);
|
||||
tokens_helper(_visitor, &mut (& mut _i . eq_token).0);
|
||||
@ -1387,7 +1381,7 @@ pub fn visit_item_enum_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Item
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . enum_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics_mut(& mut _i . generics);
|
||||
tokens_helper(_visitor, &mut (& mut _i . brace_token).0);
|
||||
for mut el in Punctuated::pairs_mut(& mut _i . variants) { let it = el.value_mut(); _visitor.visit_variant_mut(it) };
|
||||
@ -1398,10 +1392,10 @@ pub fn visit_item_extern_crate_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . extern_token).0);
|
||||
tokens_helper(_visitor, &mut (& mut _i . crate_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref mut it) = _i . rename {
|
||||
tokens_helper(_visitor, &mut (& mut ( it ) . 0).0);
|
||||
_visitor.visit_ident_mut(& mut ( it ) . 1);
|
||||
// Skipped field ( it ) . 1;
|
||||
};
|
||||
tokens_helper(_visitor, &mut (& mut _i . semi_token).0);
|
||||
}
|
||||
@ -1412,7 +1406,7 @@ pub fn visit_item_fn_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut ItemFn
|
||||
if let Some(ref mut it) = _i . constness { tokens_helper(_visitor, &mut (it).0) };
|
||||
if let Some(ref mut it) = _i . unsafety { tokens_helper(_visitor, &mut (it).0) };
|
||||
if let Some(ref mut it) = _i . abi { _visitor.visit_abi_mut(it) };
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_fn_decl_mut(& mut * _i . decl);
|
||||
_visitor.visit_block_mut(& mut * _i . block);
|
||||
}
|
||||
@ -1442,7 +1436,7 @@ pub fn visit_item_impl_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Item
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_item_macro_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut ItemMacro) {
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
if let Some(ref mut it) = _i . ident { _visitor.visit_ident_mut(it) };
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_macro_mut(& mut _i . mac);
|
||||
if let Some(ref mut it) = _i . semi_token { tokens_helper(_visitor, &mut (it).0) };
|
||||
}
|
||||
@ -1451,7 +1445,7 @@ pub fn visit_item_macro2_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut It
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . macro_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . paren_token).0);
|
||||
// Skipped field _i . args;
|
||||
tokens_helper(_visitor, &mut (& mut _i . brace_token).0);
|
||||
@ -1462,7 +1456,7 @@ pub fn visit_item_mod_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut ItemM
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . mod_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref mut it) = _i . content {
|
||||
tokens_helper(_visitor, &mut (& mut ( it ) . 0).0);
|
||||
for it in & mut ( it ) . 1 { _visitor.visit_item_mut(it) };
|
||||
@ -1475,7 +1469,7 @@ pub fn visit_item_static_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut It
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . static_token).0);
|
||||
if let Some(ref mut it) = _i . mutability { tokens_helper(_visitor, &mut (it).0) };
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . colon_token).0);
|
||||
_visitor.visit_type_mut(& mut * _i . ty);
|
||||
tokens_helper(_visitor, &mut (& mut _i . eq_token).0);
|
||||
@ -1487,7 +1481,7 @@ pub fn visit_item_struct_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut It
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . struct_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics_mut(& mut _i . generics);
|
||||
_visitor.visit_fields_mut(& mut _i . fields);
|
||||
if let Some(ref mut it) = _i . semi_token { tokens_helper(_visitor, &mut (it).0) };
|
||||
@ -1499,7 +1493,7 @@ pub fn visit_item_trait_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Ite
|
||||
if let Some(ref mut it) = _i . unsafety { tokens_helper(_visitor, &mut (it).0) };
|
||||
if let Some(ref mut it) = _i . auto_token { tokens_helper(_visitor, &mut (it).0) };
|
||||
tokens_helper(_visitor, &mut (& mut _i . trait_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics_mut(& mut _i . generics);
|
||||
if let Some(ref mut it) = _i . colon_token { tokens_helper(_visitor, &mut (it).0) };
|
||||
for mut el in Punctuated::pairs_mut(& mut _i . supertraits) { let it = el.value_mut(); _visitor.visit_type_param_bound_mut(it) };
|
||||
@ -1511,7 +1505,7 @@ pub fn visit_item_type_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Item
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . type_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics_mut(& mut _i . generics);
|
||||
tokens_helper(_visitor, &mut (& mut _i . eq_token).0);
|
||||
_visitor.visit_type_mut(& mut * _i . ty);
|
||||
@ -1522,7 +1516,7 @@ pub fn visit_item_union_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Ite
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_visibility_mut(& mut _i . vis);
|
||||
tokens_helper(_visitor, &mut (& mut _i . union_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics_mut(& mut _i . generics);
|
||||
_visitor.visit_fields_named_mut(& mut _i . fields);
|
||||
}
|
||||
@ -1546,7 +1540,7 @@ pub fn visit_label_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Label) {
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_lifetime_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Lifetime) {
|
||||
// Skipped field _i . term;
|
||||
// Skipped field _i . ident;
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_lifetime_def_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut LifetimeDef) {
|
||||
@ -1657,7 +1651,7 @@ pub fn visit_macro_delimiter_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mu
|
||||
pub fn visit_member_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Member) {
|
||||
match *_i {
|
||||
Member::Named(ref mut _binding_0, ) => {
|
||||
_visitor.visit_ident_mut(_binding_0);
|
||||
// Skipped field _binding_0;
|
||||
}
|
||||
Member::Unnamed(ref mut _binding_0, ) => {
|
||||
_visitor.visit_index_mut(_binding_0);
|
||||
@ -1668,7 +1662,7 @@ pub fn visit_member_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Member)
|
||||
pub fn visit_meta_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Meta) {
|
||||
match *_i {
|
||||
Meta::Word(ref mut _binding_0, ) => {
|
||||
_visitor.visit_ident_mut(_binding_0);
|
||||
// Skipped field _binding_0;
|
||||
}
|
||||
Meta::List(ref mut _binding_0, ) => {
|
||||
_visitor.visit_meta_list_mut(_binding_0);
|
||||
@ -1680,13 +1674,13 @@ pub fn visit_meta_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Meta) {
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_meta_list_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut MetaList) {
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . paren_token).0);
|
||||
for mut el in Punctuated::pairs_mut(& mut _i . nested) { let it = el.value_mut(); _visitor.visit_nested_meta_mut(it) };
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_meta_name_value_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut MetaNameValue) {
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . eq_token).0);
|
||||
_visitor.visit_lit_mut(& mut _i . lit);
|
||||
}
|
||||
@ -1695,7 +1689,7 @@ pub fn visit_method_sig_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Met
|
||||
if let Some(ref mut it) = _i . constness { tokens_helper(_visitor, &mut (it).0) };
|
||||
if let Some(ref mut it) = _i . unsafety { tokens_helper(_visitor, &mut (it).0) };
|
||||
if let Some(ref mut it) = _i . abi { _visitor.visit_abi_mut(it) };
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_fn_decl_mut(& mut _i . decl);
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ] # [ cfg ( feature = "full" ) ]
|
||||
@ -1775,7 +1769,7 @@ pub fn visit_pat_box_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut PatBox
|
||||
pub fn visit_pat_ident_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut PatIdent) {
|
||||
if let Some(ref mut it) = _i . by_ref { tokens_helper(_visitor, &mut (it).0) };
|
||||
if let Some(ref mut it) = _i . mutability { tokens_helper(_visitor, &mut (it).0) };
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref mut it) = _i . subpat {
|
||||
tokens_helper(_visitor, &mut (& mut ( it ) . 0).0);
|
||||
_visitor.visit_pat_mut(& mut * ( it ) . 1);
|
||||
@ -1862,7 +1856,7 @@ pub fn visit_path_arguments_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_path_segment_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut PathSegment) {
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_path_arguments_mut(& mut _i . arguments);
|
||||
}
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
@ -1974,7 +1968,7 @@ pub fn visit_trait_item_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Tra
|
||||
pub fn visit_trait_item_const_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut TraitItemConst) {
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
tokens_helper(_visitor, &mut (& mut _i . const_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . colon_token).0);
|
||||
_visitor.visit_type_mut(& mut _i . ty);
|
||||
if let Some(ref mut it) = _i . default {
|
||||
@ -2000,7 +1994,7 @@ pub fn visit_trait_item_method_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &
|
||||
pub fn visit_trait_item_type_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut TraitItemType) {
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
tokens_helper(_visitor, &mut (& mut _i . type_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_generics_mut(& mut _i . generics);
|
||||
if let Some(ref mut it) = _i . colon_token { tokens_helper(_visitor, &mut (it).0) };
|
||||
for mut el in Punctuated::pairs_mut(& mut _i . bounds) { let it = el.value_mut(); _visitor.visit_type_param_bound_mut(it) };
|
||||
@ -2107,7 +2101,7 @@ pub fn visit_type_never_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Typ
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_type_param_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut TypeParam) {
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
if let Some(ref mut it) = _i . colon_token { tokens_helper(_visitor, &mut (it).0) };
|
||||
for mut el in Punctuated::pairs_mut(& mut _i . bounds) { let it = el.value_mut(); _visitor.visit_type_param_bound_mut(it) };
|
||||
if let Some(ref mut it) = _i . eq_token { tokens_helper(_visitor, &mut (it).0) };
|
||||
@ -2192,19 +2186,19 @@ pub fn visit_use_group_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut UseG
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_use_name_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut UseName) {
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_use_path_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut UsePath) {
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . colon2_token).0);
|
||||
_visitor.visit_use_tree_mut(& mut * _i . tree);
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_use_rename_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut UseRename) {
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
tokens_helper(_visitor, &mut (& mut _i . as_token).0);
|
||||
_visitor.visit_ident_mut(& mut _i . rename);
|
||||
// Skipped field _i . rename;
|
||||
}
|
||||
# [ cfg ( feature = "full" ) ]
|
||||
pub fn visit_use_tree_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut UseTree) {
|
||||
@ -2229,7 +2223,7 @@ pub fn visit_use_tree_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut UseTr
|
||||
# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
|
||||
pub fn visit_variant_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Variant) {
|
||||
for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
|
||||
_visitor.visit_ident_mut(& mut _i . ident);
|
||||
// Skipped field _i . ident;
|
||||
_visitor.visit_fields_mut(& mut _i . fields);
|
||||
if let Some(ref mut it) = _i . discriminant {
|
||||
tokens_helper(_visitor, &mut (& mut ( it ) . 0).0);
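The mutable visitor mirrors this: `ident` fields are skipped rather than visited, so an in-place rename assigns a fresh `proc_macro2::Ident` itself. Same assumptions as the sketches above (syn with `full` and `visit-mut`); `Prefix` is a made-up example type.

extern crate proc_macro2;
extern crate syn;

use proc_macro2::Ident;
use syn::visit_mut::{self, VisitMut};
use syn::ItemFn;

// Prefixes every function name in place. `visit_item_fn_mut` no longer
// visits `ident`, so the mutation builds a new proc-macro2 `Ident` directly.
struct Prefix<'a>(&'a str);

impl<'a> VisitMut for Prefix<'a> {
    fn visit_item_fn_mut(&mut self, i: &mut ItemFn) {
        let new_name = format!("{}{}", self.0, i.ident);
        let span = i.ident.span();
        i.ident = Ident::new(&new_name, span);
        visit_mut::visit_item_fn_mut(self, i);
    }
}

fn main() {
    let mut file = syn::parse_file("fn run() {}").expect("parse");
    let mut pass = Prefix("app_");
    pass.visit_file_mut(&mut file);
    match file.items[0] {
        syn::Item::Fn(ref f) => assert_eq!(f.ident.to_string(), "app_run"),
        _ => unreachable!(),
    }
}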
@ -7,6 +7,7 @@
// except according to those terms.

use super::*;
use proc_macro2::Ident;
use punctuated::{Iter, IterMut, Punctuated};

ast_struct! {
@ -751,10 +752,11 @@ pub mod parsing {
mod printing {
use super::*;
use attr::FilterAttrs;
use quote::{ToTokens, Tokens};
use quote::{ToTokens, TokenStreamExt};
use proc_macro2::TokenStream;

impl ToTokens for Generics {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
if self.params.is_empty() {
return;
}
@ -791,7 +793,7 @@ mod printing {
}

impl<'a> ToTokens for ImplGenerics<'a> {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
if self.0.params.is_empty() {
return;
}
@ -846,7 +848,7 @@ mod printing {
}

impl<'a> ToTokens for TypeGenerics<'a> {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
if self.0.params.is_empty() {
return;
}
@ -894,7 +896,7 @@ mod printing {
}

impl<'a> ToTokens for Turbofish<'a> {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
if !self.0.params.is_empty() {
<Token![::]>::default().to_tokens(tokens);
TypeGenerics(self.0).to_tokens(tokens);
@ -903,7 +905,7 @@ mod printing {
}

impl ToTokens for BoundLifetimes {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.for_token.to_tokens(tokens);
self.lt_token.to_tokens(tokens);
self.lifetimes.to_tokens(tokens);
@ -912,7 +914,7 @@ mod printing {
}

impl ToTokens for LifetimeDef {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.lifetime.to_tokens(tokens);
if !self.bounds.is_empty() {
@ -923,7 +925,7 @@ mod printing {
}

impl ToTokens for TypeParam {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.ident.to_tokens(tokens);
if !self.bounds.is_empty() {
@ -938,8 +940,8 @@ mod printing {
}

impl ToTokens for TraitBound {
fn to_tokens(&self, tokens: &mut Tokens) {
let to_tokens = |tokens: &mut Tokens| {
fn to_tokens(&self, tokens: &mut TokenStream) {
let to_tokens = |tokens: &mut TokenStream| {
self.modifier.to_tokens(tokens);
self.lifetimes.to_tokens(tokens);
self.path.to_tokens(tokens);
@ -952,7 +954,7 @@ mod printing {
}

impl ToTokens for TraitBoundModifier {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
match *self {
TraitBoundModifier::None => {}
TraitBoundModifier::Maybe(ref t) => t.to_tokens(tokens),
@ -961,7 +963,7 @@ mod printing {
}

impl ToTokens for ConstParam {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append_all(self.attrs.outer());
self.const_token.to_tokens(tokens);
self.ident.to_tokens(tokens);
@ -975,7 +977,7 @@ mod printing {
}

impl ToTokens for WhereClause {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
if !self.predicates.is_empty() {
self.where_token.to_tokens(tokens);
self.predicates.to_tokens(tokens);
@ -984,7 +986,7 @@ mod printing {
}

impl ToTokens for PredicateType {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.lifetimes.to_tokens(tokens);
self.bounded_ty.to_tokens(tokens);
self.colon_token.to_tokens(tokens);
@ -993,7 +995,7 @@ mod printing {
}

impl ToTokens for PredicateLifetime {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.lifetime.to_tokens(tokens);
if !self.bounds.is_empty() {
TokensOrDefault(&self.colon_token).to_tokens(tokens);
@ -1003,7 +1005,7 @@ mod printing {
}

impl ToTokens for PredicateEq {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.lhs_ty.to_tokens(tokens);
|
||||
self.eq_token.to_tokens(tokens);
|
||||
self.rhs_ty.to_tokens(tokens);
|
||||
|
src/ident.rs
@ -1,306 +0,0 @@
|
||||
// Copyright 2018 Syn Developers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use std::borrow::Cow;
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt::{self, Display};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
use proc_macro2::Term;
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
use proc_macro2::Span;
|
||||
|
||||
/// A word of Rust code, which may be a keyword or legal variable name.
|
||||
///
|
||||
/// An identifier consists of at least one Unicode code point, the first of
|
||||
/// which has the XID_Start property and the rest of which have the XID_Continue
|
||||
/// property. An underscore may be used as the first character as long as it is
|
||||
/// not the only character.
|
||||
///
|
||||
/// - The empty string is not an identifier. Use `Option<Ident>`.
|
||||
/// - An underscore by itself is not an identifier. Use
|
||||
/// `Token![_]` instead.
|
||||
/// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
|
||||
///
|
||||
/// An identifier constructed with `Ident::new` is permitted to be a Rust
|
||||
/// keyword, though parsing one through its [`Synom`] implementation rejects
|
||||
/// Rust keywords. Use `call!(Ident::parse_any)` when parsing to match the
|
||||
/// behaviour of `Ident::new`.
|
||||
///
|
||||
/// [`Synom`]: synom/trait.Synom.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// A new ident can be created from a string using the `Ident::from` function.
|
||||
/// Idents produced by `Ident::from` are set to resolve at the procedural macro
|
||||
/// *def site* by default. A different span can be provided explicitly by using
|
||||
/// `Ident::new`.
|
||||
///
|
||||
/// ```rust
|
||||
/// extern crate syn;
|
||||
/// extern crate proc_macro2;
|
||||
///
|
||||
/// use syn::Ident;
|
||||
/// use proc_macro2::Span;
|
||||
///
|
||||
/// fn main() {
|
||||
/// let def_ident = Ident::from("definitely");
|
||||
/// let call_ident = Ident::new("calligraphy", Span::call_site());
|
||||
///
|
||||
/// println!("{} {}", def_ident, call_ident);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// An ident can be interpolated into a token stream using the `quote!` macro.
|
||||
///
|
||||
/// ```rust
|
||||
/// #[macro_use]
|
||||
/// extern crate quote;
|
||||
///
|
||||
/// extern crate syn;
|
||||
/// use syn::Ident;
|
||||
///
|
||||
/// fn main() {
|
||||
/// let ident = Ident::from("demo");
|
||||
///
|
||||
/// // Create a variable binding whose name is this ident.
|
||||
/// let expanded = quote! { let #ident = 10; };
|
||||
///
|
||||
/// // Create a variable binding with a slightly different name.
|
||||
/// let temp_ident = Ident::from(format!("new_{}", ident));
|
||||
/// let expanded = quote! { let #temp_ident = 10; };
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// A string representation of the ident is available through the `as_ref()` and
|
||||
/// `to_string()` methods.
|
||||
///
|
||||
/// ```rust
|
||||
/// # use syn::Ident;
|
||||
/// # let ident = Ident::from("another_identifier");
|
||||
/// #
|
||||
/// // Examine the ident as a &str.
|
||||
/// let ident_str = ident.as_ref();
|
||||
/// if ident_str.len() > 60 {
|
||||
/// println!("Very long identifier: {}", ident_str)
|
||||
/// }
|
||||
///
|
||||
/// // Create a String from the ident.
|
||||
/// let ident_string = ident.to_string();
|
||||
/// give_away(ident_string);
|
||||
///
|
||||
/// fn give_away(s: String) { /* ... */ }
|
||||
/// ```
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct Ident {
|
||||
term: Term,
|
||||
}
|
||||
|
||||
impl Ident {
|
||||
/// Creates an ident with the given string representation.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the input string is neither a keyword nor a legal variable
|
||||
/// name.
|
||||
pub fn new(s: &str, span: Span) -> Self {
|
||||
if s.is_empty() {
|
||||
panic!("ident is not allowed to be empty; use Option<Ident>");
|
||||
}
|
||||
|
||||
if s.starts_with('\'') {
|
||||
panic!("ident is not allowed to be a lifetime; use syn::Lifetime");
|
||||
}
|
||||
|
||||
if s.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
|
||||
panic!("ident cannot be a number, use syn::Index instead");
|
||||
}
|
||||
|
||||
fn xid_ok(s: &str) -> bool {
|
||||
let mut chars = s.chars();
|
||||
let first = chars.next().unwrap();
|
||||
if !(UnicodeXID::is_xid_start(first) || first == '_') {
|
||||
return false;
|
||||
}
|
||||
for ch in chars {
|
||||
if !UnicodeXID::is_xid_continue(ch) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
if !xid_ok(s) {
|
||||
panic!("{:?} is not a valid ident", s);
|
||||
}
|
||||
|
||||
Ident {
|
||||
term: Term::new(s, span),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.term.span()
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.term.set_span(span);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a str> for Ident {
|
||||
fn from(s: &str) -> Self {
|
||||
Ident::new(s, Span::call_site())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Token![self]> for Ident {
|
||||
fn from(tok: Token![self]) -> Self {
|
||||
Ident::new("self", tok.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Token![Self]> for Ident {
|
||||
fn from(tok: Token![Self]) -> Self {
|
||||
Ident::new("Self", tok.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Token![super]> for Ident {
|
||||
fn from(tok: Token![super]) -> Self {
|
||||
Ident::new("super", tok.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Token![crate]> for Ident {
|
||||
fn from(tok: Token![crate]) -> Self {
|
||||
Ident::new("crate", tok.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<Cow<'a, str>> for Ident {
|
||||
fn from(s: Cow<'a, str>) -> Self {
|
||||
Ident::new(&s, Span::call_site())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for Ident {
|
||||
fn from(s: String) -> Self {
|
||||
Ident::new(&s, Span::call_site())
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<str> for Ident {
|
||||
fn as_ref(&self) -> &str {
|
||||
self.term.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Ident {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
self.term.as_str().fmt(formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> PartialEq<T> for Ident
|
||||
where
|
||||
T: AsRef<str>,
|
||||
{
|
||||
fn eq(&self, other: &T) -> bool {
|
||||
self.as_ref() == other.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Ident {}
|
||||
|
||||
impl PartialOrd for Ident {
|
||||
fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Ident {
|
||||
fn cmp(&self, other: &Ident) -> Ordering {
|
||||
self.as_ref().cmp(other.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for Ident {
|
||||
fn hash<H: Hasher>(&self, h: &mut H) {
|
||||
self.as_ref().hash(h);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
use buffer::Cursor;
|
||||
use parse_error;
|
||||
use synom::PResult;
|
||||
use synom::Synom;
|
||||
|
||||
impl Synom for Ident {
|
||||
fn parse(input: Cursor) -> PResult<Self> {
|
||||
let (term, rest) = match input.term() {
|
||||
Some(term) => term,
|
||||
_ => return parse_error(),
|
||||
};
|
||||
if term.as_str().starts_with('\'') {
|
||||
return parse_error();
|
||||
}
|
||||
match term.as_str() {
|
||||
"_"
|
||||
// From https://doc.rust-lang.org/grammar.html#keywords
|
||||
| "abstract" | "alignof" | "as" | "become" | "box" | "break" | "const"
|
||||
| "continue" | "crate" | "do" | "else" | "enum" | "extern" | "false" | "final"
|
||||
| "fn" | "for" | "if" | "impl" | "in" | "let" | "loop" | "macro" | "match"
|
||||
| "mod" | "move" | "mut" | "offsetof" | "override" | "priv" | "proc" | "pub"
|
||||
| "pure" | "ref" | "return" | "Self" | "self" | "sizeof" | "static" | "struct"
|
||||
| "super" | "trait" | "true" | "type" | "typeof" | "unsafe" | "unsized" | "use"
|
||||
| "virtual" | "where" | "while" | "yield" => return parse_error(),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Ok((Ident { term: term }, rest))
|
||||
}
|
||||
|
||||
fn description() -> Option<&'static str> {
|
||||
Some("identifier")
|
||||
}
|
||||
}
|
||||
|
||||
impl Ident {
|
||||
/// Parses any identifier
|
||||
///
|
||||
/// This is useful when parsing a DSL which allows Rust keywords as identifiers.
|
||||
pub fn parse_any(input: Cursor) -> PResult<Self> {
|
||||
let (term, rest) = match input.term() {
|
||||
Some(term) => term,
|
||||
_ => return parse_error(),
|
||||
};
|
||||
if term.as_str().starts_with('\'') {
|
||||
return parse_error();
|
||||
}
|
||||
|
||||
Ok((Ident { term: term }, rest))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
|
||||
impl ToTokens for Ident {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
tokens.append(self.term);
|
||||
}
|
||||
}
|
||||
}
|
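The whole of src/ident.rs above is removed by this commit because `Ident` is now taken directly from proc-macro2 0.4. As a rough, hedged sketch of the call-site migration (not part of the diff; the helper name is made up), code that relied on `Ident::from` now passes a span explicitly:

extern crate proc_macro2;

use proc_macro2::{Ident, Span};

// Hypothetical helper, for illustration only: the old code could write
// `syn::Ident::from(name)`; with proc-macro2 0.4 the span is explicit.
fn make_ident(name: &str) -> Ident {
    Ident::new(name, Span::call_site())
}

fn main() {
    let ident = make_ident("demo");
    assert_eq!(ident.to_string(), "demo");
}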
src/item.rs
@ -8,7 +8,7 @@
|
||||
|
||||
use super::*;
|
||||
use derive::{Data, DeriveInput};
|
||||
use proc_macro2::TokenStream;
|
||||
use proc_macro2::{TokenStream, Ident};
|
||||
use punctuated::Punctuated;
|
||||
use token::{Brace, Paren};
|
||||
|
||||
@ -1536,10 +1536,11 @@ pub mod parsing {
|
||||
mod printing {
|
||||
use super::*;
|
||||
use attr::FilterAttrs;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
impl ToTokens for ItemExternCrate {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.extern_token.to_tokens(tokens);
|
||||
@ -1554,7 +1555,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemUse {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.use_token.to_tokens(tokens);
|
||||
@ -1565,7 +1566,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemStatic {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.static_token.to_tokens(tokens);
|
||||
@ -1580,7 +1581,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemConst {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.const_token.to_tokens(tokens);
|
||||
@ -1594,13 +1595,13 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemFn {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.constness.to_tokens(tokens);
|
||||
self.unsafety.to_tokens(tokens);
|
||||
self.abi.to_tokens(tokens);
|
||||
NamedDecl(&self.decl, self.ident).to_tokens(tokens);
|
||||
NamedDecl(&self.decl, &self.ident).to_tokens(tokens);
|
||||
self.block.brace_token.surround(tokens, |tokens| {
|
||||
tokens.append_all(self.attrs.inner());
|
||||
tokens.append_all(&self.block.stmts);
|
||||
@ -1609,7 +1610,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemMod {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.mod_token.to_tokens(tokens);
|
||||
@ -1626,7 +1627,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemForeignMod {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.abi.to_tokens(tokens);
|
||||
self.brace_token.surround(tokens, |tokens| {
|
||||
@ -1636,7 +1637,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemType {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.type_token.to_tokens(tokens);
|
||||
@ -1650,7 +1651,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemEnum {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.enum_token.to_tokens(tokens);
|
||||
@ -1664,7 +1665,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemStruct {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.struct_token.to_tokens(tokens);
|
||||
@ -1689,7 +1690,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemUnion {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.union_token.to_tokens(tokens);
|
||||
@ -1701,7 +1702,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemTrait {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.unsafety.to_tokens(tokens);
|
||||
@ -1721,7 +1722,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemImpl {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.defaultness.to_tokens(tokens);
|
||||
self.unsafety.to_tokens(tokens);
|
||||
@ -1742,7 +1743,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemMacro {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.mac.path.to_tokens(tokens);
|
||||
self.mac.bang_token.to_tokens(tokens);
|
||||
@ -1763,7 +1764,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemMacro2 {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.macro_token.to_tokens(tokens);
|
||||
@ -1778,13 +1779,13 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ItemVerbatim {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.tts.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for UsePath {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
self.colon2_token.to_tokens(tokens);
|
||||
self.tree.to_tokens(tokens);
|
||||
@ -1792,13 +1793,13 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for UseName {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for UseRename {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
self.as_token.to_tokens(tokens);
|
||||
self.rename.to_tokens(tokens);
|
||||
@ -1806,13 +1807,13 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for UseGlob {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.star_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for UseGroup {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.brace_token.surround(tokens, |tokens| {
|
||||
self.items.to_tokens(tokens);
|
||||
});
|
||||
@ -1820,7 +1821,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TraitItemConst {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.const_token.to_tokens(tokens);
|
||||
self.ident.to_tokens(tokens);
|
||||
@ -1835,7 +1836,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TraitItemMethod {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.sig.to_tokens(tokens);
|
||||
match self.default {
|
||||
@ -1853,7 +1854,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TraitItemType {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.type_token.to_tokens(tokens);
|
||||
self.ident.to_tokens(tokens);
|
||||
@ -1872,7 +1873,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TraitItemMacro {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.mac.to_tokens(tokens);
|
||||
self.semi_token.to_tokens(tokens);
|
||||
@ -1880,13 +1881,13 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TraitItemVerbatim {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.tts.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ImplItemConst {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.defaultness.to_tokens(tokens);
|
||||
@ -1901,7 +1902,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ImplItemMethod {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.defaultness.to_tokens(tokens);
|
||||
@ -1914,7 +1915,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ImplItemType {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.defaultness.to_tokens(tokens);
|
||||
@ -1928,7 +1929,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ImplItemMacro {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.mac.to_tokens(tokens);
|
||||
self.semi_token.to_tokens(tokens);
|
||||
@ -1936,22 +1937,22 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ImplItemVerbatim {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.tts.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ForeignItemFn {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
NamedDecl(&self.decl, self.ident).to_tokens(tokens);
|
||||
NamedDecl(&self.decl, &self.ident).to_tokens(tokens);
|
||||
self.semi_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ForeignItemStatic {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.static_token.to_tokens(tokens);
|
||||
@ -1964,7 +1965,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ForeignItemType {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.attrs.outer());
|
||||
self.vis.to_tokens(tokens);
|
||||
self.type_token.to_tokens(tokens);
|
||||
@ -1974,24 +1975,24 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ForeignItemVerbatim {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.tts.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MethodSig {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.constness.to_tokens(tokens);
|
||||
self.unsafety.to_tokens(tokens);
|
||||
self.abi.to_tokens(tokens);
|
||||
NamedDecl(&self.decl, self.ident).to_tokens(tokens);
|
||||
NamedDecl(&self.decl, &self.ident).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
struct NamedDecl<'a>(&'a FnDecl, Ident);
|
||||
struct NamedDecl<'a>(&'a FnDecl, &'a Ident);
|
||||
|
||||
impl<'a> ToTokens for NamedDecl<'a> {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.0.fn_token.to_tokens(tokens);
|
||||
self.1.to_tokens(tokens);
|
||||
self.0.generics.to_tokens(tokens);
|
||||
@ -2008,7 +2009,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ArgSelfRef {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.and_token.to_tokens(tokens);
|
||||
self.lifetime.to_tokens(tokens);
|
||||
self.mutability.to_tokens(tokens);
|
||||
@ -2017,14 +2018,14 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ArgSelf {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.mutability.to_tokens(tokens);
|
||||
self.self_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ArgCaptured {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.pat.to_tokens(tokens);
|
||||
self.colon_token.to_tokens(tokens);
|
||||
self.ty.to_tokens(tokens);
|
||||
|
@ -329,9 +329,6 @@ pub use generics::{
|
||||
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
|
||||
pub use generics::{ImplGenerics, Turbofish, TypeGenerics};
|
||||
|
||||
mod ident;
|
||||
pub use ident::Ident;
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
mod item;
|
||||
#[cfg(feature = "full")]
|
||||
@ -732,7 +729,7 @@ impl<'a, T> quote::ToTokens for TokensOrDefault<'a, T>
|
||||
where
|
||||
T: quote::ToTokens + Default,
|
||||
{
|
||||
fn to_tokens(&self, tokens: &mut quote::Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
|
||||
match *self.0 {
|
||||
Some(ref t) => t.to_tokens(tokens),
|
||||
None => T::default().to_tokens(tokens),
|
||||
|
@ -10,7 +10,7 @@ use std::cmp::Ordering;
|
||||
use std::fmt::{self, Display};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
use proc_macro2::{Span, Term};
|
||||
use proc_macro2::{Span, Ident};
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
/// A Rust lifetime: `'a`.
|
||||
@ -27,9 +27,9 @@ use unicode_xid::UnicodeXID;
|
||||
/// *This type is available if Syn is built with the `"derive"` or `"full"`
|
||||
/// feature.*
|
||||
#[cfg_attr(feature = "extra-traits", derive(Debug))]
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Clone)]
|
||||
pub struct Lifetime {
|
||||
term: Term,
|
||||
ident: Ident,
|
||||
}
|
||||
|
||||
impl Lifetime {
|
||||
@ -65,28 +65,29 @@ impl Lifetime {
|
||||
}
|
||||
|
||||
Lifetime {
|
||||
term: Term::new(s, span),
|
||||
ident: Ident::new(&s[1..], span),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.term.span()
|
||||
self.ident.span()
|
||||
}
|
||||
|
||||
pub fn set_span(&mut self, span: Span) {
|
||||
self.term.set_span(span);
|
||||
self.ident.set_span(span);
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Lifetime {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
self.term.as_str().fmt(formatter)
|
||||
"'".fmt(formatter)?;
|
||||
self.ident.fmt(formatter)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Lifetime {
|
||||
fn eq(&self, other: &Lifetime) -> bool {
|
||||
self.term.as_str() == other.term.as_str()
|
||||
self.ident.eq(&other.ident)
|
||||
}
|
||||
}
|
||||
|
||||
@ -100,19 +101,20 @@ impl PartialOrd for Lifetime {
|
||||
|
||||
impl Ord for Lifetime {
|
||||
fn cmp(&self, other: &Lifetime) -> Ordering {
|
||||
self.term.as_str().cmp(other.term.as_str())
|
||||
self.ident.cmp(&other.ident)
|
||||
}
|
||||
}
|
||||
|
||||
impl Hash for Lifetime {
|
||||
fn hash<H: Hasher>(&self, h: &mut H) {
|
||||
self.term.as_str().hash(h)
|
||||
self.ident.hash(h)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
pub mod parsing {
|
||||
use super::*;
|
||||
use proc_macro2::Spacing;
|
||||
use buffer::Cursor;
|
||||
use parse_error;
|
||||
use synom::PResult;
|
||||
@ -120,15 +122,22 @@ pub mod parsing {
|
||||
|
||||
impl Synom for Lifetime {
|
||||
fn parse(input: Cursor) -> PResult<Self> {
|
||||
let (term, rest) = match input.term() {
|
||||
Some(term) => term,
|
||||
let rest = match input.op() {
|
||||
Some((op, rest)) => {
|
||||
if op.as_char() == '\'' && op.spacing() == Spacing::Joint {
|
||||
rest
|
||||
} else {
|
||||
return parse_error()
|
||||
}
|
||||
}
|
||||
_ => return parse_error(),
|
||||
};
|
||||
if !term.as_str().starts_with('\'') {
|
||||
return parse_error();
|
||||
}
|
||||
let (ident, rest) = match rest.term() {
|
||||
Some(pair) => pair,
|
||||
None => return parse_error(),
|
||||
};
|
||||
|
||||
Ok((Lifetime { term: term }, rest))
|
||||
Ok((Lifetime { ident: ident }, rest))
|
||||
}
|
||||
|
||||
fn description() -> Option<&'static str> {
|
||||
@ -140,11 +149,13 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
use proc_macro2::{TokenStream, Punct, Spacing};
|
||||
|
||||
impl ToTokens for Lifetime {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
self.term.to_tokens(tokens);
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append(Punct::new('\'', Spacing::Joint));
|
||||
self.ident.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
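The Lifetime change above replaces the stored `Term` with an `Ident` holding the name without its apostrophe, and prints the apostrophe as a joint `Punct`. A minimal sketch of that token shape, assuming the proc-macro2 0.4 API (illustrative, not taken from the diff):

extern crate proc_macro2;

use proc_macro2::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};

// `'a` is emitted as a joint apostrophe punct followed immediately by the
// ident `a`, mirroring the ToTokens impl for Lifetime above.
fn lifetime_tokens(name: &str, span: Span) -> TokenStream {
    let mut apostrophe = Punct::new('\'', Spacing::Joint);
    apostrophe.set_span(span);
    let ident = Ident::new(name, span);
    vec![TokenTree::Punct(apostrophe), TokenTree::Ident(ident)]
        .into_iter()
        .collect()
}

fn main() {
    let tokens = lifetime_tokens("a", Span::call_site());
    println!("{}", tokens); // renders the lifetime, roughly `'a`
}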
src/lit.rs
@ -10,7 +10,7 @@ use proc_macro2::{Literal, Span};
|
||||
use std::str;
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
use proc_macro2::Term;
|
||||
use proc_macro2::Ident;
|
||||
|
||||
#[cfg(feature = "parsing")]
|
||||
use proc_macro2::TokenStream;
|
||||
@ -432,9 +432,9 @@ pub mod parsing {
|
||||
_ => match input.term() {
|
||||
Some((term, rest)) => Ok((
|
||||
Lit::Bool(LitBool {
|
||||
value: if term.as_str() == "true" {
|
||||
value: if term.to_string() == "true" {
|
||||
true
|
||||
} else if term.as_str() == "false" {
|
||||
} else if term.to_string() == "false" {
|
||||
false
|
||||
} else {
|
||||
return parse_error();
|
||||
@ -506,53 +506,54 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
impl ToTokens for LitStr {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for LitByteStr {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for LitByte {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for LitChar {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for LitInt {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for LitFloat {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for LitBool {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let s = if self.value { "true" } else { "false" };
|
||||
tokens.append(Term::new(s, self.span));
|
||||
tokens.append(Ident::new(s, self.span));
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for LitVerbatim {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
@ -93,10 +93,11 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::ToTokens;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
impl ToTokens for Macro {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.path.to_tokens(tokens);
|
||||
self.bang_token.to_tokens(tokens);
|
||||
match self.delimiter {
|
||||
|
@ -133,7 +133,7 @@ macro_rules! generate_to_tokens {
|
||||
|
||||
(($($arms:tt)*) $tokens:ident $name:ident {}) => {
|
||||
impl ::quote::ToTokens for $name {
|
||||
fn to_tokens(&self, $tokens: &mut ::quote::Tokens) {
|
||||
fn to_tokens(&self, $tokens: &mut ::proc_macro2::TokenStream) {
|
||||
match *self {
|
||||
$($arms)*
|
||||
}
|
||||
|
@ -174,10 +174,11 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::ToTokens;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
impl ToTokens for BinOp {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
BinOp::Add(ref t) => t.to_tokens(tokens),
|
||||
BinOp::Sub(ref t) => t.to_tokens(tokens),
|
||||
@ -212,7 +213,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for UnOp {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
UnOp::Deref(ref t) => t.to_tokens(tokens),
|
||||
UnOp::Not(ref t) => t.to_tokens(tokens),
|
||||
|
src/path.rs
@ -7,6 +7,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
use super::*;
|
||||
use proc_macro2::Ident;
|
||||
use punctuated::Punctuated;
|
||||
|
||||
ast_struct! {
|
||||
@ -41,7 +42,7 @@ impl Path {
|
||||
/// }
|
||||
///
|
||||
/// impl ToTokens for MyNode {
|
||||
/// fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
/// fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
/// PathTokens(&self.qself, &self.path).to_tokens(tokens);
|
||||
/// }
|
||||
/// }
|
||||
@ -231,7 +232,7 @@ pub mod parsing {
|
||||
named!(parse -> Self, do_parse!(
|
||||
colon: option!(punct!(::)) >>
|
||||
segments: call!(Punctuated::<PathSegment, Token![::]>::parse_separated_nonempty) >>
|
||||
cond_reduce!(segments.first().map_or(true, |seg| seg.value().ident != "dyn")) >>
|
||||
cond_reduce!(segments.first().map_or(true, |seg| seg.value().ident.to_string() != "dyn")) >>
|
||||
(Path {
|
||||
leading_colon: colon,
|
||||
segments: segments,
|
||||
@ -416,24 +417,25 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::ToTokens;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
impl ToTokens for Path {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.leading_colon.to_tokens(tokens);
|
||||
self.segments.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for PathSegment {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
self.arguments.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for PathArguments {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
PathArguments::None => {}
|
||||
PathArguments::AngleBracketed(ref arguments) => {
|
||||
@ -448,7 +450,7 @@ mod printing {
|
||||
|
||||
impl ToTokens for GenericArgument {
|
||||
#[cfg_attr(feature = "cargo-clippy", allow(match_same_arms))]
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
GenericArgument::Lifetime(ref lt) => lt.to_tokens(tokens),
|
||||
GenericArgument::Type(ref ty) => ty.to_tokens(tokens),
|
||||
@ -473,7 +475,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for AngleBracketedGenericArguments {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.colon2_token.to_tokens(tokens);
|
||||
self.lt_token.to_tokens(tokens);
|
||||
|
||||
@ -516,7 +518,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for Binding {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.ident.to_tokens(tokens);
|
||||
self.eq_token.to_tokens(tokens);
|
||||
self.ty.to_tokens(tokens);
|
||||
@ -524,7 +526,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for ParenthesizedGenericArguments {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.inputs.to_tokens(tokens);
|
||||
});
|
||||
@ -533,7 +535,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl<'a> ToTokens for PathTokens<'a> {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let qself = match *self.0 {
|
||||
Some(ref qself) => qself,
|
||||
None => return self.1.to_tokens(tokens),
|
||||
|
@ -762,14 +762,15 @@ where
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::{ToTokens, TokenStreamExt};
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
impl<T, P> ToTokens for Punctuated<T, P>
|
||||
where
|
||||
T: ToTokens,
|
||||
P: ToTokens,
|
||||
{
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
tokens.append_all(self.pairs())
|
||||
}
|
||||
}
|
||||
@ -779,7 +780,7 @@ mod printing {
|
||||
T: ToTokens,
|
||||
P: ToTokens,
|
||||
{
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
Pair::Punctuated(ref a, ref b) => {
|
||||
a.to_tokens(tokens);
|
||||
|
@ -79,7 +79,7 @@
|
||||
//! error appear in the correct place underlining the right type.
|
||||
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::ToTokens;
|
||||
|
||||
/// A trait that can provide the `Span` of the complete contents of a syntax
|
||||
/// tree node.
|
||||
@ -109,7 +109,7 @@ where
|
||||
{
|
||||
#[cfg(procmacro2_semver_exempt)]
|
||||
fn span(&self) -> Span {
|
||||
let mut tokens = Tokens::new();
|
||||
let mut tokens = TokenStream::empty();
|
||||
self.to_tokens(&mut tokens);
|
||||
let token_stream = TokenStream::from(tokens);
|
||||
let mut iter = token_stream.into_iter();
|
||||
@ -129,7 +129,7 @@ where
|
||||
|
||||
#[cfg(not(procmacro2_semver_exempt))]
|
||||
fn span(&self) -> Span {
|
||||
let mut tokens = Tokens::new();
|
||||
let mut tokens = TokenStream::empty();
|
||||
self.to_tokens(&mut tokens);
|
||||
let token_stream = TokenStream::from(tokens);
|
||||
let mut iter = token_stream.into_iter();
|
||||
|
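The Spanned changes above swap `Tokens::new()` for `TokenStream::empty()`. A hedged sketch of the same pattern, reading a span back out of the rendered stream (assumes quote 0.6's `ToTokens`; the function is illustrative, not from the commit):

extern crate proc_macro2;
extern crate quote;

use proc_macro2::{Span, TokenStream};
use quote::ToTokens;

// Render any ToTokens value into a fresh TokenStream and take the span of its
// first token, if there is one.
fn first_token_span<T: ToTokens>(node: &T) -> Option<Span> {
    let mut tokens = TokenStream::empty();
    node.to_tokens(&mut tokens);
    tokens.into_iter().next().map(|tt| tt.span())
}

fn main() {
    let ident = proc_macro2::Ident::new("example", Span::call_site());
    assert!(first_token_span(&ident).is_some());
}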
src/synom.rs
@ -213,6 +213,16 @@ impl Synom for proc_macro2::TokenStream {
|
||||
}
|
||||
}
|
||||
|
||||
impl Synom for proc_macro2::Ident {
|
||||
fn parse(input: Cursor) -> PResult<Self> {
|
||||
input.term().ok_or_else(|| ParseError::new("not an ident"))
|
||||
}
|
||||
|
||||
fn description() -> Option<&'static str> {
|
||||
Some("arbitrary token stream")
|
||||
}
|
||||
}
|
||||
|
||||
/// Parser that can parse Rust tokens into a particular syntax tree node.
|
||||
///
|
||||
/// Refer to the [module documentation] for details about parsing in Syn.
|
||||
|
src/token.rs
@ -97,7 +97,7 @@
|
||||
//! # fn main() {}
|
||||
//! ```
|
||||
|
||||
use proc_macro2::Span;
|
||||
use proc_macro2::{Span, Ident};
|
||||
|
||||
macro_rules! tokens {
|
||||
(
|
||||
@ -179,7 +179,7 @@ macro_rules! token_punct_parser {
|
||||
($s:tt pub struct $name:ident) => {
|
||||
#[cfg(feature = "printing")]
|
||||
impl ::quote::ToTokens for $name {
|
||||
fn to_tokens(&self, tokens: &mut ::quote::Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) {
|
||||
printing::punct($s, &self.0, tokens);
|
||||
}
|
||||
}
|
||||
@ -242,7 +242,7 @@ macro_rules! token_keyword {
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
impl ::quote::ToTokens for $name {
|
||||
fn to_tokens(&self, tokens: &mut ::quote::Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) {
|
||||
printing::keyword($s, &self.0, tokens);
|
||||
}
|
||||
}
|
||||
@ -258,6 +258,12 @@ macro_rules! token_keyword {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<$name> for Ident {
|
||||
fn from(me: $name) -> Ident {
|
||||
Ident::new($s, me.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Span> for $name {
|
||||
fn from(span: Span) -> Self {
|
||||
$name(span)
|
||||
@ -306,9 +312,9 @@ macro_rules! token_delimiter {
|
||||
|
||||
impl $name {
|
||||
#[cfg(feature = "printing")]
|
||||
pub fn surround<F>(&self, tokens: &mut ::quote::Tokens, f: F)
|
||||
pub fn surround<F>(&self, tokens: &mut ::proc_macro2::TokenStream, f: F)
|
||||
where
|
||||
F: FnOnce(&mut ::quote::Tokens),
|
||||
F: FnOnce(&mut ::proc_macro2::TokenStream),
|
||||
{
|
||||
printing::delim($s, &self.0, tokens, f);
|
||||
}
|
||||
@ -340,8 +346,9 @@ token_punct_def! {
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
impl ::quote::ToTokens for Underscore {
|
||||
fn to_tokens(&self, tokens: &mut ::quote::Tokens) {
|
||||
tokens.append(::proc_macro2::Term::new("_", self.0[0]));
|
||||
fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) {
|
||||
use quote::TokenStreamExt;
|
||||
tokens.append(::proc_macro2::Ident::new("_", self.0[0]));
|
||||
}
|
||||
}
|
||||
|
||||
@ -349,8 +356,13 @@ impl ::quote::ToTokens for Underscore {
|
||||
impl ::Synom for Underscore {
|
||||
fn parse(input: ::buffer::Cursor) -> ::synom::PResult<Underscore> {
|
||||
match input.term() {
|
||||
Some((term, rest)) if term.as_str() == "_" => Ok((Underscore([term.span()]), rest)),
|
||||
Some(_) => ::parse_error(),
|
||||
Some((term, rest)) => {
|
||||
if term.to_string() == "_" {
|
||||
Ok((Underscore([term.span()]), rest))
|
||||
} else {
|
||||
::parse_error()
|
||||
}
|
||||
}
|
||||
None => parsing::punct("_", input, Underscore),
|
||||
}
|
||||
}
|
||||
@ -713,15 +725,19 @@ mod parsing {
|
||||
|
||||
for (i, (ch, slot)) in chars.zip(&mut spans).enumerate() {
|
||||
match tokens.op() {
|
||||
Some((op, rest)) if op.op() == ch => {
|
||||
if i != s.len() - 1 {
|
||||
match op.spacing() {
|
||||
Spacing::Joint => {}
|
||||
_ => return parse_error(),
|
||||
Some((op, rest)) => {
|
||||
if op.as_char() == ch {
|
||||
if i != s.len() - 1 {
|
||||
match op.spacing() {
|
||||
Spacing::Joint => {}
|
||||
_ => return parse_error(),
|
||||
}
|
||||
}
|
||||
*slot = op.span();
|
||||
tokens = rest;
|
||||
} else {
|
||||
return parse_error()
|
||||
}
|
||||
*slot = op.span();
|
||||
tokens = rest;
|
||||
}
|
||||
_ => return parse_error(),
|
||||
}
|
||||
@ -735,7 +751,7 @@ mod parsing {
|
||||
new: fn(Span) -> T,
|
||||
) -> PResult<'a, T> {
|
||||
if let Some((term, rest)) = tokens.term() {
|
||||
if term.as_str() == keyword {
|
||||
if term.to_string() == keyword {
|
||||
return Ok((new(term.span()), rest));
|
||||
}
|
||||
}
|
||||
@ -776,10 +792,10 @@ mod parsing {
|
||||
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use proc_macro2::{Delimiter, Group, Op, Spacing, Span, Term};
|
||||
use quote::Tokens;
|
||||
use proc_macro2::{Delimiter, Group, Punct, Spacing, Span, Ident, TokenStream};
|
||||
use quote::TokenStreamExt;
|
||||
|
||||
pub fn punct(s: &str, spans: &[Span], tokens: &mut Tokens) {
|
||||
pub fn punct(s: &str, spans: &[Span], tokens: &mut TokenStream) {
|
||||
assert_eq!(s.len(), spans.len());
|
||||
|
||||
let mut chars = s.chars();
|
||||
@ -787,23 +803,23 @@ mod printing {
|
||||
let ch = chars.next_back().unwrap();
|
||||
let span = spans.next_back().unwrap();
|
||||
for (ch, span) in chars.zip(spans) {
|
||||
let mut op = Op::new(ch, Spacing::Joint);
|
||||
let mut op = Punct::new(ch, Spacing::Joint);
|
||||
op.set_span(*span);
|
||||
tokens.append(op);
|
||||
}
|
||||
|
||||
let mut op = Op::new(ch, Spacing::Alone);
|
||||
let mut op = Punct::new(ch, Spacing::Alone);
|
||||
op.set_span(*span);
|
||||
tokens.append(op);
|
||||
}
|
||||
|
||||
pub fn keyword(s: &str, span: &Span, tokens: &mut Tokens) {
|
||||
tokens.append(Term::new(s, *span));
|
||||
pub fn keyword(s: &str, span: &Span, tokens: &mut TokenStream) {
|
||||
tokens.append(Ident::new(s, *span));
|
||||
}
|
||||
|
||||
pub fn delim<F>(s: &str, span: &Span, tokens: &mut Tokens, f: F)
|
||||
pub fn delim<F>(s: &str, span: &Span, tokens: &mut TokenStream, f: F)
|
||||
where
|
||||
F: FnOnce(&mut Tokens),
|
||||
F: FnOnce(&mut TokenStream),
|
||||
{
|
||||
let delim = match s {
|
||||
"(" => Delimiter::Parenthesis,
|
||||
@ -812,7 +828,7 @@ mod printing {
|
||||
" " => Delimiter::None,
|
||||
_ => panic!("unknown delimiter: {}", s),
|
||||
};
|
||||
let mut inner = Tokens::new();
|
||||
let mut inner = TokenStream::empty();
|
||||
f(&mut inner);
|
||||
let mut g = Group::new(delim, inner.into());
|
||||
g.set_span(*span);
|
||||
|
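The printing helpers above now build `Punct` and `Ident` values instead of `Op` and `Term`. A small illustrative version of the multi-character punct logic (an assumed helper, not the commit's own code), with Joint spacing on every character except the last:

extern crate proc_macro2;
extern crate quote;

use proc_macro2::{Punct, Spacing, Span, TokenStream};
use quote::TokenStreamExt;

fn append_punct(s: &str, span: Span, tokens: &mut TokenStream) {
    let last = s.chars().count() - 1;
    for (i, ch) in s.chars().enumerate() {
        // All characters but the last are Joint so `::` stays one operator.
        let spacing = if i == last { Spacing::Alone } else { Spacing::Joint };
        let mut op = Punct::new(ch, spacing);
        op.set_span(span);
        tokens.append(op);
    }
}

fn main() {
    let mut tokens = TokenStream::empty();
    append_punct("::", Span::call_site(), &mut tokens);
    println!("{}", tokens);
}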
src/tt.rs
@ -89,8 +89,8 @@ impl<'a> PartialEq for TokenTreeHelper<'a> {
|
||||
}
|
||||
s2.next().is_none()
|
||||
}
|
||||
(&TokenTree::Op(ref o1), &TokenTree::Op(ref o2)) => {
|
||||
o1.op() == o2.op() && match (o1.spacing(), o2.spacing()) {
|
||||
(&TokenTree::Punct(ref o1), &TokenTree::Punct(ref o2)) => {
|
||||
o1.as_char() == o2.as_char() && match (o1.spacing(), o2.spacing()) {
|
||||
(Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
|
||||
_ => false,
|
||||
}
|
||||
@ -98,7 +98,7 @@ impl<'a> PartialEq for TokenTreeHelper<'a> {
|
||||
(&TokenTree::Literal(ref l1), &TokenTree::Literal(ref l2)) => {
|
||||
l1.to_string() == l2.to_string()
|
||||
}
|
||||
(&TokenTree::Term(ref s1), &TokenTree::Term(ref s2)) => s1.as_str() == s2.as_str(),
|
||||
(&TokenTree::Ident(ref s1), &TokenTree::Ident(ref s2)) => s1 == s2,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
@ -124,16 +124,16 @@ impl<'a> Hash for TokenTreeHelper<'a> {
|
||||
}
|
||||
0xffu8.hash(h); // terminator w/ a variant we don't normally hash
|
||||
}
|
||||
TokenTree::Op(ref op) => {
|
||||
TokenTree::Punct(ref op) => {
|
||||
1u8.hash(h);
|
||||
op.op().hash(h);
|
||||
op.as_char().hash(h);
|
||||
match op.spacing() {
|
||||
Spacing::Alone => 0u8.hash(h),
|
||||
Spacing::Joint => 1u8.hash(h),
|
||||
}
|
||||
}
|
||||
TokenTree::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
|
||||
TokenTree::Term(ref word) => (3u8, word.as_str()).hash(h),
|
||||
TokenTree::Ident(ref word) => (3u8, word).hash(h),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
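The TokenTreeHelper changes above compare `Punct` by `as_char()` plus spacing and compare `Ident` directly. A standalone sketch of that comparison under the proc-macro2 0.4 token model (illustrative only; groups omitted):

extern crate proc_macro2;

use proc_macro2::{Spacing, TokenTree};

fn tt_eq(a: &TokenTree, b: &TokenTree) -> bool {
    match (a, b) {
        (&TokenTree::Punct(ref p1), &TokenTree::Punct(ref p2)) => {
            p1.as_char() == p2.as_char() && match (p1.spacing(), p2.spacing()) {
                (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                _ => false,
            }
        }
        (&TokenTree::Ident(ref i1), &TokenTree::Ident(ref i2)) => i1 == i2,
        (&TokenTree::Literal(ref l1), &TokenTree::Literal(ref l2)) => {
            l1.to_string() == l2.to_string()
        }
        _ => false,
    }
}

fn main() {
    let a = TokenTree::Ident(proc_macro2::Ident::new("x", proc_macro2::Span::call_site()));
    let b = TokenTree::Ident(proc_macro2::Ident::new("x", proc_macro2::Span::call_site()));
    assert!(tt_eq(&a, &b));
}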
src/ty.rs
@ -7,7 +7,7 @@
|
||||
// except according to those terms.
|
||||
|
||||
use super::*;
|
||||
use proc_macro2::TokenStream;
|
||||
use proc_macro2::{TokenStream, Ident};
|
||||
use punctuated::Punctuated;
|
||||
#[cfg(feature = "extra-traits")]
|
||||
use std::hash::{Hash, Hasher};
|
||||
@ -641,10 +641,11 @@ pub mod parsing {
|
||||
#[cfg(feature = "printing")]
|
||||
mod printing {
|
||||
use super::*;
|
||||
use quote::{ToTokens, Tokens};
|
||||
use quote::ToTokens;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
impl ToTokens for TypeSlice {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
self.elem.to_tokens(tokens);
|
||||
});
|
||||
@ -652,7 +653,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TypeArray {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
self.elem.to_tokens(tokens);
|
||||
self.semi_token.to_tokens(tokens);
|
||||
@ -662,7 +663,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TypePtr {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.star_token.to_tokens(tokens);
|
||||
match self.mutability {
|
||||
Some(ref tok) => tok.to_tokens(tokens),
|
||||
@ -675,7 +676,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TypeReference {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.and_token.to_tokens(tokens);
|
||||
self.lifetime.to_tokens(tokens);
|
||||
self.mutability.to_tokens(tokens);
|
||||
@ -684,7 +685,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TypeBareFn {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.lifetimes.to_tokens(tokens);
|
||||
self.unsafety.to_tokens(tokens);
|
||||
self.abi.to_tokens(tokens);
|
||||
@ -704,13 +705,13 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TypeNever {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.bang_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TypeTuple {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.elems.to_tokens(tokens);
|
||||
});
|
||||
@ -718,27 +719,27 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TypePath {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
PathTokens(&self.qself, &self.path).to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TypeTraitObject {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.dyn_token.to_tokens(tokens);
|
||||
self.bounds.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TypeImplTrait {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.impl_token.to_tokens(tokens);
|
||||
self.bounds.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TypeGroup {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.group_token.surround(tokens, |tokens| {
|
||||
self.elem.to_tokens(tokens);
|
||||
});
|
||||
@ -746,7 +747,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TypeParen {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.paren_token.surround(tokens, |tokens| {
|
||||
self.elem.to_tokens(tokens);
|
||||
});
|
||||
@ -754,25 +755,25 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for TypeInfer {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.underscore_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TypeMacro {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.mac.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TypeVerbatim {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.tts.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ReturnType {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
ReturnType::Default => {}
|
||||
ReturnType::Type(ref arrow, ref ty) => {
|
||||
@ -784,7 +785,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for BareFnArg {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
if let Some((ref name, ref colon)) = self.name {
|
||||
name.to_tokens(tokens);
|
||||
colon.to_tokens(tokens);
|
||||
@ -794,7 +795,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for BareFnArgName {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match *self {
|
||||
BareFnArgName::Named(ref t) => t.to_tokens(tokens),
|
||||
BareFnArgName::Wild(ref t) => t.to_tokens(tokens),
|
||||
@ -803,7 +804,7 @@ mod printing {
|
||||
}
|
||||
|
||||
impl ToTokens for Abi {
|
||||
fn to_tokens(&self, tokens: &mut Tokens) {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.extern_token.to_tokens(tokens);
|
||||
self.name.to_tokens(tokens);
|
||||
}
|
||||
|