Bug 1716518 - Upgrade paste and paste-impl to v0.1.18. r=emilio
Differential Revision: https://phabricator.services.mozilla.com/D117827
This commit is contained in:
parent
c1c7f733c5
commit
e651cd961d
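
For context on the vendored crate being upgraded: `paste` provides token-pasting macros, and the `[<...>]` groups used throughout the vendored tests below concatenate identifier fragments into a single identifier. A minimal usage sketch (illustrative only, not part of this commit; it assumes the 0.1.x `paste::item!` entry point that appears in the diff):

    // Illustrative sketch: `paste::item!` expands `[<...>]` by pasting the pieces
    // into one identifier, so `make_alias!(original)` defines `original_alias`.
    macro_rules! make_alias {
        ($name:ident) => {
            paste::item! {
                fn [<$name _alias>]() {
                    $name()
                }
            }
        };
    }

    fn original() {}
    make_alias!(original);

    fn main() {
        original_alias();
    }
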
Cargo.lock (generated, 11 lines changed)
@@ -3696,9 +3696,9 @@ dependencies = [

[[package]]
name = "paste"
version = "0.1.12"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a229b1c58c692edcaa5b9b0948084f130f55d2dcc15b02fcc5340b2b4521476"
checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880"
dependencies = [
"paste-impl",
"proc-macro-hack",
@@ -3706,14 +3706,11 @@ dependencies = [

[[package]]
name = "paste-impl"
version = "0.1.12"
version = "0.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e0bf239e447e67ff6d16a8bb5e4d4bd2343acf5066061c0e8e06ac5ba8ca68c"
checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6"
dependencies = [
"proc-macro-hack",
"proc-macro2",
"quote",
"syn",
]

[[package]]
third_party/rust/paste-impl/.cargo-checksum.json (vendored)
@@ -1 +1 @@
{"files":{"Cargo.toml":"a38a5cd4f98c83b4e3da74ea12f2e9ec9839c4b344f4b13be5d13280bf38b928","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7f697f191d6ffb32881a6bcf535e817ebbebaaa11b9aacf4f0b26fd32e2cc201","src/enum_hack.rs":"c01854a6921de3bb55670d0f5a359fed3d68b36db837b858cc3ed1553fc13a74","src/lib.rs":"ed045c1ac2a53e733bb6065805707d6d327edf30345f4a9a8bd5db8d7041e2ca"},"package":"2e0bf239e447e67ff6d16a8bb5e4d4bd2343acf5066061c0e8e06ac5ba8ca68c"}
{"files":{"Cargo.toml":"81f9fbb93a68bab36249f394bce4d7efaa551eb3d1697dadc02fd5ede96b31cc","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7f697f191d6ffb32881a6bcf535e817ebbebaaa11b9aacf4f0b26fd32e2cc201","src/enum_hack.rs":"649ec2dd8b44607ca00daba8b0e3a054b6e8f3d1cb55bacb88b03a5570311e86","src/error.rs":"be08d9d48b4cb9984b5141b12f21c31bd7293942c18b3ce25754723930cf54f5","src/lib.rs":"474883fa7898ace1f4c8d08a250777093656296ceb47924986ba4f62c4761fb7"},"package":"d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6"}
third_party/rust/paste-impl/Cargo.toml (vendored, 11 lines changed)
@@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "paste-impl"
version = "0.1.12"
version = "0.1.18"
authors = ["David Tolnay <dtolnay@gmail.com>"]
description = "Implementation detail of the `paste` crate"
license = "MIT OR Apache-2.0"
@@ -25,12 +25,3 @@ targets = ["x86_64-unknown-linux-gnu"]
proc-macro = true

[dependencies.proc-macro-hack]
version = "0.5"

[dependencies.proc-macro2]
version = "1.0"

[dependencies.quote]
version = "1.0"

[dependencies.syn]
version = "1.0"
third_party/rust/paste-impl/src/enum_hack.rs (vendored, 123 lines changed)
@@ -1,10 +1,7 @@
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

use proc_macro2::{Ident, Span, TokenStream, TokenTree};
use quote::quote;
use syn::parse::{Parse, ParseStream, Result};
use syn::{braced, parenthesized, parse_macro_input, Token};
use std::iter::FromIterator;

pub fn wrap(output: TokenStream) -> TokenStream {
let mut hasher = DefaultHasher::default();
@@ -12,50 +9,78 @@ pub fn wrap(output: TokenStream) -> TokenStream {
let mangled_name = format!("_paste_{}", hasher.finish());
let ident = Ident::new(&mangled_name, Span::call_site());

quote! {
#[derive(paste::EnumHack)]
enum #ident {
Value = (stringify! {
#output
}, 0).1,
}
}
// #[derive(paste::EnumHack)]
// enum #ident {
// Value = (stringify! {
// #output
// }, 0).1,
// }
TokenStream::from_iter(vec![
TokenTree::Punct(Punct::new('#', Spacing::Alone)),
TokenTree::Group(Group::new(
Delimiter::Bracket,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("derive", Span::call_site())),
TokenTree::Group(Group::new(
Delimiter::Parenthesis,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("paste", Span::call_site())),
TokenTree::Punct(Punct::new(':', Spacing::Joint)),
TokenTree::Punct(Punct::new(':', Spacing::Alone)),
TokenTree::Ident(Ident::new("EnumHack", Span::call_site())),
]),
)),
]),
)),
TokenTree::Ident(Ident::new("enum", Span::call_site())),
TokenTree::Ident(ident),
TokenTree::Group(Group::new(
Delimiter::Brace,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("Value", Span::call_site())),
TokenTree::Punct(Punct::new('=', Spacing::Alone)),
TokenTree::Group(Group::new(
Delimiter::Parenthesis,
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("stringify", Span::call_site())),
TokenTree::Punct(Punct::new('!', Spacing::Alone)),
TokenTree::Group(Group::new(Delimiter::Brace, output)),
TokenTree::Punct(Punct::new(',', Spacing::Alone)),
TokenTree::Literal(Literal::usize_unsuffixed(0)),
]),
)),
TokenTree::Punct(Punct::new('.', Spacing::Alone)),
TokenTree::Literal(Literal::usize_unsuffixed(1)),
TokenTree::Punct(Punct::new(',', Spacing::Alone)),
]),
)),
])
}

struct EnumHack {
token_stream: TokenStream,
}

impl Parse for EnumHack {
fn parse(input: ParseStream) -> Result<Self> {
input.parse::<Token![enum]>()?;
input.parse::<Ident>()?;

let braces;
braced!(braces in input);
braces.parse::<Ident>()?;
braces.parse::<Token![=]>()?;

let parens;
parenthesized!(parens in braces);
parens.parse::<Ident>()?;
parens.parse::<Token![!]>()?;

let inner;
braced!(inner in parens);
let token_stream: TokenStream = inner.parse()?;

parens.parse::<Token![,]>()?;
parens.parse::<TokenTree>()?;
braces.parse::<Token![.]>()?;
braces.parse::<TokenTree>()?;
braces.parse::<Token![,]>()?;

Ok(EnumHack { token_stream })
}
}

pub fn extract(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let inner = parse_macro_input!(input as EnumHack);
proc_macro::TokenStream::from(inner.token_stream)
pub fn extract(input: TokenStream) -> TokenStream {
let mut tokens = input.into_iter();
let _ = tokens.next().expect("enum");
let _ = tokens.next().expect("#ident");
let mut braces = match tokens.next().expect("{...}") {
TokenTree::Group(group) => group.stream().into_iter(),
_ => unreachable!("{...}"),
};
let _ = braces.next().expect("Value");
let _ = braces.next().expect("=");
let mut parens = match braces.next().expect("(...)") {
TokenTree::Group(group) => group.stream().into_iter(),
_ => unreachable!("(...)"),
};
let _ = parens.next().expect("stringify");
let _ = parens.next().expect("!");
let token_stream = match parens.next().expect("{...}") {
TokenTree::Group(group) => group.stream(),
_ => unreachable!("{...}"),
};
let _ = parens.next().expect(",");
let _ = parens.next().expect("0");
let _ = braces.next().expect(".");
let _ = braces.next().expect("1");
let _ = braces.next().expect(",");
token_stream
}
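
The rewritten `wrap` above hand-builds, out of raw `proc_macro` tokens, the same item shape the old `quote!` template produced (kept as the commented-out block in the diff); `extract` then recovers the original tokens from inside the `stringify! { ... }` group after the derive round trip. The generated item looks roughly like this (the hash-derived name is illustrative):

    // Approximate shape of the item emitted by enum_hack::wrap(output);
    // the `_paste_*` name comes from hashing `output`, the value shown is made up.
    #[derive(paste::EnumHack)]
    enum _paste_1234567890 {
        Value = (stringify! {
            /* the original `output` tokens are spliced in here */
        }, 0).1,
    }
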
third_party/rust/paste-impl/src/error.rs (vendored, new file, 47 lines)
@@ -0,0 +1,47 @@
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::iter::FromIterator;

pub type Result<T> = std::result::Result<T, Error>;

pub struct Error {
begin: Span,
end: Span,
msg: String,
}

impl Error {
pub fn new(span: Span, msg: &str) -> Self {
Self::new2(span, span, msg)
}

pub fn new2(begin: Span, end: Span, msg: &str) -> Self {
Error {
begin,
end,
msg: msg.to_owned(),
}
}

pub fn to_compile_error(&self) -> TokenStream {
// compile_error! { $msg }
TokenStream::from_iter(vec![
TokenTree::Ident(Ident::new("compile_error", self.begin)),
TokenTree::Punct({
let mut punct = Punct::new('!', Spacing::Alone);
punct.set_span(self.begin);
punct
}),
TokenTree::Group({
let mut group = Group::new(Delimiter::Brace, {
TokenStream::from_iter(vec![TokenTree::Literal({
let mut string = Literal::string(&self.msg);
string.set_span(self.end);
string
})])
});
group.set_span(self.end);
group
}),
])
}
}
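
This new module takes over error reporting from `syn::Error`: in lib.rs below, `expand_paste` turns any `Error` into a `compile_error!` invocation spanned at the offending tokens. For instance, for the missing-env-var case covered by the updated `no-env-var` UI test further down, `to_compile_error` yields tokens equivalent to:

    // Equivalent expansion of Error::to_compile_error() for that case
    // (message text taken from tests/ui/no-env-var.stderr in this diff):
    compile_error! { "no such env var: \"PASTE_UNKNOWN\"" }
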
third_party/rust/paste-impl/src/lib.rs (vendored, 427 lines changed)
@@ -1,79 +1,124 @@
extern crate proc_macro;

mod enum_hack;
mod error;

use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
use crate::error::{Error, Result};
use proc_macro::{
token_stream, Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree,
};
use proc_macro_hack::proc_macro_hack;
use quote::{quote, ToTokens};
use std::iter::FromIterator;
use syn::parse::{Error, Parse, ParseStream, Parser, Result};
use syn::{parenthesized, parse_macro_input, Lit, LitStr, Token};
use std::iter::{self, FromIterator, Peekable};
use std::panic;

#[proc_macro]
pub fn item(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as PasteInput);
proc_macro::TokenStream::from(input.expanded)
pub fn item(input: TokenStream) -> TokenStream {
expand_paste(input)
}

#[proc_macro]
pub fn item_with_macros(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as PasteInput);
proc_macro::TokenStream::from(enum_hack::wrap(input.expanded))
pub fn item_with_macros(input: TokenStream) -> TokenStream {
enum_hack::wrap(expand_paste(input))
}

#[proc_macro_hack]
pub fn expr(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = parse_macro_input!(input as PasteInput);
let output = input.expanded;
proc_macro::TokenStream::from(quote!({ #output }))
pub fn expr(input: TokenStream) -> TokenStream {
TokenStream::from(TokenTree::Group(Group::new(
Delimiter::Brace,
expand_paste(input),
)))
}

#[doc(hidden)]
#[proc_macro_derive(EnumHack)]
pub fn enum_hack(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
pub fn enum_hack(input: TokenStream) -> TokenStream {
enum_hack::extract(input)
}

struct PasteInput {
expanded: TokenStream,
}

impl Parse for PasteInput {
fn parse(input: ParseStream) -> Result<Self> {
let mut expanded = TokenStream::new();
while !input.is_empty() {
match input.parse()? {
TokenTree::Group(group) => {
let delimiter = group.delimiter();
let content = group.stream();
let span = group.span();
if delimiter == Delimiter::Bracket && is_paste_operation(&content) {
let segments = parse_bracket_as_segments.parse2(content)?;
let pasted = paste_segments(span, &segments)?;
pasted.to_tokens(&mut expanded);
} else if is_none_delimited_single_ident_or_lifetime(delimiter, &content) {
content.to_tokens(&mut expanded);
} else {
let nested = PasteInput::parse.parse2(content)?;
let mut group = Group::new(delimiter, nested.expanded);
group.set_span(span);
group.to_tokens(&mut expanded);
}
}
other => other.to_tokens(&mut expanded),
}
}
Ok(PasteInput { expanded })
fn expand_paste(input: TokenStream) -> TokenStream {
let mut contains_paste = false;
match expand(input, &mut contains_paste) {
Ok(expanded) => expanded,
Err(err) => err.to_compile_error(),
}
}

fn is_paste_operation(input: &TokenStream) -> bool {
let input = input.clone();
parse_bracket_as_segments.parse2(input).is_ok()
fn expand(input: TokenStream, contains_paste: &mut bool) -> Result<TokenStream> {
let mut expanded = TokenStream::new();
let (mut prev_colon, mut colon) = (false, false);
let mut prev_none_group = None::<Group>;
let mut tokens = input.into_iter().peekable();
loop {
let token = tokens.next();
if let Some(group) = prev_none_group.take() {
if match (&token, tokens.peek()) {
(Some(TokenTree::Punct(fst)), Some(TokenTree::Punct(snd))) => {
fst.as_char() == ':' && snd.as_char() == ':' && fst.spacing() == Spacing::Joint
}
_ => false,
} {
expanded.extend(group.stream());
*contains_paste = true;
} else {
expanded.extend(iter::once(TokenTree::Group(group)));
}
}
match token {
Some(TokenTree::Group(group)) => {
let delimiter = group.delimiter();
let content = group.stream();
let span = group.span();
if delimiter == Delimiter::Bracket && is_paste_operation(&content) {
let segments = parse_bracket_as_segments(content, span)?;
let pasted = paste_segments(span, &segments)?;
expanded.extend(pasted);
*contains_paste = true;
} else if is_none_delimited_flat_group(delimiter, &content) {
expanded.extend(content);
*contains_paste = true;
} else {
let mut group_contains_paste = false;
let nested = expand(content, &mut group_contains_paste)?;
let group = if group_contains_paste {
let mut group = Group::new(delimiter, nested);
group.set_span(span);
*contains_paste = true;
group
} else {
group.clone()
};
if delimiter != Delimiter::None {
expanded.extend(iter::once(TokenTree::Group(group)));
} else if prev_colon {
expanded.extend(group.stream());
*contains_paste = true;
} else {
prev_none_group = Some(group);
}
}
prev_colon = false;
colon = false;
}
Some(other) => {
match &other {
TokenTree::Punct(punct) if punct.as_char() == ':' => {
prev_colon = colon;
colon = punct.spacing() == Spacing::Joint;
}
_ => {
prev_colon = false;
colon = false;
}
}
expanded.extend(iter::once(other));
}
None => return Ok(expanded),
}
}
}

// https://github.com/dtolnay/paste/issues/26
fn is_none_delimited_single_ident_or_lifetime(delimiter: Delimiter, input: &TokenStream) -> bool {
fn is_none_delimited_flat_group(delimiter: Delimiter, input: &TokenStream) -> bool {
if delimiter != Delimiter::None {
return false;
}
@@ -82,77 +127,205 @@ fn is_none_delimited_single_ident_or_lifetime(delimiter: Delimiter, input: &Toke
enum State {
Init,
Ident,
Literal,
Apostrophe,
Lifetime,
Colon1,
Colon2,
}

let mut state = State::Init;
for tt in input.clone() {
state = match (state, &tt) {
(State::Init, TokenTree::Ident(_)) => State::Ident,
(State::Init, TokenTree::Literal(_)) => State::Literal,
(State::Init, TokenTree::Punct(punct)) if punct.as_char() == '\'' => State::Apostrophe,
(State::Apostrophe, TokenTree::Ident(_)) => State::Lifetime,
(State::Ident, TokenTree::Punct(punct))
if punct.as_char() == ':' && punct.spacing() == Spacing::Joint =>
{
State::Colon1
}
(State::Colon1, TokenTree::Punct(punct))
if punct.as_char() == ':' && punct.spacing() == Spacing::Alone =>
{
State::Colon2
}
(State::Colon2, TokenTree::Ident(_)) => State::Ident,
_ => return false,
};
}
state == State::Ident || state == State::Lifetime

state == State::Ident || state == State::Literal || state == State::Lifetime
}

struct LitStr {
value: String,
span: Span,
}

struct Colon {
span: Span,
}

enum Segment {
String(String),
Apostrophe(Span),
Env(LitStr),
Modifier(Token![:], Ident),
Modifier(Colon, Ident),
}

fn parse_bracket_as_segments(input: ParseStream) -> Result<Vec<Segment>> {
input.parse::<Token![<]>()?;
fn is_paste_operation(input: &TokenStream) -> bool {
let mut tokens = input.clone().into_iter();

let segments = parse_segments(input)?;

input.parse::<Token![>]>()?;
if !input.is_empty() {
return Err(input.error("invalid input"));
match &tokens.next() {
Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
_ => return false,
}

let mut has_token = false;
loop {
match &tokens.next() {
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {
return has_token && tokens.next().is_none();
}
Some(_) => has_token = true,
None => return false,
}
}
Ok(segments)
}

fn parse_segments(input: ParseStream) -> Result<Vec<Segment>> {
fn parse_bracket_as_segments(input: TokenStream, scope: Span) -> Result<Vec<Segment>> {
let mut tokens = input.into_iter().peekable();

match &tokens.next() {
Some(TokenTree::Punct(punct)) if punct.as_char() == '<' => {}
Some(wrong) => return Err(Error::new(wrong.span(), "expected `<`")),
None => return Err(Error::new(scope, "expected `[< ... >]`")),
}

let segments = parse_segments(&mut tokens, scope)?;

match &tokens.next() {
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {}
Some(wrong) => return Err(Error::new(wrong.span(), "expected `>`")),
None => return Err(Error::new(scope, "expected `[< ... >]`")),
}

match tokens.next() {
Some(unexpected) => Err(Error::new(
unexpected.span(),
"unexpected input, expected `[< ... >]`",
)),
None => Ok(segments),
}
}

fn parse_segments(
tokens: &mut Peekable<token_stream::IntoIter>,
scope: Span,
) -> Result<Vec<Segment>> {
let mut segments = Vec::new();
while !(input.is_empty() || input.peek(Token![>])) {
match input.parse()? {
while match tokens.peek() {
None => false,
Some(TokenTree::Punct(punct)) => punct.as_char() != '>',
Some(_) => true,
} {
match tokens.next().unwrap() {
TokenTree::Ident(ident) => {
let mut fragment = ident.to_string();
if fragment.starts_with("r#") {
fragment = fragment.split_off(2);
}
if fragment == "env" && input.peek(Token![!]) {
input.parse::<Token![!]>()?;
let arg;
parenthesized!(arg in input);
let var: LitStr = arg.parse()?;
segments.push(Segment::Env(var));
if fragment == "env"
&& match tokens.peek() {
Some(TokenTree::Punct(punct)) => punct.as_char() == '!',
_ => false,
}
{
tokens.next().unwrap(); // `!`
let expect_group = tokens.next();
let parenthesized = match &expect_group {
Some(TokenTree::Group(group))
if group.delimiter() == Delimiter::Parenthesis =>
{
group
}
Some(wrong) => return Err(Error::new(wrong.span(), "expected `(`")),
None => return Err(Error::new(scope, "expected `(` after `env!`")),
};
let mut inner = parenthesized.stream().into_iter();
let lit = match inner.next() {
Some(TokenTree::Literal(lit)) => lit,
Some(wrong) => {
return Err(Error::new(wrong.span(), "expected string literal"))
}
None => {
return Err(Error::new2(
ident.span(),
parenthesized.span(),
"expected string literal as argument to env! macro",
))
}
};
let lit_string = lit.to_string();
if lit_string.starts_with('"')
&& lit_string.ends_with('"')
&& lit_string.len() >= 2
{
// TODO: maybe handle escape sequences in the string if
// someone has a use case.
segments.push(Segment::Env(LitStr {
value: lit_string[1..lit_string.len() - 1].to_owned(),
span: lit.span(),
}));
} else {
return Err(Error::new(lit.span(), "expected string literal"));
}
if let Some(unexpected) = inner.next() {
return Err(Error::new(
unexpected.span(),
"unexpected token in env! macro",
));
}
} else {
segments.push(Segment::String(fragment));
}
}
TokenTree::Literal(lit) => {
let value = match syn::parse_str(&lit.to_string())? {
Lit::Str(string) => string.value().replace('-', "_"),
Lit::Int(_) => lit.to_string(),
_ => return Err(Error::new(lit.span(), "unsupported literal")),
};
segments.push(Segment::String(value));
let mut lit_string = lit.to_string();
if lit_string.contains(&['#', '\\', '.', '+'][..]) {
return Err(Error::new(lit.span(), "unsupported literal"));
}
lit_string = lit_string
.replace('"', "")
.replace('\'', "")
.replace('-', "_");
segments.push(Segment::String(lit_string));
}
TokenTree::Punct(punct) => match punct.as_char() {
'_' => segments.push(Segment::String("_".to_string())),
'_' => segments.push(Segment::String("_".to_owned())),
'\'' => segments.push(Segment::Apostrophe(punct.span())),
':' => segments.push(Segment::Modifier(Token![:](punct.span()), input.parse()?)),
':' => {
let colon = Colon { span: punct.span() };
let ident = match tokens.next() {
Some(TokenTree::Ident(ident)) => ident,
wrong => {
let span = wrong.as_ref().map_or(scope, TokenTree::span);
return Err(Error::new(span, "expected identifier after `:`"));
}
};
segments.push(Segment::Modifier(colon, ident));
}
_ => return Err(Error::new(punct.span(), "unexpected punct")),
},
TokenTree::Group(group) => {
if group.delimiter() == Delimiter::None {
let nested = parse_segments.parse2(group.stream())?;
let mut inner = group.stream().into_iter().peekable();
let nested = parse_segments(&mut inner, group.span())?;
if let Some(unexpected) = inner.next() {
return Err(Error::new(unexpected.span(), "unexpected token"));
}
segments.extend(nested);
} else {
return Err(Error::new(group.span(), "unexpected token"));
@@ -179,65 +352,87 @@ fn paste_segments(span: Span, segments: &[Segment]) -> Result<TokenStream> {
is_lifetime = true;
}
Segment::Env(var) => {
let resolved = match std::env::var(var.value()) {
let resolved = match std::env::var(&var.value) {
Ok(resolved) => resolved,
Err(_) => {
return Err(Error::new(var.span(), "no such env var"));
return Err(Error::new(
var.span,
&format!("no such env var: {:?}", var.value),
));
}
};
let resolved = resolved.replace('-', "_");
evaluated.push(resolved);
}
Segment::Modifier(colon, ident) => {
let span = quote!(#colon #ident);
let last = match evaluated.pop() {
Some(last) => last,
None => return Err(Error::new_spanned(span, "unexpected modifier")),
None => {
return Err(Error::new2(colon.span, ident.span(), "unexpected modifier"))
}
};
if ident == "lower" {
evaluated.push(last.to_lowercase());
} else if ident == "upper" {
evaluated.push(last.to_uppercase());
} else if ident == "snake" {
let mut acc = String::new();
let mut prev = '_';
for ch in last.chars() {
if ch.is_uppercase() && prev != '_' {
acc.push('_');
}
acc.push(ch);
prev = ch;
match ident.to_string().as_str() {
"lower" => {
evaluated.push(last.to_lowercase());
}
evaluated.push(acc.to_lowercase());
} else if ident == "camel" {
let mut acc = String::new();
let mut prev = '_';
for ch in last.chars() {
if ch != '_' {
if prev == '_' {
for chu in ch.to_uppercase() {
acc.push(chu);
}
} else if prev.is_uppercase() {
for chl in ch.to_lowercase() {
acc.push(chl);
}
} else {
acc.push(ch);
"upper" => {
evaluated.push(last.to_uppercase());
}
"snake" => {
let mut acc = String::new();
let mut prev = '_';
for ch in last.chars() {
if ch.is_uppercase() && prev != '_' {
acc.push('_');
}
acc.push(ch);
prev = ch;
}
prev = ch;
evaluated.push(acc.to_lowercase());
}
"camel" => {
let mut acc = String::new();
let mut prev = '_';
for ch in last.chars() {
if ch != '_' {
if prev == '_' {
for chu in ch.to_uppercase() {
acc.push(chu);
}
} else if prev.is_uppercase() {
for chl in ch.to_lowercase() {
acc.push(chl);
}
} else {
acc.push(ch);
}
}
prev = ch;
}
evaluated.push(acc);
}
_ => {
return Err(Error::new2(
colon.span,
ident.span(),
"unsupported modifier",
));
}
evaluated.push(acc);
} else {
return Err(Error::new_spanned(span, "unsupported modifier"));
}
}
}
}

let pasted = evaluated.into_iter().collect::<String>();
let ident = TokenTree::Ident(Ident::new(&pasted, span));
let ident = match panic::catch_unwind(|| Ident::new(&pasted, span)) {
Ok(ident) => TokenTree::Ident(ident),
Err(_) => {
return Err(Error::new(
span,
&format!("`{:?}` is not a valid identifier", pasted),
));
}
};
let tokens = if is_lifetime {
let apostrophe = TokenTree::Punct(Punct::new('\'', Spacing::Joint));
vec![apostrophe, ident]
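
The modifier arms above (`lower`, `upper`, `snake`, `camel`) transform the most recently pasted segment inside `[<...>]`. A small usage sketch of the `:snake` branch (illustrative only; the names are invented for the example, not taken from the crate's test suite):

    // Illustrative sketch: `:snake` inserts '_' before interior uppercase letters and
    // lowercases the result, so `FooBar` pastes as `foo_bar` (see the "snake" arm above).
    macro_rules! make_getter {
        ($name:ident) => {
            paste::item! {
                pub fn [<get_ $name:snake>]() -> &'static str {
                    stringify!($name)
                }
            }
        };
    }

    make_getter!(FooBar); // defines `get_foo_bar`

    fn main() {
        assert_eq!(get_foo_bar(), "FooBar");
    }
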
third_party/rust/paste/.cargo-checksum.json (vendored, 2 lines changed)
@@ -1 +1 @@
{"files":{"Cargo.toml":"477260b435d48cb99849ea0459048d6b11107a50fdc3ba65e36687aff6b343e3","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7f697f191d6ffb32881a6bcf535e817ebbebaaa11b9aacf4f0b26fd32e2cc201","README.md":"5c26782fd4a0c9f5a2de81d692c1069c0ad846a4a00d75fe1f0b5f8f197e0041","src/lib.rs":"1863f79ae0c878576689f41262dbdda46c1340c4dba8fe0a1c44397648e9cbf1","tests/compiletest.rs":"0a52a44786aea1c299c695bf948b2ed2081e4cc344e5c2cadceab4eb03d0010d","tests/test.rs":"0470e71f6a8b55c75ec2a736e787705db0d3ee9f24505fa2661521de4265d0c4","tests/ui/case-warning.rs":"905a6721d6ca7820cb37acfa9d684ba0e6c6ef043e75d494b697d0b97a8894d0","tests/ui/case-warning.stderr":"c86e816d08af760a01840b1de831c57751469ed6cb1fe1e51af527cee2359dac","tests/ui/no-env-var.rs":"3b93779a3889236256520addafa3210fa95115b4a37b43890e726c0e4c6b41c4","tests/ui/no-env-var.stderr":"99283b6275c2572129109327d647bbcc789e2321ac1267be61d3f856c64c4108","tests/ui/unexpected-modifier.rs":"61c2a233cd947fdf04c44472608fbf04d797bfa3dac8d66ccdb91b01c7a27ca0","tests/ui/unexpected-modifier.stderr":"33b47ba0cfea8e93cd9e30456cc522e35d4ef7c64d5a46eb943e46ab20dc037e","tests/ui/unsupported-modifier.rs":"5b7159f9467ffaa790c82c0126a99e770a0637eadc4c0f9b5cdf43d0034fdc9e","tests/ui/unsupported-modifier.stderr":"5020c31d20e558846b82036dd940e09d92e40e09b5431f4c8450bbc6a3ba5114"},"package":"0a229b1c58c692edcaa5b9b0948084f130f55d2dcc15b02fcc5340b2b4521476"}
{"files":{"Cargo.toml":"064af5acccc5515046c87e41197abcf454e90e6c7ff50f39f5340f292eb95e9b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"7f697f191d6ffb32881a6bcf535e817ebbebaaa11b9aacf4f0b26fd32e2cc201","README.md":"335f564b68e242d62b67840255d3fdad348fd51d5bb101b60af0092fc5ff99d8","src/lib.rs":"70031c2c904303aa950aeddd83a86f463eec273ea27317846e9adf716af2a08b","tests/compiletest.rs":"0a52a44786aea1c299c695bf948b2ed2081e4cc344e5c2cadceab4eb03d0010d","tests/test.rs":"1494b8209233e06c649df28312b8ee5e7c5b8021a9098132748f1500ee3f33c0","tests/ui/case-warning.rs":"905a6721d6ca7820cb37acfa9d684ba0e6c6ef043e75d494b697d0b97a8894d0","tests/ui/case-warning.stderr":"c86e816d08af760a01840b1de831c57751469ed6cb1fe1e51af527cee2359dac","tests/ui/env-empty.rs":"048eef1cc976d8614395859dc73dfccb1da58c534f9186c0333c8e8416488a1b","tests/ui/env-empty.stderr":"5b49cf08982fbd405873ba24ba1c43be789aeef9f8cfcc0c1aa7d580ebdb8655","tests/ui/env-non-string.rs":"cae61651d8396a68b08e46b5b0f5cc0775dabcb5d1b0adfd2755b621574b7f5e","tests/ui/env-non-string.stderr":"9842a3e9726d2fd2e108f12965011dd236b4cb51c2419f7bb8d9fc3fa99ea8b9","tests/ui/env-suffix.rs":"60a8fbcd2158c87ecdbbbe12b7d5ccd5513a36ad89639b669af06b39a099a10e","tests/ui/env-suffix.stderr":"eb5dac88beee8e04ec3d1d84acde701633dd232a331171b4fb6062f018e15197","tests/ui/env-unexpected.rs":"3f9383ffed8de76c4b51702926e32d477d6ffddfc47353f1ec4fb5d6c33cab69","tests/ui/env-unexpected.stderr":"702beeed06dbf977ac8772e8503c7e64f7810172dc4a802a3091b3048b266831","tests/ui/invalid-ident.rs":"04d7d4dceb69c4863f6c83af0bc55cb530d59fff26bea2757bc1b3461953f22f","tests/ui/invalid-ident.stderr":"df4fad1ac86261ff24eed29bb3a377f6cb96987c7acd470514cea212e96d703a","tests/ui/missing-paren-on-env.rs":"349699dd03df912d9dc0a9b26c2982040ff6358819169cfb9595608a8eaff28f","tests/ui/missing-paren-on-env.stderr":"75292b34e5f8508a2880f4031ccfee58938c40ddb7b8d5ffe23e56646ccc0b54","tests/ui/no-env-var.rs":"3b93779a3889236256520addafa3210fa95115b4a37b43890e726c0e4c6b41c4","tests/ui/no-env-var.stderr":"61a27339ad4be930da94d90fdba814745cd4831ecf61e8e468067df73067ce52","tests/ui/no-ident-after-colon.rs":"0d3b2d881cfe2f0c3e6a340be74ca7cf2ad0355dcf808ab0563ab14b5729d61d","tests/ui/no-ident-after-colon.stderr":"e9c6ea76ac796095fff9bdca26a3d22f4bae4481971b4594f58c878aa9ba60aa","tests/ui/unexpected-group.rs":"89e5a33d615ed92fb61b99c666f8579ed83d7039a2784e7c9a2812cc43977244","tests/ui/unexpected-group.stderr":"9c79da4bb8a7755745ddef5dc6b0df136a7138a9f1c64daaa238a6207441f744","tests/ui/unexpected-modifier.rs":"61c2a233cd947fdf04c44472608fbf04d797bfa3dac8d66ccdb91b01c7a27ca0","tests/ui/unexpected-modifier.stderr":"33b47ba0cfea8e93cd9e30456cc522e35d4ef7c64d5a46eb943e46ab20dc037e","tests/ui/unexpected-punct.rs":"c52dc3b112c61e109690a7ed78fc6fcd7ea95413e30143f8f9cd532ccc2b3d24","tests/ui/unexpected-punct.stderr":"b5d2b17e2a62fd6db2bf5962cb95ee6e7628950e889a6fd05ae528ba01e5722f","tests/ui/unsupported-literal.rs":"e74336730fdfc643ef0a2fbc2b32bb3b731cf8b43e963161531a9331020d1b26","tests/ui/unsupported-literal.stderr":"df9198b3cdb188a19bf919d43ee0f1d6946da45b6449103db8f5e5a5c85a1e08","tests/ui/unsupported-modifier.rs":"5b7159f9467ffaa790c82c0126a99e770a0637eadc4c0f9b5cdf43d0034fdc9e","tests/ui/unsupported-modifier.stderr":"5020c31d20e558846b82036dd940e09d92e40e09b5431f4c8450bbc6a3ba5114"},"package":"45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880"}
third_party/rust/paste/Cargo.toml (vendored, 4 lines changed)
@@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "paste"
version = "0.1.12"
version = "0.1.18"
authors = ["David Tolnay <dtolnay@gmail.com>"]
description = "Macros for all your token pasting needs"
readme = "README.md"
@@ -22,7 +22,7 @@ repository = "https://github.com/dtolnay/paste"
[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
[dependencies.paste-impl]
version = "=0.1.12"
version = "=0.1.18"

[dependencies.proc-macro-hack]
version = "0.5.9"
third_party/rust/paste/README.md (vendored, 7 lines changed)
@@ -1,9 +1,10 @@
Macros for all your token pasting needs
=======================================

[![Build Status](https://api.travis-ci.org/dtolnay/paste.svg?branch=master)](https://travis-ci.org/dtolnay/paste)
[![Latest Version](https://img.shields.io/crates/v/paste.svg)](https://crates.io/crates/paste)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/paste)
[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/paste-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/paste)
[<img alt="crates.io" src="https://img.shields.io/crates/v/paste.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/paste)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-paste-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/paste)
[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/paste/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/paste/actions?query=branch%3Amaster)

The nightly-only [`concat_idents!`] macro in the Rust standard library is
notoriously underpowered in that its concatenated identifiers can only refer to
third_party/rust/paste/src/lib.rs (vendored, 8 lines changed)
@@ -1,3 +1,11 @@
//! [![github]](https://github.com/dtolnay/paste) [![crates-io]](https://crates.io/crates/paste) [![docs-rs]](https://docs.rs/paste)
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
//!
//! <br>
//!
//! The nightly-only [`concat_idents!`] macro in the Rust standard library is
//! notoriously underpowered in that its concatenated identifiers can only refer to
//! existing items, they can never be used to define something new.
third_party/rust/paste/tests/test.rs (vendored, 135 lines changed)
@@ -153,6 +153,14 @@ fn test_local_variable() {
}
}

#[test]
fn test_empty() {
paste::expr! {
assert_eq!(stringify!([<y y>]), "yy");
assert_eq!(stringify!([<>]).replace(' ', ""), "[<>]");
}
}

mod test_none_delimited_single_ident {
macro_rules! m {
($id:ident) => {
@@ -305,3 +313,130 @@ fn test_env_to_camel() {
let _ = LIBPaste;
}
}

mod test_doc_expr {
// https://github.com/dtolnay/paste/issues/29

macro_rules! doc_expr {
($doc:expr) => {
paste::item! {
#[doc = $doc]
pub struct S;
}
};
}

doc_expr!(stringify!());

#[test]
fn test_doc_expr() {
let _: S;
}
}

mod test_type_in_path {
// https://github.com/dtolnay/paste/issues/31

mod keys {
#[derive(Default)]
pub struct Mib<T = ()>(std::marker::PhantomData<T>);
}

macro_rules! types {
($mib:ty) => {
paste::item! {
#[derive(Default)]
pub struct S(pub keys::$mib);
}
};
}

macro_rules! write {
($fn:ident, $field:ty) => {
paste::item! {
pub fn $fn() -> $field {
$field::default()
}
}
};
}

types! {Mib<[usize; 2]>}
write! {get_a, keys::Mib}
write! {get_b, usize}

#[test]
fn test_type_in_path() {
let _: S;
let _ = get_a;
let _ = get_b;
}
}

mod test_type_in_fn_arg {
// https://github.com/dtolnay/paste/issues/38

fn _jit_address(_node: ()) {}

macro_rules! jit_reexport {
($fn:ident, $arg:ident : $typ:ty) => {
paste::item! {
pub fn $fn($arg: $typ) {
[<_jit_ $fn>]($arg);
}
}
};
}

jit_reexport!(address, node: ());

#[test]
fn test_type_in_fn_arg() {
let _ = address;
}
}

mod test_pat_in_expr_position {
// https://github.com/xiph/rav1e/pull/2324/files

macro_rules! rav1e_bad {
($e:pat) => {
paste::item! {
#[test]
fn test() {
let _ = $e;
}
}
};
}

rav1e_bad!(std::fmt::Error);
}

#[cfg(not(no_literal_matcher))]
mod test_x86_feature_literal {
// work around https://github.com/rust-lang/rust/issues/72726

#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
macro_rules! my_is_x86_feature_detected {
($feat:literal) => {
paste::item! {
#[test]
fn test() {
let _ = is_x86_feature_detected!($feat);
}
}
};
}

#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))]
macro_rules! my_is_x86_feature_detected {
($feat:literal) => {
#[ignore]
#[test]
fn test() {}
};
}

my_is_x86_feature_detected!("mmx");
}
third_party/rust/paste/tests/ui/env-empty.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<env!()>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/env-empty.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: expected string literal as argument to env! macro
--> $DIR/env-empty.rs:2:10
|
2 | fn [<env!()>]() {}
| ^^^^^^
third_party/rust/paste/tests/ui/env-non-string.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<env!(1.31)>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/env-non-string.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: expected string literal
--> $DIR/env-non-string.rs:2:15
|
2 | fn [<env!(1.31)>]() {}
| ^^^^
third_party/rust/paste/tests/ui/env-suffix.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<env!("VAR"suffix)>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/env-suffix.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: expected string literal
--> $DIR/env-suffix.rs:2:15
|
2 | fn [<env!("VAR"suffix)>]() {}
| ^^^^^^^^^^^
third_party/rust/paste/tests/ui/env-unexpected.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<env!("VAR" "VAR")>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/env-unexpected.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: unexpected token in env! macro
--> $DIR/env-unexpected.rs:2:21
|
2 | fn [<env!("VAR" "VAR")>]() {}
| ^^^^^
third_party/rust/paste/tests/ui/invalid-ident.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<0 f>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/invalid-ident.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: `"0f"` is not a valid identifier
--> $DIR/invalid-ident.rs:2:8
|
2 | fn [<0 f>]() {}
| ^^^^^^^
third_party/rust/paste/tests/ui/missing-paren-on-env.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<env! huh>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/missing-paren-on-env.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: expected `(`
--> $DIR/missing-paren-on-env.rs:2:15
|
2 | fn [<env! huh>]() {}
| ^^^
third_party/rust/paste/tests/ui/no-env-var.stderr (vendored)
@@ -1,4 +1,4 @@
error: no such env var
error: no such env var: "PASTE_UNKNOWN"
--> $DIR/no-env-var.rs:2:17
|
2 | fn [<a env!("PASTE_UNKNOWN") b>]() {}
third_party/rust/paste/tests/ui/no-ident-after-colon.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<name:0>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/no-ident-after-colon.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: expected identifier after `:`
--> $DIR/no-ident-after-colon.rs:2:15
|
2 | fn [<name:0>]() {}
| ^
third_party/rust/paste/tests/ui/unexpected-group.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<a {} b>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/unexpected-group.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: unexpected token
--> $DIR/unexpected-group.rs:2:12
|
2 | fn [<a {} b>]() {}
| ^^
third_party/rust/paste/tests/ui/unexpected-punct.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<a + b>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/unexpected-punct.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: unexpected punct
--> $DIR/unexpected-punct.rs:2:12
|
2 | fn [<a + b>]() {}
| ^
third_party/rust/paste/tests/ui/unsupported-literal.rs (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
paste::item! {
fn [<1e+100>]() {}
}

fn main() {}
third_party/rust/paste/tests/ui/unsupported-literal.stderr (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
error: unsupported literal
--> $DIR/unsupported-literal.rs:2:10
|
2 | fn [<1e+100>]() {}
| ^^^^^^