Prepare for the next proc-macro2 release

Depends on alexcrichton/proc-macro2#90
Alex Crichton, 2018-05-16 09:56:57 -07:00, committed by David Tolnay
parent c1ff101f5f
commit 9604c635f8
6 changed files with 81 additions and 256 deletions
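For downstream crates, the net effect of this diff: quote's own `Tokens` buffer is replaced by `proc_macro2::TokenStream`, the `append*` helpers move to a new `TokenStreamExt` extension trait, and proc-macro2 0.4 renames `Term` to `Ident` and `Op` to `Punct`. A minimal sketch of a `ToTokens` impl after the change; the `Unit` type here is hypothetical, not part of the diff:

    // Sketch only: `Unit` is a made-up type used to illustrate the rename.
    extern crate proc_macro2;
    extern crate quote;

    use proc_macro2::{Ident, Span, TokenStream};
    use quote::{ToTokens, TokenStreamExt};

    struct Unit;

    impl ToTokens for Unit {
        // Previously this took `&mut quote::Tokens` and appended a `Term`;
        // after this commit the buffer is a plain proc_macro2::TokenStream
        // and `append` comes from the TokenStreamExt extension trait.
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append(Ident::new("unit", Span::call_site()));
        }
    }

    fn main() {
        // `into_tokens` was renamed to `into_token_stream`.
        let stream: TokenStream = Unit.into_token_stream();
        println!("{}", stream);
    }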

View File

@@ -10,4 +10,3 @@ rust:
script:
- cargo test
- cargo test --no-default-features

View File

@@ -11,10 +11,4 @@ readme = "README.md"
 include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
 [dependencies]
-proc-macro2 = { version = "0.3", default-features = false }
-[features]
-default = ["proc-macro"]
-# Disabling the proc-macro feature removes the dynamic library dependency on
-# libproc_macro in the rustc compiler.
-proc-macro = ["proc-macro2/proc-macro"]
+proc-macro2 = { version = "0.4", default-features = false }

View File

@@ -99,7 +99,7 @@ extern crate proc_macro;
extern crate proc_macro2;
mod tokens;
pub use tokens::Tokens;
pub use tokens::TokenStreamExt;
mod to_tokens;
pub use to_tokens::ToTokens;
@@ -111,9 +111,9 @@ pub mod __rt {
pub use proc_macro2::*;
// Not public API.
pub fn parse(tokens: &mut ::Tokens, span: Span, s: &str) {
pub fn parse(tokens: &mut TokenStream, span: Span, s: &str) {
let s: TokenStream = s.parse().expect("invalid token stream");
tokens.append_all(s.into_iter().map(|mut t| {
tokens.extend(s.into_iter().map(|mut t| {
t.set_span(span);
t
}));
@@ -266,8 +266,8 @@ macro_rules! quote {
/// # extern crate quote;
/// # extern crate proc_macro2;
/// #
/// # use quote::{Tokens, ToTokens};
/// # use proc_macro2::Span;
/// # use quote::{TokenStreamExt, ToTokens};
/// # use proc_macro2::{Span, TokenStream};
/// #
/// # struct Type;
/// #
@@ -278,7 +278,7 @@ macro_rules! quote {
/// # }
/// #
/// # impl ToTokens for Type {
/// # fn to_tokens(&self, _tokens: &mut Tokens) {}
/// # fn to_tokens(&self, _tokens: &mut TokenStream) {}
/// # }
/// #
/// # fn main() {
@@ -314,7 +314,7 @@ macro_rules! quote {
macro_rules! quote_spanned {
($span:expr=> $($tt:tt)*) => {
{
let mut _s = $crate::Tokens::new();
let mut _s = $crate::__rt::TokenStream::empty();
let _span = $span;
quote_each_token!(_s _span $($tt)*);
_s
@@ -452,13 +452,13 @@ macro_rules! quote_each_token {
($tokens:ident $span:ident # [ $($inner:tt)* ] $($rest:tt)*) => {
quote_each_token!($tokens $span #);
$tokens.append({
$tokens.extend({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Bracket,
quote_spanned!($span=> $($inner)*).into(),
);
g.set_span($span);
g
Some($crate::__rt::TokenTree::from(g))
});
quote_each_token!($tokens $span $($rest)*);
};
@@ -469,37 +469,37 @@ macro_rules! quote_each_token {
};
($tokens:ident $span:ident ( $($first:tt)* ) $($rest:tt)*) => {
$tokens.append({
$tokens.extend({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Parenthesis,
quote_spanned!($span=> $($first)*).into(),
);
g.set_span($span);
g
Some($crate::__rt::TokenTree::from(g))
});
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident [ $($first:tt)* ] $($rest:tt)*) => {
$tokens.append({
$tokens.extend({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Bracket,
quote_spanned!($span=> $($first)*).into(),
);
g.set_span($span);
g
Some($crate::__rt::TokenTree::from(g))
});
quote_each_token!($tokens $span $($rest)*);
};
($tokens:ident $span:ident { $($first:tt)* } $($rest:tt)*) => {
$tokens.append({
$tokens.extend({
let mut g = $crate::__rt::Group::new(
$crate::__rt::Delimiter::Brace,
quote_spanned!($span=> $($first)*).into(),
);
g.set_span($span);
g
Some($crate::__rt::TokenTree::from(g))
});
quote_each_token!($tokens $span $($rest)*);
};
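With the macro changes above, `quote!` and `quote_spanned!` now accumulate directly into a `proc_macro2::TokenStream` (seeded with `TokenStream::empty()` and grown through `Extend`) rather than a `Tokens` buffer. A sketch of how a caller might use the result; the `make_impl` helper and the `Point` identifier are invented for illustration:

    #[macro_use]
    extern crate quote;
    extern crate proc_macro2;

    use proc_macro2::{Ident, Span, TokenStream};

    // Hypothetical expansion helper: note the return type is now
    // proc_macro2::TokenStream rather than quote::Tokens.
    fn make_impl(name: &Ident) -> TokenStream {
        quote! {
            impl #name {
                fn answer() -> u32 {
                    42
                }
            }
        }
    }

    fn main() {
        let name = Ident::new("Point", Span::call_site());
        let tokens = make_impl(&name);
        // Inside a real procedural macro this would typically be returned
        // as proc_macro::TokenStream via `.into()`.
        println!("{}", tokens);
    }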

View File

@@ -1,8 +1,8 @@
use super::Tokens;
use super::TokenStreamExt;
use std::borrow::Cow;
use proc_macro2::{Group, Literal, Op, Span, Term, TokenStream, TokenTree};
use proc_macro2::{Group, Literal, Punct, Span, Ident, TokenStream, TokenTree};
/// Types that can be interpolated inside a [`quote!`] invocation.
///
@@ -15,10 +15,10 @@ pub trait ToTokens {
///
/// ```
/// extern crate quote;
/// use quote::{Tokens, ToTokens};
/// use quote::{TokenStreamExt, ToTokens};
///
/// extern crate proc_macro2;
/// use proc_macro2::{TokenTree, Spacing, Span, Op};
/// use proc_macro2::{TokenTree, Spacing, Span, Punct, TokenStream};
///
/// pub struct Path {
/// pub global: bool,
@@ -26,12 +26,12 @@ pub trait ToTokens {
/// }
///
/// impl ToTokens for Path {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// fn to_tokens(&self, tokens: &mut TokenStream) {
/// for (i, segment) in self.segments.iter().enumerate() {
/// if i > 0 || self.global {
/// // Double colon `::`
/// tokens.append(Op::new(':', Spacing::Joint));
/// tokens.append(Op::new(':', Spacing::Alone));
/// tokens.append(Punct::new(':', Spacing::Joint));
/// tokens.append(Punct::new(':', Spacing::Alone));
/// }
/// segment.to_tokens(tokens);
/// }
@@ -41,49 +41,49 @@ pub trait ToTokens {
/// # pub struct PathSegment;
/// #
/// # impl ToTokens for PathSegment {
/// # fn to_tokens(&self, tokens: &mut Tokens) {
/// # fn to_tokens(&self, tokens: &mut TokenStream) {
/// # unimplemented!()
/// # }
/// # }
/// #
/// # fn main() {}
/// ```
fn to_tokens(&self, tokens: &mut Tokens);
fn to_tokens(&self, tokens: &mut TokenStream);
/// Convert `self` directly into a `Tokens` object.
///
/// This method is implicitly implemented using `to_tokens`, and acts as a
/// convenience method for consumers of the `ToTokens` trait.
fn into_tokens(self) -> Tokens
fn into_token_stream(self) -> TokenStream
where
Self: Sized,
{
let mut tokens = Tokens::new();
let mut tokens = TokenStream::empty();
self.to_tokens(&mut tokens);
tokens
}
}
impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
(**self).to_tokens(tokens);
}
}
impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
(**self).to_tokens(tokens);
}
}
impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
(**self).to_tokens(tokens);
}
}
impl<T: ToTokens> ToTokens for Option<T> {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
if let Some(ref t) = *self {
t.to_tokens(tokens);
}
@@ -91,13 +91,13 @@ impl<T: ToTokens> ToTokens for Option<T> {
}
impl ToTokens for str {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append(Literal::string(self));
}
}
impl ToTokens for String {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.as_str().to_tokens(tokens);
}
}
@@ -105,7 +105,7 @@ impl ToTokens for String {
macro_rules! primitive {
($($t:ident => $name:ident)*) => ($(
impl ToTokens for $t {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append(Literal::$name(*self));
}
}
@@ -130,50 +130,50 @@ primitive! {
}
impl ToTokens for char {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append(Literal::character(*self));
}
}
impl ToTokens for bool {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
let word = if *self { "true" } else { "false" };
tokens.append(Term::new(word, Span::call_site()));
tokens.append(Ident::new(word, Span::call_site()));
}
}
impl ToTokens for Group {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append(self.clone());
}
}
impl ToTokens for Term {
fn to_tokens(&self, tokens: &mut Tokens) {
impl ToTokens for Ident {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append(self.clone());
}
}
impl ToTokens for Op {
fn to_tokens(&self, tokens: &mut Tokens) {
impl ToTokens for Punct {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append(self.clone());
}
}
impl ToTokens for Literal {
fn to_tokens(&self, tokens: &mut Tokens) {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append(self.clone());
}
}
impl ToTokens for TokenTree {
fn to_tokens(&self, dst: &mut Tokens) {
fn to_tokens(&self, dst: &mut TokenStream) {
dst.append(self.clone());
}
}
impl ToTokens for TokenStream {
fn to_tokens(&self, dst: &mut Tokens) {
fn to_tokens(&self, dst: &mut TokenStream) {
dst.append_all(self.clone().into_iter());
}
}
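A compilable counterpart to the `Path` example in the doc comment above, under the new signatures: `to_tokens` takes `&mut TokenStream`, punctuation is built from `Punct`, and identifiers from `Ident`. The `QualifiedName` type is hypothetical:

    extern crate proc_macro2;
    extern crate quote;

    use proc_macro2::{Ident, Punct, Spacing, Span, TokenStream};
    use quote::{ToTokens, TokenStreamExt};

    // Hypothetical type that renders as `prefix :: name`.
    struct QualifiedName {
        prefix: String,
        name: String,
    }

    impl ToTokens for QualifiedName {
        fn to_tokens(&self, tokens: &mut TokenStream) {
            tokens.append(Ident::new(&self.prefix, Span::call_site()));
            // A Joint ':' immediately followed by an Alone ':' forms `::`.
            tokens.append(Punct::new(':', Spacing::Joint));
            tokens.append(Punct::new(':', Spacing::Alone));
            tokens.append(Ident::new(&self.name, Span::call_site()));
        }
    }

    fn main() {
        let path = QualifiedName {
            prefix: "std".to_owned(),
            name: "fmt".to_owned(),
        };
        let mut tokens = TokenStream::empty();
        path.to_tokens(&mut tokens);
        // Prints roughly `std :: fmt`.
        println!("{}", tokens);
    }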

View File

@@ -1,45 +1,48 @@
use super::ToTokens;
use std::fmt::{self, Debug, Display};
use std::hash::{Hash, Hasher};
#[cfg(feature = "proc-macro")]
use proc_macro;
use proc_macro2::{TokenStream, TokenTree};
/// Tokens produced by a [`quote!`] invocation.
///
/// [`quote!`]: macro.quote.html
#[derive(Clone, Default)]
pub struct Tokens {
tts: Vec<TokenTree>,
pub trait TokenStreamExt {
fn append<U>(&mut self, token: U) where U: Into<TokenTree>;
fn append_all<T, I>(&mut self, iter: I)
where
T: ToTokens,
I: IntoIterator<Item = T>;
fn append_separated<T, I, U>(&mut self, iter: I, op: U)
where
T: ToTokens,
I: IntoIterator<Item = T>,
U: ToTokens;
fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
where
T: ToTokens,
I: IntoIterator<Item = T>,
U: ToTokens;
}
impl Tokens {
/// Empty tokens.
pub fn new() -> Self {
Tokens { tts: Vec::new() }
}
impl TokenStreamExt for TokenStream {
/// For use by `ToTokens` implementations.
///
/// Appends the token specified to this list of tokens.
pub fn append<U>(&mut self, token: U)
fn append<U>(&mut self, token: U)
where
U: Into<TokenTree>,
{
self.tts.push(token.into());
self.extend(Some(token.into()));
}
/// For use by `ToTokens` implementations.
///
/// ```
/// # #[macro_use] extern crate quote;
/// # use quote::{Tokens, ToTokens};
/// # extern crate proc_macro2;
/// # use quote::{TokenStreamExt, ToTokens};
/// # use proc_macro2::TokenStream;
/// # fn main() {
/// struct X;
///
/// impl ToTokens for X {
/// fn to_tokens(&self, tokens: &mut Tokens) {
/// fn to_tokens(&self, tokens: &mut TokenStream) {
/// tokens.append_all(&[true, false]);
/// }
/// }
@@ -48,7 +51,7 @@ impl Tokens {
/// assert_eq!(tokens.to_string(), "true false");
/// # }
/// ```
pub fn append_all<T, I>(&mut self, iter: I)
fn append_all<T, I>(&mut self, iter: I)
where
T: ToTokens,
I: IntoIterator<Item = T>,
@@ -62,7 +65,7 @@ impl Tokens {
///
/// Appends all of the items in the iterator `I`, separated by the tokens
/// `U`.
pub fn append_separated<T, I, U>(&mut self, iter: I, op: U)
fn append_separated<T, I, U>(&mut self, iter: I, op: U)
where
T: ToTokens,
I: IntoIterator<Item = T>,
@@ -80,7 +83,7 @@ impl Tokens {
///
/// Appends all tokens in the iterator `I`, appending `U` after each
/// element, including after the last element of the iterator.
pub fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
where
T: ToTokens,
I: IntoIterator<Item = T>,
@@ -92,175 +95,3 @@ impl Tokens {
}
}
}
impl ToTokens for Tokens {
fn to_tokens(&self, dst: &mut Tokens) {
dst.tts.extend(self.tts.iter().cloned());
}
fn into_tokens(self) -> Tokens {
self
}
}
impl From<Tokens> for TokenStream {
fn from(tokens: Tokens) -> TokenStream {
tokens.tts.into_iter().collect()
}
}
#[cfg(feature = "proc-macro")]
impl From<Tokens> for proc_macro::TokenStream {
fn from(tokens: Tokens) -> proc_macro::TokenStream {
TokenStream::from(tokens).into()
}
}
/// Allows a `Tokens` to be passed to `Tokens::append_all`.
impl IntoIterator for Tokens {
type Item = TokenTree;
type IntoIter = private::IntoIter;
fn into_iter(self) -> Self::IntoIter {
private::into_iter(self.tts.into_iter())
}
}
mod private {
use proc_macro2::TokenTree;
use std::vec;
pub struct IntoIter(vec::IntoIter<TokenTree>);
pub fn into_iter(tts: vec::IntoIter<TokenTree>) -> IntoIter {
IntoIter(tts)
}
impl Iterator for IntoIter {
type Item = TokenTree;
fn next(&mut self) -> Option<Self::Item> {
self.0.next()
}
}
}
impl Display for Tokens {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&TokenStream::from(self.clone()), formatter)
}
}
impl Debug for Tokens {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
struct DebugAsDisplay<'a, T: 'a>(&'a T);
impl<'a, T> Debug for DebugAsDisplay<'a, T>
where
T: Display,
{
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(self.0, formatter)
}
}
formatter
.debug_tuple("Tokens")
.field(&DebugAsDisplay(self))
.finish()
}
}
fn tt_eq(a: &TokenTree, b: &TokenTree) -> bool {
use proc_macro2::{Delimiter, Spacing};
match (a, b) {
(&TokenTree::Group(ref s1), &TokenTree::Group(ref s2)) => {
match (s1.delimiter(), s2.delimiter()) {
(Delimiter::Parenthesis, Delimiter::Parenthesis)
| (Delimiter::Brace, Delimiter::Brace)
| (Delimiter::Bracket, Delimiter::Bracket)
| (Delimiter::None, Delimiter::None) => {}
_ => return false,
}
let s1 = s1.stream().clone().into_iter();
let mut s2 = s2.stream().clone().into_iter();
for item1 in s1 {
let item2 = match s2.next() {
Some(item) => item,
None => return false,
};
if !tt_eq(&item1, &item2) {
return false;
}
}
s2.next().is_none()
}
(&TokenTree::Op(ref o1), &TokenTree::Op(ref o2)) => {
o1.op() == o2.op() && match (o1.spacing(), o2.spacing()) {
(Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
_ => false,
}
}
(&TokenTree::Literal(ref l1), &TokenTree::Literal(ref l2)) => {
l1.to_string() == l2.to_string()
}
(&TokenTree::Term(ref s1), &TokenTree::Term(ref s2)) => s1.as_str() == s2.as_str(),
_ => false,
}
}
impl PartialEq for Tokens {
fn eq(&self, other: &Self) -> bool {
if self.tts.len() != other.tts.len() {
return false;
}
self.tts
.iter()
.zip(other.tts.iter())
.all(|(a, b)| tt_eq(a, b))
}
}
fn tt_hash<H: Hasher>(tt: &TokenTree, h: &mut H) {
use proc_macro2::{Delimiter, Spacing};
match *tt {
TokenTree::Group(ref g) => {
0u8.hash(h);
match g.delimiter() {
Delimiter::Parenthesis => 0u8.hash(h),
Delimiter::Brace => 1u8.hash(h),
Delimiter::Bracket => 2u8.hash(h),
Delimiter::None => 3u8.hash(h),
}
for item in g.stream().clone() {
tt_hash(&item, h);
}
0xffu8.hash(h); // terminator w/ a variant we don't normally hash
}
TokenTree::Op(ref t) => {
1u8.hash(h);
t.op().hash(h);
match t.spacing() {
Spacing::Alone => 0u8.hash(h),
Spacing::Joint => 1u8.hash(h),
}
}
TokenTree::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
TokenTree::Term(ref word) => (3u8, word.as_str()).hash(h),
}
}
impl<'a> Hash for Tokens {
fn hash<H: Hasher>(&self, h: &mut H) {
self.tts.len().hash(h);
for tt in &self.tts {
tt_hash(&tt, h);
}
}
}
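The `TokenStreamExt` trait above keeps the old `Tokens::append*` helpers available on a plain `TokenStream`. A small usage sketch, with arbitrary identifiers:

    extern crate proc_macro2;
    extern crate quote;

    use proc_macro2::{Ident, Punct, Spacing, Span, TokenStream};
    use quote::TokenStreamExt;

    fn main() {
        let idents = vec![
            Ident::new("a", Span::call_site()),
            Ident::new("b", Span::call_site()),
            Ident::new("c", Span::call_site()),
        ];

        // The helpers are now extension methods on proc_macro2::TokenStream.
        let mut stream = TokenStream::empty();
        stream.append_separated(idents, Punct::new(',', Spacing::Alone));
        stream.append(Punct::new(';', Spacing::Alone));

        // Prints roughly `a , b , c ;`.
        println!("{}", stream);
    }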

View File

@@ -6,13 +6,14 @@ extern crate proc_macro2;
#[macro_use]
extern crate quote;
use proc_macro2::{Span, Term};
use proc_macro2::{Span, Ident, TokenStream};
use quote::TokenStreamExt;
struct X;
impl quote::ToTokens for X {
fn to_tokens(&self, tokens: &mut quote::Tokens) {
tokens.append(Term::new("X", Span::call_site()));
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.append(Ident::new("X", Span::call_site()));
}
}
@@ -182,8 +183,8 @@ fn test_string() {
#[test]
fn test_ident() {
let foo = Term::new("Foo", Span::call_site());
let bar = Term::new(&format!("Bar{}", 7), Span::call_site());
let foo = Ident::new("Foo", Span::call_site());
let bar = Ident::new(&format!("Bar{}", 7), Span::call_site());
let tokens = quote!(struct #foo; enum #bar {});
let expected = "struct Foo ; enum Bar7 { }";
assert_eq!(expected, tokens.to_string());
@@ -257,9 +258,9 @@ fn test_box_str() {
#[test]
fn test_cow() {
let owned: Cow<Term> = Cow::Owned(Term::new("owned", Span::call_site()));
let owned: Cow<Ident> = Cow::Owned(Ident::new("owned", Span::call_site()));
let ident = Term::new("borrowed", Span::call_site());
let ident = Ident::new("borrowed", Span::call_site());
let borrowed = Cow::Borrowed(&ident);
let tokens = quote! { #owned #borrowed };
@@ -268,8 +269,8 @@ fn test_cow() {
#[test]
fn test_closure() {
fn field_i(i: usize) -> Term {
Term::new(&format!("__field{}", i), Span::call_site())
fn field_i(i: usize) -> Ident {
Ident::new(&format!("__field{}", i), Span::call_site())
}
let fields = (0usize..3)