Mirror of https://gitee.com/openharmony/third_party_rust_proc-macro2
Synced 2024-11-23 15:29:39 +00:00

Commit af5bad4ef2 (parent 77451ca2c6)

Reimplement public interface for stability

More information to come later about this, but it is a result of the work-week discussions we've had about stabilizing procedural macros.
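The change that repeats throughout the diff below is a single pattern: every public type becomes an opaque struct whose backend value sits behind a private `inner` field, together with a `PhantomData<Rc<()>>` marker so the wrapper is neither `Send` nor `Sync`. A minimal sketch of that pattern, written for illustration and not taken from the commit itself:

    use std::marker;
    use std::rc::Rc;

    // Stand-in for the crate's private backend module.
    mod imp {
        #[derive(Clone)]
        pub struct TokenStream;
    }

    #[derive(Clone)]
    pub struct TokenStream {
        // Users never see or touch the backend type directly.
        inner: imp::TokenStream,
        // Rc<()> is neither Send nor Sync, so neither is the wrapper,
        // which keeps the door open for backend changes later.
        _marker: marker::PhantomData<Rc<()>>,
    }

    impl TokenStream {
        // Private constructor used by the rest of the crate.
        fn _new(inner: imp::TokenStream) -> TokenStream {
            TokenStream { inner: inner, _marker: marker::PhantomData }
        }
    }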
Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "proc-macro2"
-version = "0.2.3" # remember to update html_root_url
+version = "0.3.0" # remember to update html_root_url
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
 license = "MIT/Apache-2.0"
 readme = "README.md"
src/lib.rs (477 changed lines)
@@ -24,7 +24,7 @@
 //! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
 
 // Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/0.2.3")]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.0")]
 
 #![cfg_attr(feature = "nightly", feature(proc_macro))]
 
@@ -35,8 +35,10 @@ extern crate proc_macro;
 extern crate unicode_xid;
 
 use std::fmt;
-use std::str::FromStr;
 use std::iter::FromIterator;
+use std::marker;
+use std::rc::Rc;
+use std::str::FromStr;
 
 #[macro_use]
 #[cfg(not(feature = "nightly"))]
@@ -49,67 +51,80 @@ mod imp;
 #[cfg(feature = "nightly")]
 mod imp;
 
-#[macro_use]
-mod macros;
-
 #[derive(Clone)]
-pub struct TokenStream(imp::TokenStream);
+pub struct TokenStream {
+    inner: imp::TokenStream,
+    _marker: marker::PhantomData<Rc<()>>,
+}
 
-pub struct LexError(imp::LexError);
+pub struct LexError {
+    inner: imp::LexError,
+    _marker: marker::PhantomData<Rc<()>>,
+}
+
+impl TokenStream {
+    fn _new(inner: imp::TokenStream) -> TokenStream {
+        TokenStream {
+            inner: inner,
+            _marker: marker::PhantomData,
+        }
+    }
+
+    pub fn empty() -> TokenStream {
+        TokenStream::_new(imp::TokenStream::empty())
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.inner.is_empty()
+    }
+}
 
 impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        match src.parse() {
-            Ok(e) => Ok(TokenStream(e)),
-            Err(e) => Err(LexError(e)),
-        }
+        let e = src.parse().map_err(|e| {
+            LexError { inner: e, _marker: marker::PhantomData }
+        })?;
+        Ok(TokenStream::_new(e))
     }
 }
 
 #[cfg(feature = "proc-macro")]
 impl From<proc_macro::TokenStream> for TokenStream {
     fn from(inner: proc_macro::TokenStream) -> TokenStream {
-        TokenStream(inner.into())
+        TokenStream::_new(inner.into())
     }
 }
 
 #[cfg(feature = "proc-macro")]
 impl From<TokenStream> for proc_macro::TokenStream {
     fn from(inner: TokenStream) -> proc_macro::TokenStream {
-        inner.0.into()
+        inner.inner.into()
     }
 }
 
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream(tree.into())
+impl FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+        TokenStream::_new(streams.into_iter().collect())
     }
 }
 
-impl<T: Into<TokenStream>> FromIterator<T> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
-        TokenStream(streams.into_iter().map(|t| t.into().0).collect())
+impl fmt::Display for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
     }
 }
 
-impl IntoIterator for TokenStream {
-    type Item = TokenTree;
-    type IntoIter = TokenTreeIter;
-
-    fn into_iter(self) -> TokenTreeIter {
-        TokenTreeIter(self.0.into_iter())
+impl fmt::Debug for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
     }
 }
 
-impl TokenStream {
-    pub fn empty() -> TokenStream {
-        TokenStream(imp::TokenStream::empty())
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
-    }
+impl fmt::Debug for LexError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
 }
 
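From the caller's side, the reworked `TokenStream` above still parses with `FromStr`, but `FromIterator` now collects `TokenTree` values instead of nested streams. A short usage sketch against the 0.3.0 surface introduced here, assuming the crate's default (non-nightly) backend so parsing works outside a macro invocation:

    extern crate proc_macro2;

    use proc_macro2::{Op, Spacing, Span, Term, TokenStream, TokenTree};

    fn main() {
        // FromStr is unchanged; LexError now implements Debug, so expect() works.
        let parsed: TokenStream = "a + b".parse().expect("valid token stream");
        assert!(!parsed.is_empty());
        println!("{}", parsed);

        // FromIterator<TokenTree> replaces the old FromIterator<T: Into<TokenStream>>.
        let rebuilt: TokenStream = vec![
            TokenTree::Term(Term::new("a", Span::call_site())),
            TokenTree::Op(Op::new('+', Spacing::Alone)),
            TokenTree::Term(Term::new("b", Span::call_site())),
        ].into_iter()
            .collect();
        println!("{}", rebuilt);
    }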
@@ -153,83 +168,147 @@ pub struct LineColumn {
     pub column: usize,
 }
 
-#[derive(Copy, Clone, PartialEq, Eq)]
-pub struct Span(imp::Span);
-
-impl Span {
-    pub fn call_site() -> Span {
-        Span(imp::Span::call_site())
+#[derive(Copy, Clone)]
+#[cfg_attr(procmacro2_semver_exempt, derive(PartialEq, Eq))]
+pub struct Span {
+    inner: imp::Span,
+    _marker: marker::PhantomData<Rc<()>>,
+}
+
+impl Span {
+    fn _new(inner: imp::Span) -> Span {
+        Span {
+            inner: inner,
+            _marker: marker::PhantomData,
+        }
+    }
+
+    pub fn call_site() -> Span {
+        Span::_new(imp::Span::call_site())
     }
 
+    #[cfg(procmacro2_semver_exempt)]
     pub fn def_site() -> Span {
-        Span(imp::Span::def_site())
+        Span::_new(imp::Span::def_site())
     }
 
     /// Creates a new span with the same line/column information as `self` but
     /// that resolves symbols as though it were at `other`.
+    #[cfg(procmacro2_semver_exempt)]
     pub fn resolved_at(&self, other: Span) -> Span {
-        Span(self.0.resolved_at(other.0))
+        Span::_new(self.inner.resolved_at(other.inner))
     }
 
     /// Creates a new span with the same name resolution behavior as `self` but
     /// with the line/column information of `other`.
+    #[cfg(procmacro2_semver_exempt)]
     pub fn located_at(&self, other: Span) -> Span {
-        Span(self.0.located_at(other.0))
+        Span::_new(self.inner.located_at(other.inner))
     }
 
     /// This method is only available when the `"nightly"` feature is enabled.
     #[cfg(all(feature = "nightly", feature = "proc-macro"))]
     pub fn unstable(self) -> proc_macro::Span {
-        self.0.unstable()
+        self.inner.unstable()
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn source_file(&self) -> SourceFile {
-        SourceFile(self.0.source_file())
+        SourceFile(self.inner.source_file())
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn start(&self) -> LineColumn {
-        let imp::LineColumn{ line, column } = self.0.start();
+        let imp::LineColumn{ line, column } = self.inner.start();
         LineColumn { line: line, column: column }
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn end(&self) -> LineColumn {
-        let imp::LineColumn{ line, column } = self.0.end();
+        let imp::LineColumn{ line, column } = self.inner.end();
         LineColumn { line: line, column: column }
     }
 
     #[cfg(procmacro2_semver_exempt)]
     pub fn join(&self, other: Span) -> Option<Span> {
-        self.0.join(other.0).map(Span)
+        self.inner.join(other.inner).map(Span::_new)
+    }
+}
+
+impl fmt::Debug for Span {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
     }
 }
 
 #[derive(Clone, Debug)]
-pub struct TokenTree {
-    pub span: Span,
-    pub kind: TokenNode,
+pub enum TokenTree {
+    Group(Group),
+    Term(Term),
+    Op(Op),
+    Literal(Literal),
 }
 
-impl From<TokenNode> for TokenTree {
-    fn from(kind: TokenNode) -> TokenTree {
-        TokenTree { span: Span::def_site(), kind: kind }
+impl TokenTree {
+    pub fn span(&self) -> Span {
+        match *self {
+            TokenTree::Group(ref t) => t.span(),
+            TokenTree::Term(ref t) => t.span(),
+            TokenTree::Op(ref t) => t.span(),
+            TokenTree::Literal(ref t) => t.span(),
+        }
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        match *self {
+            TokenTree::Group(ref mut t) => t.set_span(span),
+            TokenTree::Term(ref mut t) => t.set_span(span),
+            TokenTree::Op(ref mut t) => t.set_span(span),
+            TokenTree::Literal(ref mut t) => t.set_span(span),
+        }
+    }
+}
+
+impl From<Group> for TokenTree {
+    fn from(g: Group) -> TokenTree {
+        TokenTree::Group(g)
+    }
+}
+
+impl From<Term> for TokenTree {
+    fn from(g: Term) -> TokenTree {
+        TokenTree::Term(g)
+    }
+}
+
+impl From<Op> for TokenTree {
+    fn from(g: Op) -> TokenTree {
+        TokenTree::Op(g)
+    }
+}
+
+impl From<Literal> for TokenTree {
+    fn from(g: Literal) -> TokenTree {
+        TokenTree::Literal(g)
     }
 }
 
 impl fmt::Display for TokenTree {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        TokenStream::from(self.clone()).fmt(f)
+        match *self {
+            TokenTree::Group(ref t) => t.fmt(f),
+            TokenTree::Term(ref t) => t.fmt(f),
+            TokenTree::Op(ref t) => t.fmt(f),
+            TokenTree::Literal(ref t) => t.fmt(f),
+        }
     }
 }
 
 #[derive(Clone, Debug)]
-pub enum TokenNode {
-    Group(Delimiter, TokenStream),
-    Term(Term),
-    Op(char, Spacing),
-    Literal(Literal),
+pub struct Group {
+    delimiter: Delimiter,
+    stream: TokenStream,
+    span: Span,
 }
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
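With `TokenTree` turned into an enum above, downstream code matches on the variants directly instead of reading a `kind` field, and spans are reached through the `span()` accessor. An illustrative walker written against the new API (not part of the commit):

    extern crate proc_macro2;

    use proc_macro2::{TokenStream, TokenTree};

    // Count operator tokens, recursing into delimited groups.
    fn count_ops(stream: TokenStream) -> usize {
        let mut n = 0;
        for tree in stream {
            match tree {
                TokenTree::Op(_) => n += 1,
                TokenTree::Group(ref g) => n += count_ops(g.stream()),
                TokenTree::Term(_) | TokenTree::Literal(_) => {}
            }
        }
        n
    }

    fn main() {
        let ts: TokenStream = "a + (b * c)".parse().unwrap();
        assert_eq!(count_ops(ts), 2);
    }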
@@ -240,17 +319,43 @@ pub enum Delimiter {
     None,
 }
 
-#[derive(Copy, Clone)]
-pub struct Term(imp::Term);
-
-impl Term {
-    pub fn intern(string: &str) -> Term {
-        Term(imp::Term::intern(string))
+impl Group {
+    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+        Group {
+            delimiter: delimiter,
+            stream: stream,
+            span: Span::call_site(),
+        }
     }
 
-    pub fn as_str(&self) -> &str {
-        self.0.as_str()
+    pub fn delimiter(&self) -> Delimiter {
+        self.delimiter
     }
 
+    pub fn stream(&self) -> TokenStream {
+        self.stream.clone()
+    }
+
+    pub fn span(&self) -> Span {
+        self.span
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        self.span = span;
+    }
+}
+
+impl fmt::Display for Group {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.stream.fmt(f)
+    }
+}
+
+#[derive(Copy, Clone, Debug)]
+pub struct Op {
+    op: char,
+    spacing: Spacing,
+    span: Span,
 }
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
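`Group` and `Op` are plain data holders: a group owns a delimiter, a stream and a span; an op owns a character, its spacing and a span, and both convert into `TokenTree` through `From`. A hedged construction example, again written against the API above rather than copied from the commit:

    extern crate proc_macro2;

    use proc_macro2::{Delimiter, Group, Op, Spacing, Span, TokenStream, TokenTree};

    fn main() {
        // Build `( x )` as a Group wrapping a one-term stream.
        let inner: TokenStream = "x".parse().unwrap();
        let mut group = Group::new(Delimiter::Parenthesis, inner);
        group.set_span(Span::call_site());

        // A lone `+`; Spacing::Joint would mean the next op touches this one.
        let plus = Op::new('+', Spacing::Alone);

        // Both convert into TokenTree and can be collected into a stream.
        let ts: TokenStream = vec![TokenTree::from(group), TokenTree::from(plus)]
            .into_iter()
            .collect();
        println!("{}", ts);
    }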
@@ -259,86 +364,234 @@ pub enum Spacing {
     Joint,
 }
 
-#[derive(Clone)]
-pub struct Literal(imp::Literal);
-
-macro_rules! int_literals {
-    ($($kind:ident,)*) => ($(
-        pub fn $kind(n: $kind) -> Literal {
-            Literal(n.into())
+impl Op {
+    pub fn new(op: char, spacing: Spacing) -> Op {
+        Op {
+            op: op,
+            spacing: spacing,
+            span: Span::call_site(),
+        }
+    }
+
+    pub fn op(&self) -> char {
+        self.op
+    }
+
+    pub fn spacing(&self) -> Spacing {
+        self.spacing
+    }
+
+    pub fn span(&self) -> Span {
+        self.span
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        self.span = span;
+    }
+}
+
+impl fmt::Display for Op {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.op.fmt(f)
+    }
+}
+
+#[derive(Copy, Clone)]
+pub struct Term {
+    inner: imp::Term,
+    span: Span,
+    _marker: marker::PhantomData<Rc<()>>,
+}
+
+impl Term {
+    fn _new(inner: imp::Term, span: Span) -> Term {
+        Term {
+            inner: inner,
+            span: span,
+            _marker: marker::PhantomData,
+        }
+    }
+
+    pub fn new(string: &str, span: Span) -> Term {
+        Term::_new(imp::Term::intern(string), span)
+    }
+
+    pub fn as_str(&self) -> &str {
+        self.inner.as_str()
+    }
+
+    pub fn span(&self) -> Span {
+        self.span
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        self.span = span;
+    }
+}
+
+impl fmt::Display for Term {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.as_str().fmt(f)
+    }
+}
+
+impl fmt::Debug for Term {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
+
+#[derive(Clone)]
+pub struct Literal {
+    inner: imp::Literal,
+    span: Span,
+    _marker: marker::PhantomData<Rc<()>>,
+}
+
+macro_rules! suffixed_int_literals {
+    ($($name:ident => $kind:ident,)*) => ($(
+        #[allow(unused_comparisons)]
+        pub fn $name(n: $kind) -> Literal {
+            Literal::_new(n.into())
+        }
+    )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+    ($($name:ident => $kind:ident,)*) => ($(
+        #[allow(unused_comparisons)]
+        pub fn $name(n: $kind) -> Literal {
+            Literal::_new(imp::Literal::integer(n as i64))
         }
     )*)
 }
 
 impl Literal {
-    pub fn integer(s: i64) -> Literal {
-        Literal(imp::Literal::integer(s))
+    fn _new(inner: imp::Literal) -> Literal {
+        Literal {
+            inner: inner,
+            span: Span::call_site(),
+            _marker: marker::PhantomData,
+        }
     }
 
-    int_literals! {
-        u8, u16, u32, u64, usize,
-        i8, i16, i32, i64, isize,
+    suffixed_int_literals! {
+        u8_suffixed => u8,
+        u16_suffixed => u16,
+        u32_suffixed => u32,
+        u64_suffixed => u64,
+        usize_suffixed => usize,
+        i8_suffixed => i8,
+        i16_suffixed => i16,
+        i32_suffixed => i32,
+        i64_suffixed => i64,
+        isize_suffixed => isize,
     }
 
-    pub fn float(f: f64) -> Literal {
-        Literal(imp::Literal::float(f))
+    unsuffixed_int_literals! {
+        u8_unsuffixed => u8,
+        u16_unsuffixed => u16,
+        u32_unsuffixed => u32,
+        u64_unsuffixed => u64,
+        usize_unsuffixed => usize,
+        i8_unsuffixed => i8,
+        i16_unsuffixed => i16,
+        i32_unsuffixed => i32,
+        i64_unsuffixed => i64,
+        isize_unsuffixed => isize,
     }
 
-    pub fn f64(f: f64) -> Literal {
-        Literal(f.into())
+    pub fn f64_unsuffixed(f: f64) -> Literal {
+        assert!(f.is_finite());
+        Literal::_new(imp::Literal::float(f))
     }
 
-    pub fn f32(f: f32) -> Literal {
-        Literal(f.into())
+    pub fn f64_suffixed(f: f64) -> Literal {
+        assert!(f.is_finite());
+        Literal::_new(f.into())
+    }
+
+    pub fn f32_unsuffixed(f: f32) -> Literal {
+        assert!(f.is_finite());
+        Literal::_new(imp::Literal::float(f as f64))
+    }
+
+    pub fn f32_suffixed(f: f32) -> Literal {
+        assert!(f.is_finite());
+        Literal::_new(f.into())
     }
 
     pub fn string(string: &str) -> Literal {
-        Literal(string.into())
+        Literal::_new(string.into())
     }
 
     pub fn character(ch: char) -> Literal {
-        Literal(ch.into())
+        Literal::_new(ch.into())
     }
 
     pub fn byte_string(s: &[u8]) -> Literal {
-        Literal(imp::Literal::byte_string(s))
+        Literal::_new(imp::Literal::byte_string(s))
     }
 
-    // =======================================================================
-    // Not present upstream in proc_macro yet
-
-    pub fn byte_char(b: u8) -> Literal {
-        Literal(imp::Literal::byte_char(b))
+    pub fn span(&self) -> Span {
+        self.span
     }
 
-    pub fn doccomment(s: &str) -> Literal {
-        Literal(imp::Literal::doccomment(s))
-    }
-
-    pub fn raw_string(s: &str, pounds: usize) -> Literal {
-        Literal(imp::Literal::raw_string(s, pounds))
-    }
-
-    pub fn raw_byte_string(s: &str, pounds: usize) -> Literal {
-        Literal(imp::Literal::raw_byte_string(s, pounds))
+    pub fn set_span(&mut self, span: Span) {
+        self.span = span;
     }
 }
 
-pub struct TokenTreeIter(imp::TokenTreeIter);
+impl fmt::Debug for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
 
-impl Iterator for TokenTreeIter {
+impl fmt::Display for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
+
+pub mod token_stream {
+    use std::fmt;
+    use std::marker;
+    use std::rc::Rc;
+
+    use imp;
+    use TokenTree;
+    pub use TokenStream;
+
+    pub struct IntoIter {
+        inner: imp::TokenTreeIter,
+        _marker: marker::PhantomData<Rc<()>>,
+    }
+
+    impl Iterator for IntoIter {
         type Item = TokenTree;
 
         fn next(&mut self) -> Option<TokenTree> {
-        self.0.next()
+            self.inner.next()
         }
     }
 
-forward_fmt!(Debug for LexError);
-forward_fmt!(Debug for Literal);
-forward_fmt!(Debug for Span);
-forward_fmt!(Debug for Term);
-forward_fmt!(Debug for TokenTreeIter);
-forward_fmt!(Debug for TokenStream);
-forward_fmt!(Display for Literal);
-forward_fmt!(Display for TokenStream);
+    impl fmt::Debug for IntoIter {
+        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+            self.inner.fmt(f)
+        }
+    }
+
+    impl IntoIterator for TokenStream {
+        type Item = TokenTree;
+        type IntoIter = IntoIter;
+
+        fn into_iter(self) -> IntoIter {
+            IntoIter {
+                inner: self.inner.into_iter(),
+                _marker: marker::PhantomData,
+            }
+        }
+    }
+}
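The old `Literal::integer`, `float`, `f32` and `f64` constructors give way to explicit suffixed and unsuffixed families, so callers now state whether the emitted token carries its type. A small example; the first assertion mirrors the updated test near the end of this diff, the rest is illustrative:

    extern crate proc_macro2;

    use proc_macro2::Literal;

    fn main() {
        // Unsuffixed floats keep a decimal point but no type suffix.
        assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");

        // Suffixed constructors embed the type in the token (e.g. 10u8),
        // unsuffixed ones leave the type to inference at the use site.
        println!("{}", Literal::u8_suffixed(10));
        println!("{}", Literal::u8_unsuffixed(10));

        println!("{}", Literal::string("hello"));
        println!("{}", Literal::byte_string(b"bytes"));
    }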
src/macros.rs (deleted)

@@ -1,9 +0,0 @@
-macro_rules! forward_fmt {
-    ($tr:ident for $ty:ident) => {
-        impl ::std::fmt::$tr for $ty {
-            fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
-                ::std::fmt::$tr::fmt(&self.0, f)
-            }
-        }
-    }
-}
src/stable.rs

@@ -1,3 +1,5 @@
+#![allow(dead_code)]
+
 use std::ascii;
 use std::borrow::Borrow;
 use std::cell::RefCell;

@@ -14,7 +16,7 @@ use std::vec;
 use unicode_xid::UnicodeXID;
 use strnom::{Cursor, PResult, skip_whitespace, block_comment, whitespace, word_break};
 
-use {TokenTree, TokenNode, Delimiter, Spacing};
+use {TokenTree, Delimiter, Spacing, Group, Op};
 
 #[derive(Clone, Debug)]
 pub struct TokenStream {

@@ -67,7 +69,7 @@ impl FromStr for TokenStream {
                 if skip_whitespace(input).len() != 0 {
                     Err(LexError)
                 } else {
-                    Ok(output.0)
+                    Ok(output.inner)
                 }
             }
             Err(LexError) => Err(LexError),

@@ -83,32 +85,32 @@ impl fmt::Display for TokenStream {
                 write!(f, " ")?;
             }
             joint = false;
-            match tt.kind {
-                TokenNode::Group(delim, ref stream) => {
-                    let (start, end) = match delim {
+            match *tt {
+                TokenTree::Group(ref tt) => {
+                    let (start, end) = match tt.delimiter() {
                         Delimiter::Parenthesis => ("(", ")"),
                         Delimiter::Brace => ("{", "}"),
                         Delimiter::Bracket => ("[", "]"),
                         Delimiter::None => ("", ""),
                     };
-                    if stream.0.inner.len() == 0 {
+                    if tt.stream().inner.inner.len() == 0 {
                         write!(f, "{} {}", start, end)?
                     } else {
-                        write!(f, "{} {} {}", start, stream, end)?
+                        write!(f, "{} {} {}", start, tt.stream(), end)?
                     }
                 }
-                TokenNode::Term(ref sym) => write!(f, "{}", sym.as_str())?,
-                TokenNode::Op(ch, ref op) => {
-                    write!(f, "{}", ch)?;
-                    match *op {
+                TokenTree::Term(ref tt) => write!(f, "{}", tt.as_str())?,
+                TokenTree::Op(ref tt) => {
+                    write!(f, "{}", tt.op())?;
+                    match tt.spacing() {
                         Spacing::Alone => {}
                         Spacing::Joint => joint = true,
                     }
                 }
-                TokenNode::Literal(ref literal) => {
-                    write!(f, "{}", literal)?;
+                TokenTree::Literal(ref tt) => {
+                    write!(f, "{}", tt)?;
                     // handle comments
-                    if (literal.0).0.starts_with("/") {
+                    if tt.inner.0.starts_with("/") {
                         write!(f, "\n")?;
                     }
                 }

@@ -140,12 +142,12 @@ impl From<TokenTree> for TokenStream {
     }
 }
 
-impl iter::FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item=TokenStream>>(streams: I) -> Self {
+impl iter::FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item=TokenTree>>(streams: I) -> Self {
         let mut v = Vec::new();
 
-        for stream in streams.into_iter() {
-            v.extend(stream.inner);
+        for token in streams.into_iter() {
+            v.push(token);
         }
 
         TokenStream { inner: v }

@@ -589,64 +591,58 @@ impl From<char> for Literal {
 
 named!(token_stream -> ::TokenStream, map!(
     many0!(token_tree),
-    |trees| ::TokenStream(TokenStream { inner: trees })
+    |trees| ::TokenStream::_new(TokenStream { inner: trees })
 ));
 
 #[cfg(not(procmacro2_semver_exempt))]
 fn token_tree(input: Cursor) -> PResult<TokenTree> {
-    let (input, kind) = token_kind(input)?;
-    Ok((input, TokenTree {
-        span: ::Span(Span {}),
-        kind: kind,
-    }))
+    token_kind(input)
 }
 
 #[cfg(procmacro2_semver_exempt)]
 fn token_tree(input: Cursor) -> PResult<TokenTree> {
     let input = skip_whitespace(input);
     let lo = input.off;
-    let (input, kind) = token_kind(input)?;
+    let (input, mut token) = token_kind(input)?;
     let hi = input.off;
-    Ok((input, TokenTree {
-        span: ::Span(Span {
-            lo: lo,
-            hi: hi,
-        }),
-        kind: kind,
-    }))
+    token.set_span(::Span::_new(Span {
+        lo: lo,
+        hi: hi,
+    }));
+    Ok((input, token))
 }
 
-named!(token_kind -> TokenNode, alt!(
-    map!(delimited, |(d, s)| TokenNode::Group(d, s))
+named!(token_kind -> TokenTree, alt!(
+    map!(group, TokenTree::Group)
     |
-    map!(literal, TokenNode::Literal) // must be before symbol
+    map!(literal, TokenTree::Literal) // must be before symbol
    |
    symbol
    |
-    map!(op, |(op, kind)| TokenNode::Op(op, kind))
+    map!(op, TokenTree::Op)
 ));
 
-named!(delimited -> (Delimiter, ::TokenStream), alt!(
+named!(group -> Group, alt!(
     delimited!(
         punct!("("),
         token_stream,
         punct!(")")
-    ) => { |ts| (Delimiter::Parenthesis, ts) }
+    ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
     |
     delimited!(
         punct!("["),
         token_stream,
         punct!("]")
-    ) => { |ts| (Delimiter::Bracket, ts) }
+    ) => { |ts| Group::new(Delimiter::Bracket, ts) }
     |
     delimited!(
         punct!("{"),
         token_stream,
         punct!("}")
-    ) => { |ts| (Delimiter::Brace, ts) }
+    ) => { |ts| Group::new(Delimiter::Brace, ts) }
 ));
 
-fn symbol(mut input: Cursor) -> PResult<TokenNode> {
+fn symbol(mut input: Cursor) -> PResult<TokenTree> {
     input = skip_whitespace(input);
 
     let mut chars = input.char_indices();

@@ -674,9 +670,9 @@ fn symbol(mut input: Cursor) -> PResult<TokenNode> {
     } else {
         let a = &input.rest[..end];
         if a == "_" {
-            Ok((input.advance(end), TokenNode::Op('_', Spacing::Alone)))
+            Ok((input.advance(end), Op::new('_', Spacing::Alone).into()))
         } else {
-            Ok((input.advance(end), TokenNode::Term(::Term::intern(a))))
+            Ok((input.advance(end), ::Term::new(a, ::Span::call_site()).into()))
         }
     }
 }

@@ -700,7 +696,7 @@ fn literal(input: Cursor) -> PResult<::Literal> {
             let start = input.len() - input_no_ws.len();
             let len = input_no_ws.len() - a.len();
             let end = start + len;
-            Ok((a, ::Literal(Literal(input.rest[start..end].to_string()))))
+            Ok((a, ::Literal::_new(Literal(input.rest[start..end].to_string()))))
         }
         Err(LexError) => Err(LexError),
     }

@@ -1147,7 +1143,7 @@ fn digits(mut input: Cursor) -> PResult<()> {
     }
 }
 
-fn op(input: Cursor) -> PResult<(char, Spacing)> {
+fn op(input: Cursor) -> PResult<Op> {
     let input = skip_whitespace(input);
     match op_char(input) {
         Ok((rest, ch)) => {

@@ -1155,7 +1151,7 @@ fn op(input: Cursor) -> PResult<(char, Spacing)> {
                 Ok(_) => Spacing::Joint,
                 Err(LexError) => Spacing::Alone,
             };
-            Ok((rest, (ch, kind)))
+            Ok((rest, Op::new(ch, kind)))
         }
         Err(LexError) => Err(LexError),
     }
src/unstable.rs

@@ -1,3 +1,5 @@
+#![allow(dead_code)]
+
 use std::ascii;
 use std::fmt;
 use std::iter;

@@ -5,7 +7,7 @@ use std::str::FromStr;
 
 use proc_macro;
 
-use {TokenTree, TokenNode, Delimiter, Spacing};
+use {TokenTree, Delimiter, Spacing, Group, Op};
 
 #[derive(Clone)]
 pub struct TokenStream(proc_macro::TokenStream);

@@ -49,40 +51,40 @@ impl From<TokenStream> for proc_macro::TokenStream {
 }
 
 impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream(proc_macro::TokenTree {
-            span: (tree.span.0).0,
-            kind: match tree.kind {
-                TokenNode::Group(delim, s) => {
-                    let delim = match delim {
+    fn from(token: TokenTree) -> TokenStream {
+        let (span, kind) = match token {
+            TokenTree::Group(tt) => {
+                let delim = match tt.delimiter() {
                     Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
                     Delimiter::Bracket => proc_macro::Delimiter::Bracket,
                     Delimiter::Brace => proc_macro::Delimiter::Brace,
                     Delimiter::None => proc_macro::Delimiter::None,
                 };
-                    proc_macro::TokenNode::Group(delim, (s.0).0)
+                let span = tt.span();
+                let group = proc_macro::TokenNode::Group(delim, tt.stream.inner.0);
+                (span, group)
             }
-                TokenNode::Op(ch, kind) => {
-                    let kind = match kind {
+            TokenTree::Op(tt) => {
+                let kind = match tt.spacing() {
                     Spacing::Joint => proc_macro::Spacing::Joint,
                     Spacing::Alone => proc_macro::Spacing::Alone,
                 };
-                    proc_macro::TokenNode::Op(ch, kind)
+                (tt.span(), proc_macro::TokenNode::Op(tt.op(), kind))
             }
-                TokenNode::Term(s) => {
-                    proc_macro::TokenNode::Term((s.0).0)
+            TokenTree::Term(tt) => {
+                (tt.span(), proc_macro::TokenNode::Term(tt.inner.0))
             }
-                TokenNode::Literal(l) => {
-                    proc_macro::TokenNode::Literal((l.0).0)
+            TokenTree::Literal(tt) => {
+                (tt.span(), proc_macro::TokenNode::Literal(tt.inner.0))
             }
-            },
-        }.into())
+        };
+        TokenStream(proc_macro::TokenTree { span: span.inner.0, kind }.into())
     }
 }
 
-impl iter::FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item=TokenStream>>(streams: I) -> Self {
-        let streams = streams.into_iter().map(|s| s.0);
+impl iter::FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item=TokenTree>>(streams: I) -> Self {
+        let streams = streams.into_iter().map(TokenStream::from);
         TokenStream(streams.collect::<proc_macro::TokenStream>())
     }
 }

@@ -114,13 +116,9 @@ impl Iterator for TokenTreeIter {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        let token = match self.0.next() {
-            Some(n) => n,
-            None => return None,
-        };
-        Some(TokenTree {
-            span: ::Span(Span(token.span)),
-            kind: match token.kind {
+        let token = self.0.next()?;
+        let span = ::Span::_new(Span(token.span));
+        Some(match token.kind {
             proc_macro::TokenNode::Group(delim, s) => {
                 let delim = match delim {
                     proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,

@@ -128,22 +126,28 @@ impl Iterator for TokenTreeIter {
                     proc_macro::Delimiter::Brace => Delimiter::Brace,
                     proc_macro::Delimiter::None => Delimiter::None,
                 };
-                TokenNode::Group(delim, ::TokenStream(TokenStream(s)))
+                let stream = ::TokenStream::_new(TokenStream(s));
+                let mut g = Group::new(delim, stream);
+                g.set_span(span);
+                g.into()
             }
             proc_macro::TokenNode::Op(ch, kind) => {
                 let kind = match kind {
                     proc_macro::Spacing::Joint => Spacing::Joint,
                     proc_macro::Spacing::Alone => Spacing::Alone,
                 };
-                TokenNode::Op(ch, kind)
+                let mut o = Op::new(ch, kind);
+                o.span = span;
+                o.into()
             }
             proc_macro::TokenNode::Term(s) => {
-                TokenNode::Term(::Term(Term(s)))
+                ::Term::_new(Term(s), span).into()
             }
             proc_macro::TokenNode::Literal(l) => {
-                TokenNode::Literal(::Literal(Literal(l)))
+                let mut l = ::Literal::_new(Literal(l));
+                l.span = span;
+                l.into()
             }
-            },
         })
     }

@@ -158,11 +162,9 @@ impl fmt::Debug for TokenTreeIter {
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 #[derive(Clone, PartialEq, Eq)]
 pub struct FileName(String);
 
-#[cfg(procmacro2_semver_exempt)]
 impl fmt::Display for FileName {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.0.fmt(f)

@@ -171,11 +173,9 @@ impl fmt::Display for FileName {
 
 // NOTE: We have to generate our own filename object here because we can't wrap
 // the one provided by proc_macro.
-#[cfg(procmacro2_semver_exempt)]
 #[derive(Clone, PartialEq, Eq)]
 pub struct SourceFile(proc_macro::SourceFile, FileName);
 
-#[cfg(procmacro2_semver_exempt)]
 impl SourceFile {
     fn new(sf: proc_macro::SourceFile) -> Self {
         let filename = FileName(sf.path().to_string());

@@ -192,21 +192,18 @@ impl SourceFile {
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 impl AsRef<FileName> for SourceFile {
     fn as_ref(&self) -> &FileName {
         self.path()
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 impl fmt::Debug for SourceFile {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.0.fmt(f)
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 pub struct LineColumn {
     pub line: usize,
     pub column: usize,

@@ -217,7 +214,7 @@ pub struct Span(proc_macro::Span);
 
 impl From<proc_macro::Span> for ::Span {
     fn from(proc_span: proc_macro::Span) -> ::Span {
-        ::Span(Span(proc_span))
+        ::Span::_new(Span(proc_span))
     }
 }
 

@@ -242,24 +239,20 @@ impl Span {
         self.0
     }
 
-    #[cfg(procmacro2_semver_exempt)]
     pub fn source_file(&self) -> SourceFile {
         SourceFile::new(self.0.source_file())
     }
 
-    #[cfg(procmacro2_semver_exempt)]
     pub fn start(&self) -> LineColumn {
         let proc_macro::LineColumn{ line, column } = self.0.start();
         LineColumn { line, column }
     }
 
-    #[cfg(procmacro2_semver_exempt)]
     pub fn end(&self) -> LineColumn {
         let proc_macro::LineColumn{ line, column } = self.0.end();
         LineColumn { line, column }
     }
 
-    #[cfg(procmacro2_semver_exempt)]
     pub fn join(&self, other: Span) -> Option<Span> {
         self.0.join(other.0).map(Span)
     }
tests/test.rs

@@ -2,26 +2,19 @@ extern crate proc_macro2;
 
 use std::str;
 
-use proc_macro2::{Term, Literal, TokenStream};
+use proc_macro2::{Term, Literal, TokenStream, Span};
 
-#[cfg(procmacro2_semver_exempt)]
-use proc_macro2::TokenNode;
-
-#[cfg(procmacro2_semver_exempt)]
-#[cfg(not(feature = "nightly"))]
-use proc_macro2::Span;
-
 #[test]
 fn symbols() {
-    assert_eq!(Term::intern("foo").as_str(), "foo");
-    assert_eq!(Term::intern("bar").as_str(), "bar");
+    assert_eq!(Term::new("foo", Span::call_site()).as_str(), "foo");
+    assert_eq!(Term::new("bar", Span::call_site()).as_str(), "bar");
 }
 
 #[test]
 fn literals() {
     assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
     assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
-    assert_eq!(Literal::float(10.0).to_string(), "10.0");
+    assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
 }
 
 #[test]

@@ -78,6 +71,8 @@ fn fail() {
 #[cfg(procmacro2_semver_exempt)]
 #[test]
 fn span_test() {
+    use proc_macro2::TokenTree;
+
     fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
         let ts = p.parse::<TokenStream>().unwrap();
         check_spans_internal(ts, &mut lines);

@@ -91,17 +86,18 @@ fn span_test() {
             if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                 *lines = rest;
 
-                let start = i.span.start();
+                let start = i.span().start();
                 assert_eq!(start.line, sline, "sline did not match for {}", i);
                 assert_eq!(start.column, scol, "scol did not match for {}", i);
 
-                let end = i.span.end();
+                let end = i.span().end();
                 assert_eq!(end.line, eline, "eline did not match for {}", i);
                 assert_eq!(end.column, ecol, "ecol did not match for {}", i);
 
-                match i.kind {
-                    TokenNode::Group(_, stream) =>
-                        check_spans_internal(stream, lines),
+                match i {
+                    TokenTree::Group(ref g) => {
+                        check_spans_internal(g.stream().clone(), lines);
+                    }
                     _ => {}
                 }
             }

@@ -146,11 +142,11 @@ fn span_join() {
     let source2 =
         "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>();
 
-    assert!(source1[0].span.source_file() != source2[0].span.source_file());
-    assert_eq!(source1[0].span.source_file(), source1[1].span.source_file());
+    assert!(source1[0].span().source_file() != source2[0].span().source_file());
+    assert_eq!(source1[0].span().source_file(), source1[1].span().source_file());
 
-    let joined1 = source1[0].span.join(source1[1].span);
-    let joined2 = source1[0].span.join(source2[0].span);
+    let joined1 = source1[0].span().join(source1[1].span());
+    let joined2 = source1[0].span().join(source2[0].span());
     assert!(joined1.is_some());
     assert!(joined2.is_none());
 

@@ -161,7 +157,7 @@ fn span_join() {
     assert_eq!(end.line, 2);
     assert_eq!(end.column, 3);
 
-    assert_eq!(joined1.unwrap().source_file(), source1[0].span.source_file());
+    assert_eq!(joined1.unwrap().source_file(), source1[0].span().source_file());
 }
 
 #[test]

@@ -179,8 +175,8 @@ fn tricky_doc_commaent() {
     let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
     let tokens = stream.into_iter().collect::<Vec<_>>();
     assert!(tokens.len() == 1, "not length 1 -- {:?}", tokens);
-    match tokens[0].kind {
-        proc_macro2::TokenNode::Literal(_) => {}
+    match tokens[0] {
+        proc_macro2::TokenTree::Literal(_) => {}
         _ => panic!("wrong token {:?}", tokens[0]),
     }
 }