Bug 1716518 - Upgrade proc-macro2 to v1.0.27. r=emilio

Differential Revision: https://phabricator.services.mozilla.com/D117828
Mike Hommey 2021-06-15 22:17:24 +00:00
parent e651cd961d
commit 8017e7c301
11 changed files with 528 additions and 196 deletions

Cargo.lock (generated)

@ -3929,9 +3929,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.20"
version = "1.0.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"
checksum = "f0d8caf72986c1a598726adc988bb5984792ef84f5ee5aa50209145ee8077038"
dependencies = [
"unicode-xid",
]

third_party/rust/proc-macro2/.cargo-checksum.json

@ -1 +1 @@
{"files":{"Cargo.toml":"c20c4c52342e65ea11ad8382edc636e628e8f8c5ab7cffddc32426b2fe8fe4cd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"332185d7ad4c859210f5edd7a76bc95146c8277726a2f81417f34927c4424d68","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"239f9a25c0f2ab57592288d944c7f1a0f887536b6d4dc2428a17640af8d10a41","src/lib.rs":"2b1d98424c9b23b547dabf85554120e5e65472026a0f3f711b3a097bca7c32fe","src/parse.rs":"500edee9773132e27e44d0fdaa042b1cb9451e29e65124493986f51710c0664c","src/wrapper.rs":"d36c0dced7ec0e7585c1f935cda836080bcae6de1de3d7851d962e9e11a3ac48","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"310c856e27ff61c9ec7f0a5cd96031aac02971557b1621f5e17b089d58e79bcd","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"}
{"files":{"Cargo.toml":"f146a19842771e569274bb49cc45bdec523f5684635e436fac722ca8acca9f1e","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"a71283fbc495095eebbbf46753df3fe2c19505c745b508dea157f65796b64dd7","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"c161f65f18d7d19bcbd568f5c0bea1cfc1ce3bd9c66427b1fdb4944ad7966ce0","src/lib.rs":"233e3b81bd55cfc9ea03e3441750df43482f2542fb1ce766579b2b440b59a8c5","src/marker.rs":"87fce2d0357f5b7998b6d9dfb064f4a0cbc9dabb19e33d4b514a446243ebe2e8","src/parse.rs":"e9490087ca7d1c10a94c835e725a5189145647492be2735dcd842b6e5ae5b57c","src/wrapper.rs":"9b932595ff6534b1d54ed4917b1d7b63e748ac4786d5377bd6f3bc7da78c2f83","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"652db9f25c69ffc65baa60cdca8f195aa2e254d4de0a9ddc85de4dc2470544b6","tests/test.rs":"597186c00ebf51191934c88ff970b5457ca0fb8e608bf896be1ebf8d74c17f4d","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"f0d8caf72986c1a598726adc988bb5984792ef84f5ee5aa50209145ee8077038"}

third_party/rust/proc-macro2/Cargo.toml

@ -13,7 +13,7 @@
[package]
edition = "2018"
name = "proc-macro2"
version = "1.0.20"
version = "1.0.27"
authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
documentation = "https://docs.rs/proc-macro2"
@ -24,7 +24,7 @@ license = "MIT OR Apache-2.0"
repository = "https://github.com/alexcrichton/proc-macro2"
[package.metadata.docs.rs]
rustc-args = ["--cfg", "procmacro2_semver_exempt"]
rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"]
targets = ["x86_64-unknown-linux-gnu"]
[package.metadata.playground]
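The extra `--cfg doc_cfg` only affects docs.rs builds: it activates the nightly `doc_cfg` rustdoc feature so semver-exempt items render with a banner naming the cfg they require, while the attributes stay inert in normal compilation. The consuming side, as it appears later in this diff (sketched as a standalone crate root):

```rust
// Crate root: the feature gate is active only when rustdoc is invoked with
// `--cfg doc_cfg`, as docs.rs now is via the metadata above.
#![cfg_attr(doc_cfg, feature(doc_cfg))]

// On items: document the required cfg without changing what compiles.
#[cfg(procmacro2_semver_exempt)]
#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
pub fn def_site() { /* ... */ }
```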

third_party/rust/proc-macro2/build.rs

@ -14,7 +14,7 @@
// procmacro2_semver_exempt surface area is implemented by using the
// nightly-only proc_macro API.
//
// "hygiene"
// "hygiene"
// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
// and Span::located_at. Enabled on Rust 1.45+.
//
@ -61,10 +61,18 @@ fn main() {
println!("cargo:rustc-cfg=span_locations");
}
if version.minor < 32 {
println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe");
}
if version.minor < 39 {
println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
}
if version.minor >= 44 {
println!("cargo:rustc-cfg=lexerror_display");
}
if version.minor >= 45 {
println!("cargo:rustc-cfg=hygiene");
}
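The build script emits two kinds of cfg flags: `no_*` flags marking features missing on old toolchains (the new `no_libprocmacro_unwind_safe` pairs with the manual impls in src/marker.rs below) and positive flags like `lexerror_display` for capabilities that appeared later. A trimmed sketch of the pattern; `rustc_minor_version` is a simplified stand-in for the detection helper in the real script, which also honors the `RUSTC` environment variable:

```rust
use std::process::Command;

fn main() {
    let minor = rustc_minor_version().unwrap_or(0);
    if minor < 32 {
        // Before 1.32, libproc_macro's types are not UnwindSafe; the crate
        // compensates with the manual marker impls.
        println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe");
    }
    if minor >= 44 {
        // proc_macro::LexError implements Display starting with 1.44.
        println!("cargo:rustc-cfg=lexerror_display");
    }
}

// Stand-in: parse the minor version out of `rustc --version`.
fn rustc_minor_version() -> Option<u32> {
    let output = Command::new("rustc").arg("--version").output().ok()?;
    let stdout = String::from_utf8(output.stdout).ok()?;
    // e.g. "rustc 1.53.0 (53cb7b09b 2021-06-17)" -> 53
    stdout.split('.').nth(1)?.parse().ok()
}
```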

third_party/rust/proc-macro2/src/fallback.rs

@ -1,4 +1,4 @@
use crate::parse::{token_stream, Cursor};
use crate::parse::{self, Cursor};
use crate::{Delimiter, Spacing, TokenTree};
#[cfg(span_locations)]
use std::cell::RefCell;
@ -35,7 +35,21 @@ pub(crate) struct TokenStream {
}
#[derive(Debug)]
pub(crate) struct LexError;
pub(crate) struct LexError {
pub(crate) span: Span,
}
impl LexError {
pub(crate) fn span(&self) -> Span {
self.span
}
fn call_site() -> Self {
LexError {
span: Span::call_site(),
}
}
}
impl TokenStream {
pub fn new() -> TokenStream {
@ -139,12 +153,13 @@ impl FromStr for TokenStream {
// Create a dummy file & add it to the source map
let cursor = get_cursor(src);
let (rest, tokens) = token_stream(cursor)?;
if rest.is_empty() {
Ok(tokens)
} else {
Err(LexError)
}
parse::token_stream(cursor)
}
}
impl Display for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("cannot parse string into token stream")
}
}
@ -878,6 +893,20 @@ impl Literal {
}
}
impl FromStr for Literal {
type Err = LexError;
fn from_str(repr: &str) -> Result<Self, Self::Err> {
let cursor = get_cursor(repr);
if let Ok((_rest, literal)) = parse::literal(cursor) {
if literal.text.len() == repr.len() {
return Ok(literal);
}
}
Err(LexError::call_site())
}
}
impl Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.text, f)
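The net effect of these fallback changes shows through the public API: a failed parse now reports where lexing stopped instead of returning a unit struct. A small usage sketch:

```rust
use proc_macro2::TokenStream;

fn main() {
    // An unterminated group cannot lex; since this release the error carries
    // a Span pointing at the failure rather than being a bare marker type.
    let err = "fn main() {".parse::<TokenStream>().unwrap_err();
    println!("{}", err);    // "cannot parse string into token stream"
    let _span = err.span(); // new accessor, backed by the span field above
}
```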

third_party/rust/proc-macro2/src/lib.rs

@ -78,25 +78,16 @@
//! a different thread.
// Proc-macro2 types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.20")]
#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.27")]
#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
#![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
#![allow(clippy::needless_doctest_main)]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![allow(clippy::needless_doctest_main, clippy::vec_init_then_push)]
#[cfg(use_proc_macro)]
extern crate proc_macro;
use std::cmp::Ordering;
use std::fmt::{self, Debug, Display};
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
use std::marker;
use std::ops::RangeBounds;
#[cfg(procmacro2_semver_exempt)]
use std::path::PathBuf;
use std::rc::Rc;
use std::str::FromStr;
mod marker;
mod parse;
#[cfg(wrap_proc_macro)]
@ -113,6 +104,17 @@ use crate::fallback as imp;
#[cfg(wrap_proc_macro)]
mod imp;
use crate::marker::Marker;
use std::cmp::Ordering;
use std::error::Error;
use std::fmt::{self, Debug, Display};
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
use std::ops::RangeBounds;
#[cfg(procmacro2_semver_exempt)]
use std::path::PathBuf;
use std::str::FromStr;
/// An abstract stream of tokens, or more concretely a sequence of token trees.
///
/// This type provides interfaces for iterating over token trees and for
@ -123,27 +125,27 @@ mod imp;
#[derive(Clone)]
pub struct TokenStream {
inner: imp::TokenStream,
_marker: marker::PhantomData<Rc<()>>,
_marker: Marker,
}
/// Error returned from `TokenStream::from_str`.
pub struct LexError {
inner: imp::LexError,
_marker: marker::PhantomData<Rc<()>>,
_marker: Marker,
}
impl TokenStream {
fn _new(inner: imp::TokenStream) -> TokenStream {
TokenStream {
inner,
_marker: marker::PhantomData,
_marker: Marker,
}
}
fn _new_stable(inner: fallback::TokenStream) -> TokenStream {
TokenStream {
inner: inner.into(),
_marker: marker::PhantomData,
_marker: Marker,
}
}
@ -180,7 +182,7 @@ impl FromStr for TokenStream {
fn from_str(src: &str) -> Result<TokenStream, LexError> {
let e = src.parse().map_err(|e| LexError {
inner: e,
_marker: marker::PhantomData,
_marker: Marker,
})?;
Ok(TokenStream::_new(e))
}
@ -248,20 +250,35 @@ impl Debug for TokenStream {
}
}
impl LexError {
pub fn span(&self) -> Span {
Span::_new(self.inner.span())
}
}
impl Debug for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(&self.inner, f)
}
}
impl Display for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.inner, f)
}
}
impl Error for LexError {}
/// The source file of a given `Span`.
///
/// This type is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile {
inner: imp::SourceFile,
_marker: marker::PhantomData<Rc<()>>,
_marker: Marker,
}
#[cfg(procmacro2_semver_exempt)]
@ -269,7 +286,7 @@ impl SourceFile {
fn _new(inner: imp::SourceFile) -> Self {
SourceFile {
inner,
_marker: marker::PhantomData,
_marker: Marker,
}
}
@ -308,6 +325,7 @@ impl Debug for SourceFile {
///
/// This type is semver exempt and not exposed by default.
#[cfg(span_locations)]
#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct LineColumn {
/// The 1-indexed line in the source file on which the span starts or ends
@ -338,21 +356,21 @@ impl PartialOrd for LineColumn {
#[derive(Copy, Clone)]
pub struct Span {
inner: imp::Span,
_marker: marker::PhantomData<Rc<()>>,
_marker: Marker,
}
impl Span {
fn _new(inner: imp::Span) -> Span {
Span {
inner,
_marker: marker::PhantomData,
_marker: Marker,
}
}
fn _new_stable(inner: fallback::Span) -> Span {
Span {
inner: inner.into(),
_marker: marker::PhantomData,
_marker: Marker,
}
}
@ -379,6 +397,7 @@ impl Span {
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
pub fn def_site() -> Span {
Span::_new(imp::Span::def_site())
}
@ -421,6 +440,7 @@ impl Span {
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
pub fn source_file(&self) -> SourceFile {
SourceFile::_new(self.inner.source_file())
}
@ -428,7 +448,14 @@ impl Span {
/// Get the starting line/column in the source file for this span.
///
/// This method requires the `"span-locations"` feature to be enabled.
///
/// When executing in a procedural macro context, the returned line/column
/// are only meaningful if compiled with a nightly toolchain. The stable
/// toolchain does not have this information available. When executing
/// outside of a procedural macro, such as main.rs or build.rs, the
/// line/column are always meaningful regardless of toolchain.
#[cfg(span_locations)]
#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
pub fn start(&self) -> LineColumn {
let imp::LineColumn { line, column } = self.inner.start();
LineColumn { line, column }
@ -437,7 +464,14 @@ impl Span {
/// Get the ending line/column in the source file for this span.
///
/// This method requires the `"span-locations"` feature to be enabled.
///
/// When executing in a procedural macro context, the returned line/column
/// are only meaningful if compiled with a nightly toolchain. The stable
/// toolchain does not have this information available. When executing
/// outside of a procedural macro, such as main.rs or build.rs, the
/// line/column are always meaningful regardless of toolchain.
#[cfg(span_locations)]
#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))]
pub fn end(&self) -> LineColumn {
let imp::LineColumn { line, column } = self.inner.end();
LineColumn { line, column }
@ -460,6 +494,7 @@ impl Span {
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
pub fn eq(&self, other: &Span) -> bool {
self.inner.eq(&other.inner)
}
@ -690,18 +725,18 @@ impl Debug for Group {
}
}
/// An `Punct` is an single punctuation character like `+`, `-` or `#`.
/// A `Punct` is a single punctuation character like `+`, `-` or `#`.
///
/// Multicharacter operators like `+=` are represented as two instances of
/// `Punct` with different forms of `Spacing` returned.
#[derive(Clone)]
pub struct Punct {
op: char,
ch: char,
spacing: Spacing,
span: Span,
}
/// Whether an `Punct` is followed immediately by another `Punct` or followed by
/// Whether a `Punct` is followed immediately by another `Punct` or followed by
/// another token or whitespace.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum Spacing {
@ -722,9 +757,9 @@ impl Punct {
///
/// The returned `Punct` will have the default span of `Span::call_site()`
/// which can be further configured with the `set_span` method below.
pub fn new(op: char, spacing: Spacing) -> Punct {
pub fn new(ch: char, spacing: Spacing) -> Punct {
Punct {
op,
ch,
spacing,
span: Span::call_site(),
}
@ -732,7 +767,7 @@ impl Punct {
/// Returns the value of this punctuation character as `char`.
pub fn as_char(&self) -> char {
self.op
self.ch
}
/// Returns the spacing of this punctuation character, indicating whether
@ -759,14 +794,14 @@ impl Punct {
/// convertible back into the same character.
impl Display for Punct {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Display::fmt(&self.op, f)
Display::fmt(&self.ch, f)
}
}
impl Debug for Punct {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut debug = fmt.debug_struct("Punct");
debug.field("op", &self.op);
debug.field("char", &self.ch);
debug.field("spacing", &self.spacing);
imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
debug.finish()
@ -840,14 +875,14 @@ impl Debug for Punct {
#[derive(Clone)]
pub struct Ident {
inner: imp::Ident,
_marker: marker::PhantomData<Rc<()>>,
_marker: Marker,
}
impl Ident {
fn _new(inner: imp::Ident) -> Ident {
Ident {
inner,
_marker: marker::PhantomData,
_marker: Marker,
}
}
@ -890,6 +925,7 @@ impl Ident {
///
/// This method is semver exempt and not exposed by default.
#[cfg(procmacro2_semver_exempt)]
#[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))]
pub fn new_raw(string: &str, span: Span) -> Ident {
Ident::_new_raw(string, span)
}
@ -968,7 +1004,7 @@ impl Debug for Ident {
#[derive(Clone)]
pub struct Literal {
inner: imp::Literal,
_marker: marker::PhantomData<Rc<()>>,
_marker: Marker,
}
macro_rules! suffixed_int_literals {
@ -978,7 +1014,7 @@ macro_rules! suffixed_int_literals {
/// This function will create an integer like `1u32` where the integer
/// value specified is the first part of the token and the integral is
/// also suffixed at the end. Literals created from negative numbers may
/// not survive rountrips through `TokenStream` or strings and may be
/// not survive roundtrips through `TokenStream` or strings and may be
/// broken into two tokens (`-` and positive literal).
///
/// Literals created through this method have the `Span::call_site()`
@ -999,7 +1035,7 @@ macro_rules! unsuffixed_int_literals {
/// specified on this token, meaning that invocations like
/// `Literal::i8_unsuffixed(1)` are equivalent to
/// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers
/// may not survive rountrips through `TokenStream` or strings and may
/// may not survive roundtrips through `TokenStream` or strings and may
/// be broken into two tokens (`-` and positive literal).
///
/// Literals created through this method have the `Span::call_site()`
@ -1015,14 +1051,14 @@ impl Literal {
fn _new(inner: imp::Literal) -> Literal {
Literal {
inner,
_marker: marker::PhantomData,
_marker: Marker,
}
}
fn _new_stable(inner: fallback::Literal) -> Literal {
Literal {
inner: inner.into(),
_marker: marker::PhantomData,
_marker: Marker,
}
}
@ -1167,6 +1203,17 @@ impl Literal {
}
}
impl FromStr for Literal {
type Err = LexError;
fn from_str(repr: &str) -> Result<Self, LexError> {
repr.parse().map(Literal::_new).map_err(|inner| LexError {
inner,
_marker: Marker,
})
}
}
impl Debug for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
Debug::fmt(&self.inner, f)
@ -1181,10 +1228,9 @@ impl Display for Literal {
/// Public implementation details for the `TokenStream` type, such as iterators.
pub mod token_stream {
use crate::marker::Marker;
use crate::{imp, TokenTree};
use std::fmt::{self, Debug};
use std::marker;
use std::rc::Rc;
pub use crate::TokenStream;
@ -1195,7 +1241,7 @@ pub mod token_stream {
#[derive(Clone)]
pub struct IntoIter {
inner: imp::TokenTreeIter,
_marker: marker::PhantomData<Rc<()>>,
_marker: Marker,
}
impl Iterator for IntoIter {
@ -1219,7 +1265,7 @@ pub mod token_stream {
fn into_iter(self) -> IntoIter {
IntoIter {
inner: self.inner.into_iter(),
_marker: marker::PhantomData,
_marker: Marker,
}
}
}
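Beyond the span accessor, the new `impl Error for LexError` lets the error participate in standard error handling: with `?` it converts automatically into `Box<dyn Error>`. A short sketch of what that enables:

```rust
use proc_macro2::TokenStream;
use std::error::Error;

// LexError now implements std::error::Error, so `?` boxes it without any
// manual From impl or map_err.
fn parse_tokens(src: &str) -> Result<TokenStream, Box<dyn Error>> {
    let tokens: TokenStream = src.parse()?;
    Ok(tokens)
}

fn main() {
    assert!(parse_tokens("a + 1").is_ok());
    assert!(parse_tokens("\"unterminated").is_err());
}
```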

third_party/rust/proc-macro2/src/marker.rs

@ -0,0 +1,18 @@
use std::marker::PhantomData;
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::rc::Rc;
// Zero sized marker with the correct set of autotrait impls we want all proc
// macro types to have.
pub(crate) type Marker = PhantomData<ProcMacroAutoTraits>;
pub(crate) use self::value::*;
mod value {
pub(crate) use std::marker::PhantomData as Marker;
}
pub(crate) struct ProcMacroAutoTraits(Rc<()>);
impl UnwindSafe for ProcMacroAutoTraits {}
impl RefUnwindSafe for ProcMacroAutoTraits {}
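The trick in this new module, restated standalone: `PhantomData<T>` is zero-sized yet inherits `T`'s autotraits, so embedding `PhantomData<ProcMacroAutoTraits>` strips `Send`/`Sync` from every token type (via the `Rc` field) while the manual impls keep the types unwind safe even on toolchains whose standard library lacks the relevant `Rc` impls. A compile-time demonstration of the same pattern:

```rust
use std::marker::PhantomData;
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::rc::Rc;

struct AutoTraits(Rc<()>); // Rc<()> is !Send + !Sync
impl UnwindSafe for AutoTraits {}
impl RefUnwindSafe for AutoTraits {}

struct Token {
    // Zero-sized, but subtracts Send + Sync from Token.
    _marker: PhantomData<AutoTraits>,
}

fn require_unwind_safe<T: UnwindSafe>() {}

fn main() {
    require_unwind_safe::<Token>(); // compiles: manual impls apply
    // With fn require_send<T: Send>() {}, require_send::<Token>() would
    // fail to compile: the Rc inside the marker makes Token !Send.
}
```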

third_party/rust/proc-macro2/src/parse.rs

@ -2,8 +2,8 @@ use crate::fallback::{
is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
};
use crate::{Delimiter, Punct, Spacing, TokenTree};
use std::char;
use std::str::{Bytes, CharIndices, Chars};
use unicode_xid::UnicodeXID;
#[derive(Copy, Clone, Eq, PartialEq)]
pub(crate) struct Cursor<'a> {
@ -26,7 +26,7 @@ impl<'a> Cursor<'a> {
self.rest.starts_with(s)
}
pub(crate) fn is_empty(&self) -> bool {
fn is_empty(&self) -> bool {
self.rest.is_empty()
}
@ -50,16 +50,17 @@ impl<'a> Cursor<'a> {
self.rest.char_indices()
}
fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
fn parse(&self, tag: &str) -> Result<Cursor<'a>, Reject> {
if self.starts_with(tag) {
Ok(self.advance(tag.len()))
} else {
Err(LexError)
Err(Reject)
}
}
}
type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
pub(crate) struct Reject;
type PResult<'a, O> = Result<(Cursor<'a>, O), Reject>;
fn skip_whitespace(input: Cursor) -> Cursor {
let mut s = input;
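From here on the parser distinguishes two failure modes: `Reject`, a zero-sized "this production does not match, try the next one" signal used for backtracking, and `LexError`, a fatal error carrying a span. A distilled sketch of the scheme with hypothetical `digit`/`tokenize` helpers (the real parser threads a `Cursor` and builds token trees):

```rust
struct Reject;                      // cheap backtracking signal
struct LexError { offset: usize }   // fatal, positioned for diagnostics

type PResult<'a, O> = Result<(&'a str, O), Reject>;

fn digit(input: &str) -> PResult<char> {
    match input.chars().next() {
        Some(ch @ '0'..='9') => Ok((&input[1..], ch)),
        _ => Err(Reject), // not a digit here; caller may try another rule
    }
}

fn tokenize(mut input: &str) -> Result<Vec<char>, LexError> {
    let full = input.len();
    let mut out = Vec::new();
    while !input.is_empty() {
        match digit(input) {
            Ok((rest, ch)) => {
                out.push(ch);
                input = rest;
            }
            // At the top level, a Reject is promoted to a positioned error.
            Err(Reject) => return Err(LexError { offset: full - input.len() }),
        }
    }
    Ok(out)
}

fn main() {
    assert!(tokenize("123").is_ok());
    assert_eq!(tokenize("12x3").unwrap_err().offset, 2);
}
```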
@ -86,7 +87,7 @@ fn skip_whitespace(input: Cursor) -> Cursor {
s = rest;
continue;
}
Err(LexError) => return s,
Err(Reject) => return s,
}
}
}
@ -111,7 +112,7 @@ fn skip_whitespace(input: Cursor) -> Cursor {
fn block_comment(input: Cursor) -> PResult<&str> {
if !input.starts_with("/*") {
return Err(LexError);
return Err(Reject);
}
let mut depth = 0;
@ -133,7 +134,7 @@ fn block_comment(input: Cursor) -> PResult<&str> {
i += 1;
}
Err(LexError)
Err(Reject)
}
fn is_whitespace(ch: char) -> bool {
@ -141,14 +142,14 @@ fn is_whitespace(ch: char) -> bool {
ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}
fn word_break(input: Cursor) -> Result<Cursor, LexError> {
fn word_break(input: Cursor) -> Result<Cursor, Reject> {
match input.chars().next() {
Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
Some(ch) if is_ident_continue(ch) => Err(Reject),
Some(_) | None => Ok(input),
}
}
pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
pub(crate) fn token_stream(mut input: Cursor) -> Result<TokenStream, LexError> {
let mut trees = Vec::new();
let mut stack = Vec::new();
@ -166,7 +167,17 @@ pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
let first = match input.bytes().next() {
Some(first) => first,
None => break,
None => match stack.last() {
None => return Ok(TokenStream { inner: trees }),
#[cfg(span_locations)]
Some((lo, _frame)) => {
return Err(LexError {
span: Span { lo: *lo, hi: *lo },
})
}
#[cfg(not(span_locations))]
Some(_frame) => return Err(LexError { span: Span {} }),
},
};
if let Some(open_delimiter) = match first {
@ -187,14 +198,17 @@ pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
b'}' => Some(Delimiter::Brace),
_ => None,
} {
input = input.advance(1);
let frame = stack.pop().ok_or(LexError)?;
let frame = match stack.pop() {
Some(frame) => frame,
None => return Err(lex_error(input)),
};
#[cfg(span_locations)]
let (lo, frame) = frame;
let (open_delimiter, outer) = frame;
if open_delimiter != close_delimiter {
return Err(LexError);
return Err(lex_error(input));
}
input = input.advance(1);
let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
g.set_span(Span {
#[cfg(span_locations)]
@ -205,7 +219,10 @@ pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
trees = outer;
trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
} else {
let (rest, mut tt) = leaf_token(input)?;
let (rest, mut tt) = match leaf_token(input) {
Ok((rest, tt)) => (rest, tt),
Err(Reject) => return Err(lex_error(input)),
};
tt.set_span(crate::Span::_new_stable(Span {
#[cfg(span_locations)]
lo,
@ -216,11 +233,18 @@ pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
input = rest;
}
}
}
if stack.is_empty() {
Ok((input, TokenStream { inner: trees }))
} else {
Err(LexError)
fn lex_error(cursor: Cursor) -> LexError {
#[cfg(not(span_locations))]
let _ = cursor;
LexError {
span: Span {
#[cfg(span_locations)]
lo: cursor.off,
#[cfg(span_locations)]
hi: cursor.off,
},
}
}
@ -228,16 +252,27 @@ fn leaf_token(input: Cursor) -> PResult<TokenTree> {
if let Ok((input, l)) = literal(input) {
// must be parsed before ident
Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
} else if let Ok((input, p)) = op(input) {
} else if let Ok((input, p)) = punct(input) {
Ok((input, TokenTree::Punct(p)))
} else if let Ok((input, i)) = ident(input) {
Ok((input, TokenTree::Ident(i)))
} else {
Err(LexError)
Err(Reject)
}
}
fn ident(input: Cursor) -> PResult<crate::Ident> {
if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"]
.iter()
.any(|prefix| input.starts_with(prefix))
{
Err(Reject)
} else {
ident_any(input)
}
}
fn ident_any(input: Cursor) -> PResult<crate::Ident> {
let raw = input.starts_with("r#");
let rest = input.advance((raw as usize) << 1);
@ -249,7 +284,7 @@ fn ident(input: Cursor) -> PResult<crate::Ident> {
}
if sym == "_" {
return Err(LexError);
return Err(Reject);
}
let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
@ -261,7 +296,7 @@ fn ident_not_raw(input: Cursor) -> PResult<&str> {
match chars.next() {
Some((_, ch)) if is_ident_start(ch) => {}
_ => return Err(LexError),
_ => return Err(Reject),
}
let mut end = input.len();
@ -275,17 +310,13 @@ fn ident_not_raw(input: Cursor) -> PResult<&str> {
Ok((input.advance(end), &input.rest[..end]))
}
fn literal(input: Cursor) -> PResult<Literal> {
match literal_nocapture(input) {
Ok(a) => {
let end = input.len() - a.len();
Ok((a, Literal::_new(input.rest[..end].to_string())))
}
Err(LexError) => Err(LexError),
}
pub(crate) fn literal(input: Cursor) -> PResult<Literal> {
let rest = literal_nocapture(input)?;
let end = input.len() - rest.len();
Ok((rest, Literal::_new(input.rest[..end].to_string())))
}
fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
fn literal_nocapture(input: Cursor) -> Result<Cursor, Reject> {
if let Ok(ok) = string(input) {
Ok(ok)
} else if let Ok(ok) = byte_string(input) {
@ -299,28 +330,28 @@ fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
} else if let Ok(ok) = int(input) {
Ok(ok)
} else {
Err(LexError)
Err(Reject)
}
}
fn literal_suffix(input: Cursor) -> Cursor {
match ident_not_raw(input) {
Ok((input, _)) => input,
Err(LexError) => input,
Err(Reject) => input,
}
}
fn string(input: Cursor) -> Result<Cursor, LexError> {
fn string(input: Cursor) -> Result<Cursor, Reject> {
if let Ok(input) = input.parse("\"") {
cooked_string(input)
} else if let Ok(input) = input.parse("r") {
raw_string(input)
} else {
Err(LexError)
Err(Reject)
}
}
fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
fn cooked_string(input: Cursor) -> Result<Cursor, Reject> {
let mut chars = input.char_indices().peekable();
while let Some((i, ch)) = chars.next() {
@ -329,13 +360,10 @@ fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
let input = input.advance(i + 1);
return Ok(literal_suffix(input));
}
'\r' => {
if let Some((_, '\n')) = chars.next() {
// ...
} else {
break;
}
}
'\r' => match chars.next() {
Some((_, '\n')) => {}
_ => break,
},
'\\' => match chars.next() {
Some((_, 'x')) => {
if !backslash_x_char(&mut chars) {
@ -349,12 +377,18 @@ fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
break;
}
}
Some((_, '\n')) | Some((_, '\r')) => {
while let Some(&(_, ch)) = chars.peek() {
if ch.is_whitespace() {
chars.next();
} else {
break;
Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => {
let mut last = ch;
loop {
if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
return Err(Reject);
}
match chars.peek() {
Some((_, ch)) if ch.is_whitespace() => {
last = *ch;
chars.next();
}
_ => break,
}
}
}
@ -363,34 +397,31 @@ fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
_ch => {}
}
}
Err(LexError)
Err(Reject)
}
fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
fn byte_string(input: Cursor) -> Result<Cursor, Reject> {
if let Ok(input) = input.parse("b\"") {
cooked_byte_string(input)
} else if let Ok(input) = input.parse("br") {
raw_string(input)
} else {
Err(LexError)
Err(Reject)
}
}
fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, Reject> {
let mut bytes = input.bytes().enumerate();
'outer: while let Some((offset, b)) = bytes.next() {
while let Some((offset, b)) = bytes.next() {
match b {
b'"' => {
let input = input.advance(offset + 1);
return Ok(literal_suffix(input));
}
b'\r' => {
if let Some((_, b'\n')) = bytes.next() {
// ...
} else {
break;
}
}
b'\r' => match bytes.next() {
Some((_, b'\n')) => {}
_ => break,
},
b'\\' => match bytes.next() {
Some((_, b'x')) => {
if !backslash_x_byte(&mut bytes) {
@ -399,16 +430,24 @@ fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
}
Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
| Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
Some((newline, b'\n')) | Some((newline, b'\r')) => {
Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => {
let mut last = b as char;
let rest = input.advance(newline + 1);
for (offset, ch) in rest.char_indices() {
if !ch.is_whitespace() {
input = rest.advance(offset);
bytes = input.bytes().enumerate();
continue 'outer;
let mut chars = rest.char_indices();
loop {
if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') {
return Err(Reject);
}
match chars.next() {
Some((_, ch)) if ch.is_whitespace() => last = ch,
Some((offset, _)) => {
input = rest.advance(offset);
bytes = input.bytes().enumerate();
break;
}
None => return Err(Reject),
}
}
break;
}
_ => break,
},
@ -416,10 +455,10 @@ fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
_ => break,
}
}
Err(LexError)
Err(Reject)
}
fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
fn raw_string(input: Cursor) -> Result<Cursor, Reject> {
let mut chars = input.char_indices();
let mut n = 0;
while let Some((i, ch)) = chars.next() {
@ -429,23 +468,26 @@ fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
break;
}
'#' => {}
_ => return Err(LexError),
_ => return Err(Reject),
}
}
for (i, ch) in chars {
while let Some((i, ch)) = chars.next() {
match ch {
'"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
let rest = input.advance(i + 1 + n);
return Ok(literal_suffix(rest));
}
'\r' => {}
'\r' => match chars.next() {
Some((_, '\n')) => {}
_ => break,
},
_ => {}
}
}
Err(LexError)
Err(Reject)
}
fn byte(input: Cursor) -> Result<Cursor, LexError> {
fn byte(input: Cursor) -> Result<Cursor, Reject> {
let input = input.parse("b'")?;
let mut bytes = input.bytes().enumerate();
let ok = match bytes.next().map(|(_, b)| b) {
@ -458,17 +500,17 @@ fn byte(input: Cursor) -> Result<Cursor, LexError> {
b => b.is_some(),
};
if !ok {
return Err(LexError);
return Err(Reject);
}
let (offset, _) = bytes.next().ok_or(LexError)?;
let (offset, _) = bytes.next().ok_or(Reject)?;
if !input.chars().as_str().is_char_boundary(offset) {
return Err(LexError);
return Err(Reject);
}
let input = input.advance(offset).parse("'")?;
Ok(literal_suffix(input))
}
fn character(input: Cursor) -> Result<Cursor, LexError> {
fn character(input: Cursor) -> Result<Cursor, Reject> {
let input = input.parse("'")?;
let mut chars = input.char_indices();
let ok = match chars.next().map(|(_, ch)| ch) {
@ -483,9 +525,9 @@ fn character(input: Cursor) -> Result<Cursor, LexError> {
ch => ch.is_some(),
};
if !ok {
return Err(LexError);
return Err(Reject);
}
let (idx, _) = chars.next().ok_or(LexError)?;
let (idx, _) = chars.next().ok_or(Reject)?;
let input = input.advance(idx).parse("'")?;
Ok(literal_suffix(input))
}
@ -525,16 +567,28 @@ where
I: Iterator<Item = (usize, char)>,
{
next_ch!(chars @ '{');
next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
loop {
let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
if c == '}' {
return true;
let mut value = 0;
let mut len = 0;
for (_, ch) in chars {
let digit = match ch {
'0'..='9' => ch as u8 - b'0',
'a'..='f' => 10 + ch as u8 - b'a',
'A'..='F' => 10 + ch as u8 - b'A',
'_' if len > 0 => continue,
'}' if len > 0 => return char::from_u32(value).is_some(),
_ => return false,
};
if len == 6 {
return false;
}
value *= 0x10;
value += u32::from(digit);
len += 1;
}
false
}
fn float(input: Cursor) -> Result<Cursor, LexError> {
fn float(input: Cursor) -> Result<Cursor, Reject> {
let mut rest = float_digits(input)?;
if let Some(ch) = rest.chars().next() {
if is_ident_start(ch) {
@ -544,11 +598,11 @@ fn float(input: Cursor) -> Result<Cursor, LexError> {
word_break(rest)
}
fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
fn float_digits(input: Cursor) -> Result<Cursor, Reject> {
let mut chars = input.chars().peekable();
match chars.next() {
Some(ch) if ch >= '0' && ch <= '9' => {}
_ => return Err(LexError),
_ => return Err(Reject),
}
let mut len = 1;
@ -570,7 +624,7 @@ fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
.map(|&ch| ch == '.' || is_ident_start(ch))
.unwrap_or(false)
{
return Err(LexError);
return Err(Reject);
}
len += 1;
has_dot = true;
@ -585,12 +639,17 @@ fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
}
}
let rest = input.advance(len);
if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
return Err(LexError);
if !(has_dot || has_exp) {
return Err(Reject);
}
if has_exp {
let token_before_exp = if has_dot {
Ok(input.advance(len - 1))
} else {
Err(Reject)
};
let mut has_sign = false;
let mut has_exp_value = false;
while let Some(&ch) = chars.peek() {
match ch {
@ -598,8 +657,12 @@ fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
if has_exp_value {
break;
}
if has_sign {
return token_before_exp;
}
chars.next();
len += 1;
has_sign = true;
}
'0'..='9' => {
chars.next();
@ -614,14 +677,14 @@ fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
}
}
if !has_exp_value {
return Err(LexError);
return token_before_exp;
}
}
Ok(input.advance(len))
}
fn int(input: Cursor) -> Result<Cursor, LexError> {
fn int(input: Cursor) -> Result<Cursor, Reject> {
let mut rest = digits(input)?;
if let Some(ch) = rest.chars().next() {
if is_ident_start(ch) {
@ -631,7 +694,7 @@ fn int(input: Cursor) -> Result<Cursor, LexError> {
word_break(rest)
}
fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
fn digits(mut input: Cursor) -> Result<Cursor, Reject> {
let base = if input.starts_with("0x") {
input = input.advance(2);
16
@ -648,67 +711,79 @@ fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
let mut len = 0;
let mut empty = true;
for b in input.bytes() {
let digit = match b {
b'0'..=b'9' => (b - b'0') as u64,
b'a'..=b'f' => 10 + (b - b'a') as u64,
b'A'..=b'F' => 10 + (b - b'A') as u64,
match b {
b'0'..=b'9' => {
let digit = (b - b'0') as u64;
if digit >= base {
return Err(Reject);
}
}
b'a'..=b'f' => {
let digit = 10 + (b - b'a') as u64;
if digit >= base {
break;
}
}
b'A'..=b'F' => {
let digit = 10 + (b - b'A') as u64;
if digit >= base {
break;
}
}
b'_' => {
if empty && base == 10 {
return Err(LexError);
return Err(Reject);
}
len += 1;
continue;
}
_ => break,
};
if digit >= base {
return Err(LexError);
}
len += 1;
empty = false;
}
if empty {
Err(LexError)
Err(Reject)
} else {
Ok(input.advance(len))
}
}
fn op(input: Cursor) -> PResult<Punct> {
match op_char(input) {
Ok((rest, '\'')) => {
ident(rest)?;
fn punct(input: Cursor) -> PResult<Punct> {
let (rest, ch) = punct_char(input)?;
if ch == '\'' {
if ident_any(rest)?.0.starts_with("'") {
Err(Reject)
} else {
Ok((rest, Punct::new('\'', Spacing::Joint)))
}
Ok((rest, ch)) => {
let kind = match op_char(rest) {
Ok(_) => Spacing::Joint,
Err(LexError) => Spacing::Alone,
};
Ok((rest, Punct::new(ch, kind)))
}
Err(LexError) => Err(LexError),
} else {
let kind = match punct_char(rest) {
Ok(_) => Spacing::Joint,
Err(Reject) => Spacing::Alone,
};
Ok((rest, Punct::new(ch, kind)))
}
}
fn op_char(input: Cursor) -> PResult<char> {
fn punct_char(input: Cursor) -> PResult<char> {
if input.starts_with("//") || input.starts_with("/*") {
// Do not accept `/` of a comment as an op.
return Err(LexError);
// Do not accept `/` of a comment as a punct.
return Err(Reject);
}
let mut chars = input.chars();
let first = match chars.next() {
Some(ch) => ch,
None => {
return Err(LexError);
return Err(Reject);
}
};
let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
if recognized.contains(first) {
Ok((input.advance(first.len_utf8()), first))
} else {
Err(LexError)
Err(Reject)
}
}
@ -727,7 +802,7 @@ fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
while let Some(cr) = scan_for_bare_cr.find('\r') {
let rest = &scan_for_bare_cr[cr + 1..];
if !rest.starts_with('\n') {
return Err(LexError);
return Err(Reject);
}
scan_for_bare_cr = rest;
}
@ -764,7 +839,7 @@ fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
} else if input.starts_with("///") {
let input = input.advance(3);
if input.starts_with("/") {
return Err(LexError);
return Err(Reject);
}
let (input, s) = take_until_newline_or_eof(input);
Ok((input, (s, false)))
@ -772,7 +847,7 @@ fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
let (input, s) = block_comment(input)?;
Ok((input, (&s[3..s.len() - 2], false)))
} else {
Err(LexError)
Err(Reject)
}
}
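One behavioral change in this file worth isolating: `\u{...}` escapes are now validated semantically, not just lexically. The rewritten loop accumulates the scalar value, caps the escape at six hex digits, forbids a leading underscore, and finally asks `char::from_u32` to rule out surrogates and values past `char::MAX`. The same logic over just the digits between the braces, as a standalone sketch:

```rust
fn valid_unicode_escape_body(body: &str) -> bool {
    let mut value: u32 = 0;
    let mut len = 0;
    for ch in body.chars() {
        let digit = match ch {
            '0'..='9' => ch as u32 - '0' as u32,
            'a'..='f' => 10 + ch as u32 - 'a' as u32,
            'A'..='F' => 10 + ch as u32 - 'A' as u32,
            '_' if len > 0 => continue, // separators allowed after a digit
            _ => return false,
        };
        if len == 6 {
            return false; // overlong, e.g. \u{0000000}
        }
        value = value * 0x10 + digit;
        len += 1;
    }
    // Rejects empty bodies, surrogates, and anything above char::MAX.
    len > 0 && char::from_u32(value).is_some()
}

fn main() {
    assert!(valid_unicode_escape_body("1F600"));
    assert!(!valid_unicode_escape_body("999999")); // past char::MAX
    assert!(!valid_unicode_escape_body("_0"));     // leading underscore
    assert!(!valid_unicode_escape_body(""));       // empty
}
```

These are exactly the cases the new `fail(...)` lines in tests/test.rs exercise.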

third_party/rust/proc-macro2/src/wrapper.rs

@ -29,6 +29,14 @@ pub(crate) enum LexError {
Fallback(fallback::LexError),
}
impl LexError {
fn call_site() -> Self {
LexError::Fallback(fallback::LexError {
span: fallback::Span::call_site(),
})
}
}
fn mismatch() -> ! {
panic!("stable/nightly mismatch")
}
@ -107,8 +115,8 @@ impl FromStr for TokenStream {
// Work around https://github.com/rust-lang/rust/issues/58736.
fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
.unwrap_or(Err(LexError::Fallback(fallback::LexError)))
let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler));
result.unwrap_or_else(|_| Err(LexError::call_site()))
}
impl Display for TokenStream {
@ -150,9 +158,9 @@ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
Spacing::Joint => proc_macro::Spacing::Joint,
Spacing::Alone => proc_macro::Spacing::Alone,
};
let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
op.set_span(tt.span().inner.unwrap_nightly());
op.into()
let mut punct = proc_macro::Punct::new(tt.as_char(), spacing);
punct.set_span(tt.span().inner.unwrap_nightly());
punct.into()
}
TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
@ -243,6 +251,15 @@ impl Debug for TokenStream {
}
}
impl LexError {
pub(crate) fn span(&self) -> Span {
match self {
LexError::Compiler(_) => Span::call_site(),
LexError::Fallback(e) => Span::Fallback(e.span()),
}
}
}
impl From<proc_macro::LexError> for LexError {
fn from(e: proc_macro::LexError) -> LexError {
LexError::Compiler(e)
@ -264,6 +281,23 @@ impl Debug for LexError {
}
}
impl Display for LexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
#[cfg(lexerror_display)]
LexError::Compiler(e) => Display::fmt(e, f),
#[cfg(not(lexerror_display))]
LexError::Compiler(_e) => Display::fmt(
&fallback::LexError {
span: fallback::Span::call_site(),
},
f,
),
LexError::Fallback(e) => Display::fmt(e, f),
}
}
}
#[derive(Clone)]
pub(crate) enum TokenTreeIter {
Compiler(proc_macro::token_stream::IntoIter),
@ -882,6 +916,30 @@ impl From<fallback::Literal> for Literal {
}
}
impl FromStr for Literal {
type Err = LexError;
fn from_str(repr: &str) -> Result<Self, Self::Err> {
if inside_proc_macro() {
// TODO: use libproc_macro's FromStr impl once it is available in
// rustc. https://github.com/rust-lang/rust/pull/84717
let tokens = proc_macro_parse(repr)?;
let mut iter = tokens.into_iter();
if let (Some(proc_macro::TokenTree::Literal(literal)), None) =
(iter.next(), iter.next())
{
if literal.to_string().len() == repr.len() {
return Ok(Literal::Compiler(literal));
}
}
Err(LexError::call_site())
} else {
let literal = fallback::Literal::from_str(repr)?;
Ok(Literal::Fallback(literal))
}
}
}
impl Display for Literal {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
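The compiler path cannot defer to an upstream `FromStr` yet (the TODO tracks rust-lang/rust#84717), so it parses a whole `TokenStream` and accepts the result only if it is exactly one `Literal` whose printed form covers the full input. The observable behavior, matching the new `literal_parse` test below:

```rust
use proc_macro2::Literal;
use std::str::FromStr;

fn main() {
    // Accepted: the entire input is a single literal token.
    let lit = Literal::from_str("2.5f32").unwrap();
    assert_eq!(lit.to_string(), "2.5f32");

    // Rejected: more than one token, or surrounding whitespace/comments.
    assert!(Literal::from_str("0 1").is_err());
    assert!(Literal::from_str("/* c */0").is_err());
}
```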

third_party/rust/proc-macro2/tests/marker.rs

@ -57,3 +57,36 @@ mod semver_exempt {
assert_impl!(SourceFile is not Send or Sync);
}
#[cfg(not(no_libprocmacro_unwind_safe))]
mod unwind_safe {
use super::*;
use std::panic::{RefUnwindSafe, UnwindSafe};
macro_rules! assert_unwind_safe {
($($types:ident)*) => {
$(
assert_impl!($types is UnwindSafe and RefUnwindSafe);
)*
};
}
assert_unwind_safe! {
Delimiter
Group
Ident
LexError
Literal
Punct
Spacing
Span
TokenStream
TokenTree
}
#[cfg(procmacro2_semver_exempt)]
assert_unwind_safe! {
LineColumn
SourceFile
}
}
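The shape of the assertion this test relies on (its `assert_impl!` macro is defined earlier in tests/marker.rs and not shown in this diff): a generic function whose bounds turn a missing impl into a compile error, so the test suite fails to build at all if a type loses unwind safety. A minimal sketch:

```rust
use std::panic::{RefUnwindSafe, UnwindSafe};

// Instantiating this function is a compile-time proof that T has both impls.
fn assert_unwind_safe<T: UnwindSafe + RefUnwindSafe>() {}

fn main() {
    assert_unwind_safe::<proc_macro2::TokenStream>();
    assert_unwind_safe::<proc_macro2::LexError>();
}
```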

third_party/rust/proc-macro2/tests/test.rs

@ -1,4 +1,5 @@
use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
use std::panic;
use std::str::{self, FromStr};
#[test]
@ -71,9 +72,24 @@ fn lifetime_number() {
}
#[test]
#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
fn lifetime_invalid() {
Ident::new("'a#", Span::call_site());
let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site()));
match result {
Err(box_any) => {
let message = box_any.downcast_ref::<String>().unwrap();
let expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0
let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 ..
assert!(
message == expected1 || message == expected2,
"panic message does not match expected string\n\
\x20 panic message: `{:?}`\n\
\x20expected message: `{:?}`",
message,
expected2,
);
}
Ok(_) => panic!("test did not panic as expected"),
}
}
#[test]
@ -83,6 +99,11 @@ fn literal_string() {
assert_eq!(Literal::string("didn't").to_string(), "\"didn't\"");
}
#[test]
fn literal_raw_string() {
"r\"\r\n\"".parse::<TokenStream>().unwrap();
}
#[test]
fn literal_character() {
assert_eq!(Literal::character('x').to_string(), "'x'");
@ -115,6 +136,10 @@ fn literal_suffix() {
assert_eq!(token_count("r#\"\"#r"), 1);
assert_eq!(token_count("'c'c"), 1);
assert_eq!(token_count("b'b'b"), 1);
assert_eq!(token_count("0E"), 1);
assert_eq!(token_count("0o0A"), 1);
assert_eq!(token_count("0E--0"), 4);
assert_eq!(token_count("0.0ECMA"), 1);
}
#[test]
@ -138,6 +163,20 @@ fn literal_iter_negative() {
assert!(iter.next().is_none());
}
#[test]
fn literal_parse() {
assert!("1".parse::<Literal>().is_ok());
assert!("1.0".parse::<Literal>().is_ok());
assert!("'a'".parse::<Literal>().is_ok());
assert!("\"\n\"".parse::<Literal>().is_ok());
assert!("0 1".parse::<Literal>().is_err());
assert!(" 0".parse::<Literal>().is_err());
assert!("0 ".parse::<Literal>().is_err());
assert!("/* comment */0".parse::<Literal>().is_err());
assert!("0/* comment */".parse::<Literal>().is_err());
assert!("0// comment".parse::<Literal>().is_err());
}
#[test]
fn roundtrip() {
fn roundtrip(p: &str) {
@ -187,6 +226,16 @@ fn fail() {
fail("' static");
fail("r#1");
fail("r#_");
fail("\"\\u{0000000}\""); // overlong unicode escape (rust allows at most 6 hex digits)
fail("\"\\u{999999}\""); // outside of valid range of char
fail("\"\\u{_0}\""); // leading underscore
fail("\"\\u{}\""); // empty
fail("b\"\r\""); // bare carriage return in byte string
fail("r\"\r\""); // bare carriage return in raw string
fail("\"\\\r \""); // backslash carriage return
fail("'aa'aa");
fail("br##\"\"#");
fail("\"\\\n\u{85}\r\"");
}
#[cfg(span_locations)]
@ -274,7 +323,7 @@ fn no_panic() {
}
#[test]
fn op_before_comment() {
fn punct_before_comment() {
let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
match tts.next().unwrap() {
TokenTree::Punct(tt) => {
@ -285,6 +334,22 @@ fn op_before_comment() {
}
}
#[test]
fn joint_last_token() {
// This test verifies that we match the behavior of libproc_macro *not* in
// the range nightly-2020-09-06 through nightly-2020-09-10, in which this
// behavior was temporarily broken.
// See https://github.com/rust-lang/rust/issues/76399
let joint_punct = Punct::new(':', Spacing::Joint);
let stream = TokenStream::from(TokenTree::Punct(joint_punct));
let punct = match stream.into_iter().next().unwrap() {
TokenTree::Punct(punct) => punct,
_ => unreachable!(),
};
assert_eq!(punct.spacing(), Spacing::Joint);
}
#[test]
fn raw_identifier() {
let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
@ -322,7 +387,7 @@ TokenStream [
sym: a,
},
Punct {
op: '+',
char: '+',
spacing: Alone,
},
Literal {
@ -343,7 +408,7 @@ TokenStream [
sym: a
},
Punct {
op: '+',
char: '+',
spacing: Alone
},
Literal {
@ -365,7 +430,7 @@ TokenStream [
span: bytes(2..3),
},
Punct {
op: '+',
char: '+',
spacing: Alone,
span: bytes(4..5),
},
@ -390,7 +455,7 @@ TokenStream [
span: bytes(2..3)
},
Punct {
op: '+',
char: '+',
spacing: Alone,
span: bytes(4..5)
},