Mirror of https://gitee.com/openharmony/third_party_rust_proc-macro2 (synced 2024-11-23 15:29:39 +00:00)
Merge pull request #36 from mystor/stable_span
Support meaningful spans in the stable version of proc-macro2
Commit 92ab1b4e3e
.travis.yml

@@ -12,12 +12,15 @@ matrix:
      script:
        - cargo test
        - cargo build --features unstable
        - cargo doc --no-deps
        - RUSTFLAGS='--cfg procmacro2_unstable' cargo test
        - RUSTFLAGS='--cfg procmacro2_unstable' cargo build --features unstable
        - RUSTFLAGS='--cfg procmacro2_unstable' cargo doc --no-deps
      after_success:
        - travis-cargo --only nightly doc-upload

script:
  - cargo test
  - RUSTFLAGS='--cfg procmacro2_unstable' cargo test
env:
  global:
    - TRAVIS_CARGO_NIGHTLY_FEATURE=""
README.md

@@ -59,6 +59,17 @@ proc-macro2 = { version = "0.1", features = ["unstable"] }
```

## Unstable Features

`proc-macro2` supports exporting some methods from `proc_macro` which are
currently highly unstable, and may not be stabilized in the first pass of
`proc_macro` stabilizations. These features are not exported by default.

To export these features, the `procmacro2_unstable` config flag must be passed
to rustc. To pass this flag, run `cargo` with
`RUSTFLAGS='--cfg procmacro2_unstable' cargo build`.

# License

This project is licensed under either of
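To make the flag's effect concrete, here is a minimal sketch (not part of the diff) of a downstream program that uses the span methods this PR adds when the cfg flag is set; the `proc_macro2` items match the README and the code below, while the `dump_spans` helper and the input string are illustrative assumptions.

```rust
// Sketch: built with `RUSTFLAGS='--cfg procmacro2_unstable' cargo build`,
// the gated branch compiles; without the flag the stub branch is used.
extern crate proc_macro2;

use proc_macro2::TokenStream;

#[cfg(procmacro2_unstable)]
fn dump_spans(src: &str) {
    let stream: TokenStream = src.parse().expect("valid token stream");
    for tree in stream {
        let (start, end) = (tree.span.start(), tree.span.end());
        println!("{}:{} .. {}:{}", start.line, start.column, end.line, end.column);
    }
}

#[cfg(not(procmacro2_unstable))]
fn dump_spans(_src: &str) {
    // Line/column information is only exported under the cfg flag.
}

fn main() {
    dump_spans("fn main() { let x = 1; }");
}
```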
src/lib.rs

@@ -103,6 +103,46 @@ impl TokenStream {
    }
}

// Returned by reference, so we can't easily wrap it.
#[cfg(procmacro2_unstable)]
pub use imp::FileName;

#[cfg(procmacro2_unstable)]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile(imp::SourceFile);

#[cfg(procmacro2_unstable)]
impl SourceFile {
    /// Get the path to this source file as a string.
    pub fn path(&self) -> &FileName {
        self.0.path()
    }

    pub fn is_real(&self) -> bool {
        self.0.is_real()
    }
}

#[cfg(procmacro2_unstable)]
impl AsRef<FileName> for SourceFile {
    fn as_ref(&self) -> &FileName {
        self.0.path()
    }
}

#[cfg(procmacro2_unstable)]
impl fmt::Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

#[cfg(procmacro2_unstable)]
pub struct LineColumn {
    pub line: usize,
    pub column: usize,
}

#[derive(Copy, Clone)]
pub struct Span(imp::Span);

@@ -121,6 +161,28 @@ impl Span {
    pub fn def_site() -> Span {
        Span(imp::Span::def_site())
    }

    #[cfg(procmacro2_unstable)]
    pub fn source_file(&self) -> SourceFile {
        SourceFile(self.0.source_file())
    }

    #[cfg(procmacro2_unstable)]
    pub fn start(&self) -> LineColumn {
        let imp::LineColumn{ line, column } = self.0.start();
        LineColumn { line, column }
    }

    #[cfg(procmacro2_unstable)]
    pub fn end(&self) -> LineColumn {
        let imp::LineColumn{ line, column } = self.0.end();
        LineColumn { line, column }
    }

    #[cfg(procmacro2_unstable)]
    pub fn join(&self, other: Span) -> Option<Span> {
        self.0.join(other.0).map(Span)
    }
}

#[derive(Clone, Debug)]
src/stable.rs

@@ -1,6 +1,8 @@
use std::ascii;
use std::borrow::Borrow;
use std::cell::RefCell;
#[cfg(procmacro2_unstable)]
use std::cmp;
use std::collections::HashMap;
use std::fmt;
use std::iter;

@@ -12,7 +14,7 @@ use std::vec;

use proc_macro;
use unicode_xid::UnicodeXID;
use strnom::{PResult, skip_whitespace, block_comment, whitespace, word_break};
use strnom::{Cursor, PResult, skip_whitespace, block_comment, whitespace, word_break};

use {TokenTree, TokenNode, Delimiter, Spacing};

@@ -34,11 +36,36 @@ impl TokenStream {
    }
}

#[cfg(procmacro2_unstable)]
fn get_cursor(src: &str) -> Cursor {
    // Create a dummy file & add it to the codemap
    CODEMAP.with(|cm| {
        let mut cm = cm.borrow_mut();
        let name = format!("<parsed string {}>", cm.files.len());
        let span = cm.add_file(&name, src);
        Cursor {
            rest: src,
            off: span.lo,
        }
    })
}

#[cfg(not(procmacro2_unstable))]
fn get_cursor(src: &str) -> Cursor {
    Cursor {
        rest: src,
        off: 0,
    }
}

impl FromStr for TokenStream {
    type Err = LexError;

    fn from_str(src: &str) -> Result<TokenStream, LexError> {
        match token_stream(src) {
        // Create a dummy file & add it to the codemap
        let cursor = get_cursor(src);

        match token_stream(cursor) {
            Ok((input, output)) => {
                if skip_whitespace(input).len() != 0 {
                    Err(LexError)
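The effect of `get_cursor` is easiest to see with concrete numbers. The arithmetic below is a hedged illustration (the codemap state and the input string are assumed, not taken from the diff): each call to `from_str` registers the source as a fresh dummy file, so every span produced by that parse lands in an offset range owned by that file alone.

```rust
// Standalone re-derivation of the offset allocation (assumed starting state).
fn next_start_pos(last_hi: u32) -> u32 {
    last_hi + 1 // +1 keeps a gap so files never share an offset
}

fn main() {
    let last_hi = 41;                 // assumed end of the previous file
    let src = "foo + bar";            // 9 bytes
    let lo = next_start_pos(last_hi); // 42
    let hi = lo + src.len() as u32;   // 51
    println!("new dummy file occupies offsets {}..{}", lo, hi);
    // A token covering bytes 0..3 of `src` ("foo") would get Span { lo: 42, hi: 45 }.
}
```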
@@ -137,16 +164,208 @@ impl IntoIterator for TokenStream {
    }
}

#[cfg(procmacro2_unstable)]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FileName(String);

#[cfg(procmacro2_unstable)]
impl fmt::Display for FileName {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

#[cfg(procmacro2_unstable)]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile {
    name: FileName,
}

#[cfg(procmacro2_unstable)]
impl SourceFile {
    /// Get the path to this source file as a string.
    pub fn path(&self) -> &FileName {
        &self.name
    }

    pub fn is_real(&self) -> bool {
        // XXX(nika): Support real files in the future?
        false
    }
}

#[cfg(procmacro2_unstable)]
impl AsRef<FileName> for SourceFile {
    fn as_ref(&self) -> &FileName {
        self.path()
    }
}

#[cfg(procmacro2_unstable)]
impl fmt::Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("SourceFile")
            .field("path", &self.path())
            .field("is_real", &self.is_real())
            .finish()
    }
}

#[cfg(procmacro2_unstable)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct LineColumn {
    pub line: usize,
    pub column: usize,
}

#[cfg(procmacro2_unstable)]
thread_local! {
    static CODEMAP: RefCell<Codemap> = RefCell::new(Codemap {
        // NOTE: We start with a single dummy file which all call_site() and
        // def_site() spans reference.
        files: vec![FileInfo {
            name: "<unspecified>".to_owned(),
            span: Span { lo: 0, hi: 0 },
            lines: vec![0],
        }],
    });
}

#[cfg(procmacro2_unstable)]
struct FileInfo {
    name: String,
    span: Span,
    lines: Vec<usize>,
}

#[cfg(procmacro2_unstable)]
impl FileInfo {
    fn offset_line_column(&self, offset: usize) -> LineColumn {
        assert!(self.span_within(Span { lo: offset as u32, hi: offset as u32 }));
        let offset = offset - self.span.lo as usize;
        match self.lines.binary_search(&offset) {
            Ok(found) => LineColumn {
                line: found + 1,
                column: 0
            },
            Err(idx) => LineColumn {
                line: idx,
                column: offset - self.lines[idx - 1]
            },
        }
    }

    fn span_within(&self, span: Span) -> bool {
        span.lo >= self.span.lo && span.hi <= self.span.hi
    }
}
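A quick numeric walk-through of `offset_line_column` (values assumed for illustration): with `lines = [0, 3, 6, 7]` and a file starting at `span.lo = 0`, a relative offset of 4 is not a line start, so `binary_search` returns `Err(2)` and the result is line 2, column 4 - 3 = 1; offset 6 is a line start, `Ok(2)`, giving line 3, column 0. The sketch below re-implements just that lookup so it can be run on its own.

```rust
// Minimal standalone copy of the binary-search lookup used above.
fn offset_line_column(lines: &[usize], offset: usize) -> (usize, usize) {
    match lines.binary_search(&offset) {
        Ok(found) => (found + 1, 0),
        Err(idx) => (idx, offset - lines[idx - 1]),
    }
}

fn main() {
    let lines = [0, 3, 6, 7]; // line-start offsets for "ab\ncd\n\nx"
    assert_eq!(offset_line_column(&lines, 4), (2, 1)); // 'd' sits on line 2, column 1
    assert_eq!(offset_line_column(&lines, 6), (3, 0)); // start of the empty line 3
    assert_eq!(offset_line_column(&lines, 0), (1, 0)); // first byte is line 1, column 0
}
```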
/// Computes the offsets of each line in the given source string.
#[cfg(procmacro2_unstable)]
fn lines_offsets(s: &str) -> Vec<usize> {
    let mut lines = vec![0];
    let mut prev = 0;
    while let Some(len) = s[prev..].find('\n') {
        prev += len + 1;
        lines.push(prev);
    }
    lines
}
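For reference, a quick check of what the line-offset computation produces; the input string here is an illustrative assumption, and the loop is the same scan as `lines_offsets` above, inlined so the snippet stands alone.

```rust
fn main() {
    // The byte offset at which each line begins: line 1 starts at 0,
    // and line 2 starts right after the '\n' at offset 12.
    let src = "fn main() {}\nbody";
    let mut lines = vec![0];
    let mut prev = 0;
    while let Some(len) = src[prev..].find('\n') {
        prev += len + 1;
        lines.push(prev);
    }
    assert_eq!(lines, vec![0, 13]);
}
```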
#[cfg(procmacro2_unstable)]
struct Codemap {
    files: Vec<FileInfo>,
}

#[cfg(procmacro2_unstable)]
impl Codemap {
    fn next_start_pos(&self) -> u32 {
        // Add 1 so there's always space between files.
        //
        // We'll always have at least 1 file, as we initialize our files list
        // with a dummy file.
        self.files.last().unwrap().span.hi + 1
    }

    fn add_file(&mut self, name: &str, src: &str) -> Span {
        let lines = lines_offsets(src);
        let lo = self.next_start_pos();
        // XXX(nika): Should we bother doing a checked cast or checked add here?
        let span = Span { lo: lo, hi: lo + (src.len() as u32) };

        self.files.push(FileInfo {
            name: name.to_owned(),
            span: span,
            lines: lines,
        });

        span
    }

    fn fileinfo(&self, span: Span) -> &FileInfo {
        for file in &self.files {
            if file.span_within(span) {
                return file;
            }
        }
        panic!("Invalid span with no related FileInfo!");
    }
}

#[derive(Clone, Copy, Debug)]
pub struct Span;
pub struct Span { lo: u32, hi: u32 }

impl Span {
    pub fn call_site() -> Span {
        Span
        Span { lo: 0, hi: 0 }
    }

    pub fn def_site() -> Span {
        Span
        Span { lo: 0, hi: 0 }
    }

    #[cfg(procmacro2_unstable)]
    pub fn source_file(&self) -> SourceFile {
        CODEMAP.with(|cm| {
            let cm = cm.borrow();
            let fi = cm.fileinfo(*self);
            SourceFile {
                name: FileName(fi.name.clone()),
            }
        })
    }

    #[cfg(procmacro2_unstable)]
    pub fn start(&self) -> LineColumn {
        CODEMAP.with(|cm| {
            let cm = cm.borrow();
            let fi = cm.fileinfo(*self);
            fi.offset_line_column(self.lo as usize)
        })
    }

    #[cfg(procmacro2_unstable)]
    pub fn end(&self) -> LineColumn {
        CODEMAP.with(|cm| {
            let cm = cm.borrow();
            let fi = cm.fileinfo(*self);
            fi.offset_line_column(self.hi as usize)
        })
    }

    #[cfg(procmacro2_unstable)]
    pub fn join(&self, other: Span) -> Option<Span> {
        CODEMAP.with(|cm| {
            let cm = cm.borrow();
            // If `other` is not within the same FileInfo as us, return None.
            if !cm.fileinfo(*self).span_within(other) {
                return None;
            }
            Some(Span {
                lo: cmp::min(self.lo, other.lo),
                hi: cmp::max(self.hi, other.hi),
            })
        })
    }
}
@@ -349,13 +568,19 @@ named!(token_stream -> ::TokenStream, map!(
    |trees| ::TokenStream(TokenStream { inner: trees })
));

named!(token_tree -> TokenTree,
       map!(token_kind, |s: TokenNode| {
           TokenTree {
               span: ::Span(Span),
               kind: s,
           }
       }));
fn token_tree(input: Cursor) -> PResult<TokenTree> {
    let input = skip_whitespace(input);
    let lo = input.off;
    let (input, kind) = token_kind(input)?;
    let hi = input.off;
    Ok((input, TokenTree {
        span: ::Span(Span {
            lo: lo,
            hi: hi,
        }),
        kind: kind,
    }))
}

named!(token_kind -> TokenNode, alt!(
    map!(delimited, |(d, s)| TokenNode::Group(d, s))
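The new `token_tree` above records the cursor offset before and after `token_kind`, so a token's span is simply the byte range it consumed. A hedged worked example (offsets assume the cursor starts at 0, i.e. the non-unstable dummy file): parsing "  foo" skips two spaces, leaving `off = 2`; `token_kind` consumes `foo`, leaving `off = 5`; the tree gets `Span { lo: 2, hi: 5 }`.

```rust
// Standalone illustration of the lo/hi bookkeeping (not the crate's code).
fn main() {
    let src = "  foo";
    let lo = src.len() - src.trim_start().len(); // offset after skip_whitespace: 2
    let consumed = "foo".len();                  // what token_kind would consume
    let hi = lo + consumed;                      // 5
    assert_eq!((lo, hi), (2, 5));                // => Span { lo: 2, hi: 5 }
}
```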
@@ -387,7 +612,7 @@ named!(delimited -> (Delimiter, ::TokenStream), alt!(
    ) => { |ts| (Delimiter::Brace, ts) }
));

fn symbol(mut input: &str) -> PResult<TokenNode> {
fn symbol(mut input: Cursor) -> PResult<TokenNode> {
    input = skip_whitespace(input);

    let mut chars = input.char_indices();

@@ -410,14 +635,14 @@ fn symbol(mut input: &str) -> PResult<TokenNode> {
        }
    }

    if lifetime && &input[..end] != "'static" && KEYWORDS.contains(&&input[1..end]) {
    if lifetime && &input.rest[..end] != "'static" && KEYWORDS.contains(&&input.rest[1..end]) {
        Err(LexError)
    } else {
        let (a, b) = input.split_at(end);
        let a = &input.rest[..end];
        if a == "_" {
            Ok((b, TokenNode::Op('_', Spacing::Alone)))
            Ok((input.advance(end), TokenNode::Op('_', Spacing::Alone)))
        } else {
            Ok((b, TokenNode::Term(::Term::intern(a))))
            Ok((input.advance(end), TokenNode::Term(::Term::intern(a))))
        }
    }
}

@@ -433,7 +658,7 @@ static KEYWORDS: &'static [&'static str] = &[
    "yield",
];

fn literal(input: &str) -> PResult<::Literal> {
fn literal(input: Cursor) -> PResult<::Literal> {
    let input_no_ws = skip_whitespace(input);

    match literal_nocapture(input_no_ws) {

@@ -441,7 +666,7 @@ fn literal(input: &str) -> PResult<::Literal> {
            let start = input.len() - input_no_ws.len();
            let len = input_no_ws.len() - a.len();
            let end = start + len;
            Ok((a, ::Literal(Literal(input[start..end].to_string()))))
            Ok((a, ::Literal(Literal(input.rest[start..end].to_string()))))
        }
        Err(LexError) => Err(LexError),
    }

@@ -480,12 +705,12 @@ named!(quoted_string -> (), delimited!(
    tag!("\"")
));

fn cooked_string(input: &str) -> PResult<()> {
fn cooked_string(input: Cursor) -> PResult<()> {
    let mut chars = input.char_indices().peekable();
    while let Some((byte_offset, ch)) = chars.next() {
        match ch {
            '"' => {
                return Ok((&input[byte_offset..], ()));
                return Ok((input.advance(byte_offset), ()));
            }
            '\r' => {
                if let Some((_, '\n')) = chars.next() {

@@ -544,12 +769,12 @@ named!(byte_string -> (), alt!(
    ) => { |_| () }
));

fn cooked_byte_string(mut input: &str) -> PResult<()> {
fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
    let mut bytes = input.bytes().enumerate();
    'outer: while let Some((offset, b)) = bytes.next() {
        match b {
            b'"' => {
                return Ok((&input[offset..], ()));
                return Ok((input.advance(offset), ()));
            }
            b'\r' => {
                if let Some((_, b'\n')) = bytes.next() {

@@ -574,10 +799,10 @@ fn cooked_byte_string(mut input: &str) -> PResult<()> {
            Some((_, b'"')) => {}
            Some((newline, b'\n')) |
            Some((newline, b'\r')) => {
                let rest = &input[newline + 1..];
                let rest = input.advance(newline + 1);
                for (offset, ch) in rest.char_indices() {
                    if !ch.is_whitespace() {
                        input = &rest[offset..];
                        input = rest.advance(offset);
                        bytes = input.bytes().enumerate();
                        continue 'outer;
                    }

@@ -594,7 +819,7 @@ fn cooked_byte_string(mut input: &str) -> PResult<()> {
    Err(LexError)
}

fn raw_string(input: &str) -> PResult<()> {
fn raw_string(input: Cursor) -> PResult<()> {
    let mut chars = input.char_indices();
    let mut n = 0;
    while let Some((byte_offset, ch)) = chars.next() {

@@ -609,8 +834,8 @@ fn raw_string(input: &str) -> PResult<()> {
    }
    for (byte_offset, ch) in chars {
        match ch {
            '"' if input[byte_offset + 1..].starts_with(&input[..n]) => {
                let rest = &input[byte_offset + 1 + n..];
            '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
                let rest = input.advance(byte_offset + 1 + n);
                return Ok((rest, ()))
            }
            '\r' => {}

@@ -628,7 +853,7 @@ named!(byte -> (), do_parse!(
    (())
));

fn cooked_byte(input: &str) -> PResult<()> {
fn cooked_byte(input: Cursor) -> PResult<()> {
    let mut bytes = input.bytes().enumerate();
    let ok = match bytes.next().map(|(_, b)| b) {
        Some(b'\\') => {

@@ -648,8 +873,8 @@ fn cooked_byte(input: &str) -> PResult<()> {
    };
    if ok {
        match bytes.next() {
            Some((offset, _)) => Ok((&input[offset..], ())),
            None => Ok(("", ())),
            Some((offset, _)) => Ok((input.advance(offset), ())),
            None => Ok((input.advance(input.len()), ())),
        }
    } else {
        Err(LexError)

@@ -663,7 +888,7 @@ named!(character -> (), do_parse!(
    (())
));

fn cooked_char(input: &str) -> PResult<()> {
fn cooked_char(input: Cursor) -> PResult<()> {
    let mut chars = input.char_indices();
    let ok = match chars.next().map(|(_, ch)| ch) {
        Some('\\') => {

@@ -683,7 +908,10 @@ fn cooked_char(input: &str) -> PResult<()> {
        ch => ch.is_some(),
    };
    if ok {
        Ok((chars.as_str(), ()))
        match chars.next() {
            Some((idx, _)) => Ok((input.advance(idx), ())),
            None => Ok((input.advance(input.len()), ())),
        }
    } else {
        Err(LexError)
    }

@@ -730,17 +958,17 @@ fn backslash_u<I>(chars: &mut I) -> bool
    }
}

fn float(input: &str) -> PResult<()> {
fn float(input: Cursor) -> PResult<()> {
    let (rest, ()) = float_digits(input)?;
    for suffix in &["f32", "f64"] {
        if rest.starts_with(suffix) {
            return word_break(&rest[suffix.len()..]);
            return word_break(rest.advance(suffix.len()));
        }
    }
    word_break(rest)
}

fn float_digits(input: &str) -> PResult<()> {
fn float_digits(input: Cursor) -> PResult<()> {
    let mut chars = input.chars().peekable();
    match chars.next() {
        Some(ch) if ch >= '0' && ch <= '9' => {}

@@ -779,7 +1007,7 @@ fn float_digits(input: &str) -> PResult<()> {
        }
    }

    let rest = &input[len..];
    let rest = input.advance(len);
    if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
        return Err(LexError);
    }

@@ -812,10 +1040,10 @@ fn float_digits(input: &str) -> PResult<()> {
        }
    }

    Ok((&input[len..], ()))
    Ok((input.advance(len), ()))
}

fn int(input: &str) -> PResult<()> {
fn int(input: Cursor) -> PResult<()> {
    let (rest, ()) = digits(input)?;
    for suffix in &[
        "isize",

@@ -832,21 +1060,21 @@ fn int(input: &str) -> PResult<()> {
        "u128",
    ] {
        if rest.starts_with(suffix) {
            return word_break(&rest[suffix.len()..]);
            return word_break(rest.advance(suffix.len()));
        }
    }
    word_break(rest)
}

fn digits(mut input: &str) -> PResult<()> {
fn digits(mut input: Cursor) -> PResult<()> {
    let base = if input.starts_with("0x") {
        input = &input[2..];
        input = input.advance(2);
        16
    } else if input.starts_with("0o") {
        input = &input[2..];
        input = input.advance(2);
        8
    } else if input.starts_with("0b") {
        input = &input[2..];
        input = input.advance(2);
        2
    } else {
        10

@@ -877,7 +1105,7 @@ fn digits(mut input: &str) -> PResult<()> {
    if empty {
        Err(LexError)
    } else {
        Ok((&input[len..], ()))
        Ok((input.advance(len), ()))
    }
}

@@ -887,7 +1115,7 @@ named!(boolean -> (), alt!(
    keyword!("false") => { |_| () }
));

fn op(input: &str) -> PResult<(char, Spacing)> {
fn op(input: Cursor) -> PResult<(char, Spacing)> {
    let input = skip_whitespace(input);
    match op_char(input) {
        Ok((rest, ch)) => {

@@ -901,7 +1129,7 @@ fn op(input: &str) -> PResult<(char, Spacing)> {
        }
    }
}

fn op_char(input: &str) -> PResult<char> {
fn op_char(input: Cursor) -> PResult<char> {
    let mut chars = input.chars();
    let first = match chars.next() {
        Some(ch) => ch,

@@ -911,7 +1139,7 @@ fn op_char(input: &str) -> PResult<char> {
    };
    let recognized = "~!@#$%^&*-=+|;:,<.>/?";
    if recognized.contains(first) {
        Ok((chars.as_str(), first))
        Ok((input.advance(first.len_utf8()), first))
    } else {
        Err(LexError)
    }
src/strnom.rs

@@ -1,12 +1,61 @@
//! Adapted from [`nom`](https://github.com/Geal/nom).

use std::str::{Chars, CharIndices, Bytes};

use unicode_xid::UnicodeXID;

use imp::LexError;

pub type PResult<'a, O> = Result<(&'a str, O), LexError>;
#[derive(Copy, Clone, Eq, PartialEq)]
pub struct Cursor<'a> {
    pub rest: &'a str,
    pub off: u32,
}

pub fn whitespace(input: &str) -> PResult<()> {
impl<'a> Cursor<'a> {
    pub fn advance(&self, amt: usize) -> Cursor<'a> {
        Cursor {
            rest: &self.rest[amt..],
            off: self.off + (amt as u32),
        }
    }

    pub fn find(&self, p: char) -> Option<usize> {
        self.rest.find(p)
    }

    pub fn starts_with(&self, s: &str) -> bool {
        self.rest.starts_with(s)
    }

    pub fn is_empty(&self) -> bool {
        self.rest.is_empty()
    }

    pub fn len(&self) -> usize {
        self.rest.len()
    }

    pub fn as_bytes(&self) -> &'a [u8] {
        self.rest.as_bytes()
    }

    pub fn bytes(&self) -> Bytes<'a> {
        self.rest.bytes()
    }

    pub fn chars(&self) -> Chars<'a> {
        self.rest.chars()
    }

    pub fn char_indices(&self) -> CharIndices<'a> {
        self.rest.char_indices()
    }
}

pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;

pub fn whitespace(input: Cursor) -> PResult<()> {
    if input.is_empty() {
        return Err(LexError);
    }
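Since `Cursor` simply pairs the remaining text with its absolute offset, `advance` keeps the two in sync. Below is a minimal standalone copy of that idea (only `rest`, `off`, and `advance` mirror the diff; the demo values are assumptions):

```rust
#[derive(Copy, Clone)]
struct Cursor<'a> {
    rest: &'a str,
    off: u32,
}

impl<'a> Cursor<'a> {
    fn advance(&self, amt: usize) -> Cursor<'a> {
        // Drop `amt` bytes from the front and move the absolute offset forward.
        Cursor {
            rest: &self.rest[amt..],
            off: self.off + amt as u32,
        }
    }
}

fn main() {
    let c = Cursor { rest: "foo bar", off: 42 }; // 42: assumed file base offset
    let c2 = c.advance(4);
    assert_eq!(c2.rest, "bar");
    assert_eq!(c2.off, 46); // absolute position survives the slicing
}
```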
@@ -14,7 +63,7 @@ pub fn whitespace(input: &str) -> PResult<()> {
    let bytes = input.as_bytes();
    let mut i = 0;
    while i < bytes.len() {
        let s = &input[i..];
        let s = input.advance(i);
        if bytes[i] == b'/' {
            if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////")) &&
               !s.starts_with("//!") {

@@ -50,10 +99,10 @@ pub fn whitespace(input: &str) -> PResult<()> {
            Err(LexError)
        };
    }
    Ok(("", ()))
    Ok((input.advance(input.len()), ()))
}

pub fn block_comment(input: &str) -> PResult<&str> {
pub fn block_comment(input: Cursor) -> PResult<&str> {
    if !input.starts_with("/*") {
        return Err(LexError);
    }

@@ -69,7 +118,7 @@ pub fn block_comment(input: &str) -> PResult<&str> {
        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
            depth -= 1;
            if depth == 0 {
                return Ok((&input[i + 2..], &input[..i + 2]));
                return Ok((input.advance(i + 2), &input.rest[..i + 2]));
            }
            i += 1; // eat '/'
        }

@@ -78,7 +127,7 @@ pub fn block_comment(input: &str) -> PResult<&str> {
    Err(LexError)
}

pub fn skip_whitespace(input: &str) -> &str {
pub fn skip_whitespace(input: Cursor) -> Cursor {
    match whitespace(input) {
        Ok((rest, _)) => rest,
        Err(LexError) => input,

@@ -90,7 +139,7 @@ fn is_whitespace(ch: char) -> bool {
    ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}

pub fn word_break(input: &str) -> PResult<()> {
pub fn word_break(input: Cursor) -> PResult<()> {
    match input.chars().next() {
        Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
        Some(_) | None => Ok((input, ())),

@@ -99,7 +148,7 @@ pub fn word_break(input: &str) -> PResult<()> {

macro_rules! named {
    ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
        fn $name(i: &str) -> $crate::strnom::PResult<$o> {
        fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
            $submac!(i, $($args)*)
        }
    };

@@ -228,7 +277,7 @@ macro_rules! take_until {
            }
        }
        if parsed {
            Ok((&$i[offset..], &$i[..offset]))
            Ok(($i.advance(offset), &$i.rest[..offset]))
        } else {
            Err(LexError)
        }

@@ -294,7 +343,7 @@ macro_rules! not {
macro_rules! tag {
    ($i:expr, $tag:expr) => {
        if $i.starts_with($tag) {
            Ok((&$i[$tag.len()..], &$i[..$tag.len()]))
            Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
        } else {
            Err(LexError)
        }

@@ -308,10 +357,10 @@ macro_rules! punct {
}

/// Do not use directly. Use `punct!`.
pub fn punct<'a>(input: &'a str, token: &'static str) -> PResult<'a, &'a str> {
pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
    let input = skip_whitespace(input);
    if input.starts_with(token) {
        Ok((&input[token.len()..], token))
        Ok((input.advance(token.len()), token))
    } else {
        Err(LexError)
    }

@@ -324,7 +373,7 @@ macro_rules! keyword {
}

/// Do not use directly. Use `keyword!`.
pub fn keyword<'a>(input: &'a str, token: &'static str) -> PResult<'a, &'a str> {
pub fn keyword<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
    match punct(input, token) {
        Ok((rest, _)) => {
            match word_break(rest) {
@@ -159,6 +159,60 @@ impl fmt::Debug for TokenTreeIter {
    }
}

#[cfg(procmacro2_unstable)]
#[derive(Clone, PartialEq, Eq)]
pub struct FileName(String);

#[cfg(procmacro2_unstable)]
impl fmt::Display for FileName {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

// NOTE: We have to generate our own filename object here because we can't wrap
// the one provided by proc_macro.
#[cfg(procmacro2_unstable)]
#[derive(Clone, PartialEq, Eq)]
pub struct SourceFile(proc_macro::SourceFile, FileName);

#[cfg(procmacro2_unstable)]
impl SourceFile {
    fn new(sf: proc_macro::SourceFile) -> Self {
        let filename = FileName(sf.path().to_string());
        SourceFile(sf, filename)
    }

    /// Get the path to this source file as a string.
    pub fn path(&self) -> &FileName {
        &self.1
    }

    pub fn is_real(&self) -> bool {
        self.0.is_real()
    }
}

#[cfg(procmacro2_unstable)]
impl AsRef<FileName> for SourceFile {
    fn as_ref(&self) -> &FileName {
        self.path()
    }
}

#[cfg(procmacro2_unstable)]
impl fmt::Debug for SourceFile {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}

#[cfg(procmacro2_unstable)]
pub struct LineColumn {
    pub line: usize,
    pub column: usize,
}

#[derive(Copy, Clone)]
pub struct Span(proc_macro::Span);
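On this backend the filename is converted to a `String` once, in `SourceFile::new`, because `path()` must hand back a `&FileName` while the underlying value can't be wrapped directly (that is the NOTE above). The snippet below is only a generic sketch of that cache-on-construction pattern, not proc_macro's actual API:

```rust
// Generic sketch: cache an owned conversion at construction time so a
// borrowing accessor can be offered afterwards.
struct Wrapper<T> {
    inner: T,
    cached_name: String,
}

impl<T: std::fmt::Debug> Wrapper<T> {
    fn new(inner: T) -> Self {
        let cached_name = format!("{:?}", inner); // one-time conversion
        Wrapper { inner, cached_name }
    }

    fn name(&self) -> &str {
        &self.cached_name // cheap borrow, no re-allocation per call
    }
}

fn main() {
    let w = Wrapper::new(42);
    assert_eq!(w.name(), "42");
    let _ = &w.inner;
}
```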
@@ -170,6 +224,28 @@ impl Span {
    pub fn def_site() -> Span {
        Span(proc_macro::Span::def_site())
    }

    #[cfg(procmacro2_unstable)]
    pub fn source_file(&self) -> SourceFile {
        SourceFile::new(self.0.source_file())
    }

    #[cfg(procmacro2_unstable)]
    pub fn start(&self) -> LineColumn {
        let proc_macro::LineColumn{ line, column } = self.0.start();
        LineColumn { line, column }
    }

    #[cfg(procmacro2_unstable)]
    pub fn end(&self) -> LineColumn {
        let proc_macro::LineColumn{ line, column } = self.0.end();
        LineColumn { line, column }
    }

    #[cfg(procmacro2_unstable)]
    pub fn join(&self, other: Span) -> Option<Span> {
        self.0.join(other.0).map(Span)
    }
}

impl fmt::Debug for Span {
@@ -1,6 +1,6 @@
extern crate proc_macro2;

use proc_macro2::{Term, Literal, TokenStream};
use proc_macro2::{Term, Literal, TokenStream, TokenNode, Span};

#[test]
fn symbols() {

@@ -64,3 +64,92 @@ fn fail() {
    fail("' static");
    fail("'mut");
}

#[cfg(procmacro2_unstable)]
#[test]
fn span_test() {
    fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
        let ts = p.parse::<TokenStream>().unwrap();
        check_spans_internal(ts, &mut lines);
    }

    fn check_spans_internal(
        ts: TokenStream,
        lines: &mut &[(usize, usize, usize, usize)],
    ) {
        for i in ts {
            if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
                *lines = rest;

                let start = i.span.start();
                assert_eq!(start.line, sline, "sline did not match for {}", i);
                assert_eq!(start.column, scol, "scol did not match for {}", i);

                let end = i.span.end();
                assert_eq!(end.line, eline, "eline did not match for {}", i);
                assert_eq!(end.column, ecol, "ecol did not match for {}", i);

                match i.kind {
                    TokenNode::Group(_, stream) =>
                        check_spans_internal(stream, lines),
                    _ => {}
                }
            }
        }
    }

    check_spans("\
/// This is a document comment
testing 123
{
  testing 234
}", &[
        (1, 0, 1, 30),
        (2, 0, 2, 7),
        (2, 8, 2, 11),
        (3, 0, 5, 1),
        (4, 2, 4, 9),
        (4, 10, 4, 13),
    ]);
}

#[cfg(procmacro2_unstable)]
#[cfg(not(feature = "unstable"))]
#[test]
fn default_span() {
    let start = Span::call_site().start();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    let end = Span::call_site().end();
    assert_eq!(end.line, 1);
    assert_eq!(end.column, 0);
    let source_file = Span::call_site().source_file();
    assert_eq!(source_file.path().to_string(), "<unspecified>");
    assert!(!source_file.is_real());
}

#[cfg(procmacro2_unstable)]
#[test]
fn span_join() {
    let source1 =
        "aaa\nbbb".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>();
    let source2 =
        "ccc\nddd".parse::<TokenStream>().unwrap().into_iter().collect::<Vec<_>>();

    assert!(source1[0].span.source_file() != source2[0].span.source_file());
    assert_eq!(source1[0].span.source_file(), source1[1].span.source_file());

    let joined1 = source1[0].span.join(source1[1].span);
    let joined2 = source1[0].span.join(source2[0].span);
    assert!(joined1.is_some());
    assert!(joined2.is_none());

    let start = joined1.unwrap().start();
    let end = joined1.unwrap().end();
    assert_eq!(start.line, 1);
    assert_eq!(start.column, 0);
    assert_eq!(end.line, 2);
    assert_eq!(end.column, 3);

    assert_eq!(joined1.unwrap().source_file(), source1[0].span.source_file());
}