Merge pull request #74 from alexcrichton/lex-doc-comments

Lex doc comments as attributes
This commit is contained in:
Alex Crichton 2018-04-04 17:04:57 -05:00 committed by GitHub
commit 6e05dac376
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 113 additions and 71 deletions

View File

@ -591,24 +591,55 @@ impl fmt::Display for Literal {
}
}
named!(token_stream -> ::TokenStream, map!(
many0!(token_tree),
|trees| ::TokenStream::_new(TokenStream { inner: trees })
));
// Lex an entire input cursor into a `TokenStream`.
//
// Repeatedly strips whitespace and pulls one item at a time off the input
// until it is exhausted. Doc comments get first shot: `doc_comment` desugars
// a `///`/`//!`-style comment into several synthetic attribute tokens, so it
// can yield more than one tree per match (hence `extend`, not `push`).
// Anything else is handled by `token_tree`.
//
// NOTE(review): if `token_tree` fails mid-input this returns `Ok` with the
// remaining unlexed input still in the cursor — presumably the caller checks
// for leftover input; confirm at the call site.
fn token_stream(mut input: Cursor) -> PResult<::TokenStream> {
let mut trees = Vec::new();
loop {
// Whitespace between trees is insignificant; skip it before the
// end-of-input test so trailing blanks don't look like a parse failure.
let input_no_ws = skip_whitespace(input);
if input_no_ws.rest.len() == 0 {
break
}
// A doc comment expands to multiple token trees at once.
if let Ok((a, tokens)) = doc_comment(input_no_ws) {
input = a;
trees.extend(tokens);
continue
}
let (a, tt) = match token_tree(input_no_ws) {
Ok(p) => p,
// Nothing more can be lexed; return what was collected so far.
Err(_) => break,
};
trees.push(tt);
input = a;
}
Ok((input, ::TokenStream::_new(TokenStream { inner: trees })))
}
#[cfg(not(procmacro2_semver_exempt))]
fn token_tree(input: Cursor) -> PResult<TokenTree> {
token_kind(input)
// Run parser `f` on `input` (after stripping leading whitespace) and pair its
// output with a `::Span`. Here the attached `Span` is constructed with no
// fields, i.e. it carries no position information — the preceding
// `#[cfg(not(procmacro2_semver_exempt))]` suggests this is the variant used
// when line/column span tracking is compiled out (TODO confirm against the
// full file; this diff view interleaves old and new lines).
fn spanned<'a, T>(
input: Cursor<'a>,
f: fn(Cursor<'a>) -> PResult<'a, T>,
) -> PResult<'a, (T, ::Span)> {
let (a, b) = f(skip_whitespace(input))?;
Ok((a, ((b, ::Span::_new(Span { })))))
}
#[cfg(procmacro2_semver_exempt)]
fn token_tree(input: Cursor) -> PResult<TokenTree> {
fn spanned<'a, T>(
input: Cursor<'a>,
f: fn(Cursor<'a>) -> PResult<'a, T>,
) -> PResult<'a, (T, ::Span)> {
let input = skip_whitespace(input);
let lo = input.off;
let (input, mut token) = token_kind(input)?;
let hi = input.off;
token.set_span(::Span::_new(Span { lo: lo, hi: hi }));
Ok((input, token))
let (a, b) = f(input)?;
let hi = a.off;
let span = ::Span::_new(Span { lo: lo, hi: hi });
Ok((a, (b, span)))
}
// Lex a single `TokenTree` from the input, stamping it with the span that
// `spanned` computed for the matched text before returning it.
fn token_tree(input: Cursor) -> PResult<TokenTree> {
let (rest, (mut tt, span)) = spanned(input, token_kind)?;
tt.set_span(span);
Ok((rest, tt))
}
named!(token_kind -> TokenTree, alt!(
@ -721,8 +752,6 @@ named!(literal_nocapture -> (), alt!(
float
|
int
|
doc_comment
));
named!(string -> (), alt!(
@ -1146,31 +1175,53 @@ fn op_char(input: Cursor) -> PResult<char> {
}
}
named!(doc_comment -> (), alt!(
// Lex a doc comment and desugar it into the equivalent attribute tokens:
// an outer comment (`/// text`) becomes `#` followed by `[doc = " text"]`,
// and an inner comment (`//! text`) becomes `#` `!` `[doc = " text"]`.
// Returns the list of synthesized token trees; every one of them is given
// the span of the original comment text.
fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
let mut trees = Vec::new();
// `inner` is true for `//!`-style comments, which get a leading `!`.
let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
trees.push(TokenTree::Op(Op::new('#', Spacing::Alone)));
if inner {
trees.push(Op::new('!', Spacing::Alone).into());
}
// Contents of the bracketed group: `doc = "<comment text>"`.
let mut stream = vec![
TokenTree::Term(::Term::new("doc", span)),
TokenTree::Op(Op::new('=', Spacing::Alone)),
TokenTree::Literal(::Literal::string(comment)),
];
// Propagate the comment's span onto each token inside the group...
for tt in stream.iter_mut() {
tt.set_span(span);
}
trees.push(Group::new(Delimiter::Bracket, stream.into_iter().collect()).into());
// ...and onto the top-level `#`, `!`, and group tokens as well.
for tt in trees.iter_mut() {
tt.set_span(span);
}
Ok((rest, trees))
}
named!(doc_comment_contents -> (&str, bool), alt!(
do_parse!(
punct!("//!") >>
take_until_newline_or_eof!() >>
(())
s: take_until_newline_or_eof!() >>
((s, true))
)
|
do_parse!(
option!(whitespace) >>
peek!(tag!("/*!")) >>
block_comment >>
(())
s: block_comment >>
((s, true))
)
|
do_parse!(
punct!("///") >>
not!(tag!("/")) >>
take_until_newline_or_eof!() >>
(())
s: take_until_newline_or_eof!() >>
((s, false))
)
|
do_parse!(
option!(whitespace) >>
peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
block_comment >>
(())
s: block_comment >>
((s, false))
)
));

View File

@ -268,7 +268,7 @@ macro_rules! take_until_newline_or_eof {
} else {
match $i.find('\n') {
Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
None => Ok(($i.advance($i.len()), "")),
None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
}
}
}};
@ -389,37 +389,3 @@ macro_rules! map {
map!($i, call!($f), $g)
};
}
// nom-style `many0` combinator: apply parser `$f` to `$i` zero or more times,
// collecting the outputs into a `Vec`. A parse error from `$f` is not fatal —
// the loop simply stops and returns what it has, so the overall result is
// `Ok` even on zero matches. The only failure mode is `$f` succeeding
// without consuming any input, which is rejected to avoid an infinite loop.
macro_rules! many0 {
($i:expr, $f:expr) => {{
let ret;
let mut res = ::std::vec::Vec::new();
let mut input = $i;
loop {
// Exhausted input: done, return everything collected.
if input.is_empty() {
ret = Ok((input, res));
break;
}
match $f(input) {
// First error ends the repetition; partial results are still Ok.
Err(LexError) => {
ret = Ok((input, res));
break;
}
Ok((i, o)) => {
// loop trip must always consume (otherwise infinite loops)
if i.len() == input.len() {
ret = Err(LexError);
break;
}
res.push(o);
input = i;
}
}
}
ret
}};
}

View File

@ -29,12 +29,6 @@ fn roundtrip() {
roundtrip("a");
roundtrip("<<");
roundtrip("<<=");
roundtrip(
"
/// a
wut
",
);
roundtrip(
"
1
@ -115,12 +109,16 @@ testing 123
testing 234
}",
&[
(1, 0, 1, 30),
(2, 0, 2, 7),
(2, 8, 2, 11),
(3, 0, 5, 1),
(4, 2, 4, 9),
(4, 10, 4, 13),
(1, 0, 1, 30), // #
(1, 0, 1, 30), // [ ... ]
(1, 0, 1, 30), // doc
(1, 0, 1, 30), // =
(1, 0, 1, 30), // "This is..."
(2, 0, 2, 7), // testing
(2, 8, 2, 11), // 123
(3, 0, 5, 1), // { ... }
(4, 2, 4, 9), // testing
(4, 10, 4, 13), // 234
],
);
}
@ -192,11 +190,38 @@ fn tricky_doc_comment() {
let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
let tokens = stream.into_iter().collect::<Vec<_>>();
assert!(tokens.len() == 1, "not length 1 -- {:?}", tokens);
assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
match tokens[0] {
proc_macro2::TokenTree::Literal(_) => {}
proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
_ => panic!("wrong token {:?}", tokens[0]),
}
let mut tokens = match tokens[1] {
proc_macro2::TokenTree::Group(ref tt) => {
assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
tt.stream().into_iter()
}
_ => panic!("wrong token {:?}", tokens[0]),
};
match tokens.next().unwrap() {
proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
t => panic!("wrong token {:?}", t),
}
match tokens.next().unwrap() {
proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
t => panic!("wrong token {:?}", t),
}
match tokens.next().unwrap() {
proc_macro2::TokenTree::Literal(ref tt) => {
assert_eq!(tt.to_string(), "\" doc\"");
}
t => panic!("wrong token {:?}", t),
}
assert!(tokens.next().is_none());
let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
let tokens = stream.into_iter().collect::<Vec<_>>();
assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
}
#[test]