Bug 1342759 - Revendor dependencies r=manishearth

MozReview-Commit-ID: byBQQbHb8J

--HG--
rename : third_party/rust/syn/.cargo-checksum.json => third_party/rust/syn-0.10.8/.cargo-checksum.json
rename : third_party/rust/syn/Cargo.toml => third_party/rust/syn-0.10.8/Cargo.toml
rename : third_party/rust/syn/src/aster/qpath.rs => third_party/rust/syn-0.10.8/src/aster/qpath.rs
rename : third_party/rust/syn/src/aster/ty.rs => third_party/rust/syn-0.10.8/src/aster/ty.rs
rename : third_party/rust/syn/src/attr.rs => third_party/rust/syn-0.10.8/src/attr.rs
rename : third_party/rust/syn/src/escape.rs => third_party/rust/syn-0.10.8/src/escape.rs
rename : third_party/rust/syn/src/expr.rs => third_party/rust/syn-0.10.8/src/expr.rs
rename : third_party/rust/syn/src/generics.rs => third_party/rust/syn-0.10.8/src/generics.rs
rename : third_party/rust/syn/src/helper.rs => third_party/rust/syn-0.10.8/src/helper.rs
rename : third_party/rust/syn/src/ident.rs => third_party/rust/syn-0.10.8/src/ident.rs
rename : third_party/rust/syn/src/item.rs => third_party/rust/syn-0.10.8/src/item.rs
rename : third_party/rust/syn/src/krate.rs => third_party/rust/syn-0.10.8/src/krate.rs
rename : third_party/rust/syn/src/lib.rs => third_party/rust/syn-0.10.8/src/lib.rs
rename : third_party/rust/syn/src/lit.rs => third_party/rust/syn-0.10.8/src/lit.rs
rename : third_party/rust/syn/src/mac.rs => third_party/rust/syn-0.10.8/src/mac.rs
rename : third_party/rust/syn/src/macro_input.rs => third_party/rust/syn-0.10.8/src/macro_input.rs
rename : third_party/rust/syn/src/nom.rs => third_party/rust/syn-0.10.8/src/nom.rs
rename : third_party/rust/syn/src/registry.rs => third_party/rust/syn-0.10.8/src/registry.rs
rename : third_party/rust/syn/src/space.rs => third_party/rust/syn-0.10.8/src/space.rs
rename : third_party/rust/syn/src/ty.rs => third_party/rust/syn-0.10.8/src/ty.rs
rename : third_party/rust/syn/src/visit.rs => third_party/rust/syn-0.10.8/src/visit.rs
Manish Goregaokar 2017-02-26 04:10:07 -08:00
parent 32fe1965f3
commit fcedbf4075
74 changed files with 14221 additions and 519 deletions


@@ -0,0 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"ebe4110b0c97747210ec48a874d3b128531f59868f3ee1e169023854413cc948","lib.rs":"f7219b22e9cccc53a02f5a5e40f38d50640042727566913d4b2b579517667983"},"package":"a85e1452f40a50777c8424fa7fcaa7dd7074c7bc5419014fbffe7ea3d750dee8"}



@@ -0,0 +1,18 @@
[package]
name = "cssparser-macros"
version = "0.1.0"
authors = ["Simon Sapin <simon.sapin@exyr.org>"]
description = "Procedural macros for cssparser"
documentation = "https://docs.rs/cssparser-macros/"
repository = "https://github.com/servo/rust-cssparser"
license = "MPL-2.0"
[lib]
path = "lib.rs"
proc-macro = true
[dependencies]
phf_codegen = "0.7"
quote = "0.3"
syn = "0.11"

third_party/rust/cssparser-macros/lib.rs vendored Normal file

@@ -0,0 +1,123 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate phf_codegen;
extern crate proc_macro;
#[macro_use] extern crate quote;
extern crate syn;
use std::ascii::AsciiExt;
/// Find a `#[cssparser__assert_ascii_lowercase__data(string = "…", string = "…")]` attribute,
/// and panic if any string contains ASCII uppercase letters.
#[proc_macro_derive(cssparser__assert_ascii_lowercase,
attributes(cssparser__assert_ascii_lowercase__data))]
pub fn assert_ascii_lowercase(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = syn::parse_macro_input(&input.to_string()).unwrap();
let data = list_attr(&input, "cssparser__assert_ascii_lowercase__data");
for sub_attr in data {
let string = sub_attr_value(sub_attr, "string");
assert_eq!(*string, string.to_ascii_lowercase(),
"the expected strings must be given in ASCII lowercase");
}
"".parse().unwrap()
}
/// Find a `#[cssparser__max_len__data(string = "…", string = "…")]` attribute,
/// panic if any string contains ASCII uppercase letters,
/// emit a `MAX_LENGTH` constant with the length of the longest string.
#[proc_macro_derive(cssparser__max_len,
attributes(cssparser__max_len__data))]
pub fn max_len(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = syn::parse_macro_input(&input.to_string()).unwrap();
let data = list_attr(&input, "cssparser__max_len__data");
let lengths = data.iter().map(|sub_attr| sub_attr_value(sub_attr, "string").len());
let max_length = lengths.max().expect("expected at least one string");
let tokens = quote! {
const MAX_LENGTH: usize = #max_length;
};
tokens.as_str().parse().unwrap()
}
/// On `struct $Name($ValueType)`, add a new static method
/// `fn map() -> &'static ::phf::Map<&'static str, $ValueType>`.
/// The maps content is given as:
/// `#[cssparser__phf_map__kv_pairs(key = "…", value = "…", key = "…", value = "…")]`.
/// Keys are ASCII-lowercased.
#[proc_macro_derive(cssparser__phf_map,
attributes(cssparser__phf_map__kv_pairs))]
pub fn phf_map(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input = syn::parse_macro_input(&input.to_string()).unwrap();
let name = &input.ident;
let value_type = match input.body {
syn::Body::Struct(syn::VariantData::Tuple(ref fields)) if fields.len() == 1 => {
&fields[0].ty
}
_ => panic!("expected tuple struct newtype, got {:?}", input.body)
};
let pairs: Vec<_> = list_attr(&input, "cssparser__phf_map__kv_pairs").chunks(2).map(|chunk| {
let key = sub_attr_value(&chunk[0], "key");
let value = sub_attr_value(&chunk[1], "value");
(key.to_ascii_lowercase(), value)
}).collect();
let mut map = phf_codegen::Map::new();
for &(ref key, value) in &pairs {
map.entry(&**key, value);
}
let mut initializer_bytes = Vec::<u8>::new();
let mut initializer_tokens = quote::Tokens::new();
map.build(&mut initializer_bytes).unwrap();
initializer_tokens.append(::std::str::from_utf8(&initializer_bytes).unwrap());
let tokens = quote! {
impl #name {
#[inline]
fn map() -> &'static ::phf::Map<&'static str, #value_type> {
static MAP: ::phf::Map<&'static str, #value_type> = #initializer_tokens;
&MAP
}
}
};
tokens.as_str().parse().unwrap()
}
/// Panic if the first attribute isnt `#[foo(…)]` with the given name,
/// or return the parameters.
fn list_attr<'a>(input: &'a syn::DeriveInput, expected_name: &str) -> &'a [syn::NestedMetaItem] {
for attr in &input.attrs {
match attr.value {
syn::MetaItem::List(ref name, ref nested) if name == expected_name => {
return nested
}
_ => {}
}
}
panic!("expected a {} attribute", expected_name)
}
/// Panic if `sub_attr` is not a name-value like `foo = "…"` with the given name,
/// or return the value.
fn sub_attr_value<'a>(sub_attr: &'a syn::NestedMetaItem, expected_name: &str) -> &'a str {
match *sub_attr {
syn::NestedMetaItem::MetaItem(
syn::MetaItem::NameValue(ref name, syn::Lit::Str(ref value, _))
)
if name == expected_name => {
value
}
_ => {
panic!("expected a `{} = \"\"` parameter to the attribute, got {:?}",
expected_name, sub_attr)
}
}
}
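The three derives above are not meant to be written by hand; cssparser's public macros expand to them. Below is a minimal, hypothetical sketch (editorial illustration, not part of the vendored file) of how a dependent crate drives the first two; the struct names and string literals are arbitrary. `cssparser__phf_map` is driven the same way through `key = "…", value = "…"` pairs by the `ascii_case_insensitive_phf_map!` wrapper that appears later in this commit.

#[macro_use] extern crate cssparser_macros;

// Expanding this derive runs `assert_ascii_lowercase` at compile time: it reads the
// `string = "…"` parameters and panics (i.e. fails the build) if any contains A-Z.
#[derive(cssparser__assert_ascii_lowercase)]
#[cssparser__assert_ascii_lowercase__data(string = "rgb", string = "hsl")]
#[allow(dead_code)]
struct AssertLowercase;

// `cssparser__max_len` emits `const MAX_LENGTH: usize = 4;` into this scope,
// the length of the longest listed string ("calc").
#[derive(cssparser__max_len)]
#[cssparser__max_len__data(string = "url", string = "calc")]
#[allow(dead_code)]
struct LongestKeyword;

fn main() {}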


@@ -1 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".gitignore":"e32812a8f09b0c5b0b972e2e090f8929eb5b600a37ca7aac2ed07ba10c30291e",".travis.yml":"f1fb4b65964c81bc1240544267ea334f554ca38ae7a74d57066f4d47d2b5d568","Cargo.toml":"00ec504efd37e12c4349d9af25b18ec32d807e8757ab5131200c32c6669b7ebf","LICENSE":"fab3dd6bdab226f1c08630b1dd917e11fcb4ec5e1e020e2c16f83a0a13863e85","README.md":"9afe084d70a5d9396674a2624012d6ac749df35f81e322d2d75b042bf208f523","build.rs":"56bfa720a5982d724661a8029315e801258e67245354aabaf2120b73f853cf3c","docs/.nojekyll":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","docs/404.html":"025861f76f8d1f6d67c20ab624c6e418f4f824385e2dd8ad8732c4ea563c6a2e","docs/index.html":"025861f76f8d1f6d67c20ab624c6e418f4f824385e2dd8ad8732c4ea563c6a2e","src/big-data-url.css":"04a8f6197ea1181123bca48bd1ebd016268e1da40f01b8f21055814e44bf62b8","src/color.rs":"2abfe8c5bdcb58151efff0b42fd6a23c51b8090de746f9c0f46d4996629f5894","src/css-parsing-tests/An+B.json":"d24559c1dad55d3da9d1fca29383edefdfc6046988435d6388a9bc0f28850257","src/css-parsing-tests/LICENSE":"5f9019a92f4aa8917aadc8e035aa673c2c1bf08d5ca2e535a0564106599f44eb","src/css-parsing-tests/README.rst":"775c5f957dd1d46d3ce954aaad219c821d2b64b4a9fb93c42e9737a11131ca44","src/css-parsing-tests/color3.json":"008f080f6f2dbae5ee403ff46aaa40a9a16e68a2b8923446ac6374f04da9e868","src/css-parsing-tests/color3_hsl.json":"09a4a1e51fb78276cdbf2e834cc9234f5b97c35426ddc879e35b2b09990327b5","src/css-parsing-tests/color3_keywords.json":"95609bf9fe762c316878a30f371fa375a2e51c21a6fda24fa188a95cd9118f5c","src/css-parsing-tests/component_value_list.json":"dda7244eb3a4fcf6d296762e285f7031028837d987065a09e584e8d973edc7f3","src/css-parsing-tests/declaration_list.json":"0b85cc3f19e945f838432acbfb9edb003abea13debc4ea27bcdcef25d117eac5","src/css-parsing-tests/make_color3_hsl.py":"df6f4c154c098641aab81d030de53c65d75d9bde429e9d1ff7069cc5b1827031","src/css-parsing-tests/make_color3_keywords.py":"66bccab3f1dea18698fcfd854be79b1fd1cd724dd487e25b1f057b522163aad2","src/css-parsing-tests/one_component_value.json":"8798017709002e14cf11e203c9d716f82d308ce6ba0f6e64ee4eea331b8485c6","src/css-parsing-tests/one_declaration.json":"a34c9da56edfff9e2e21615f059e141b0e878e90f794dc8fa58d65b47cd193ed","src/css-parsing-tests/one_rule.json":"88f7b1b6049be88e1e2827673b75fc9261986b216e8ee6bf09621fecbe274e3c","src/css-parsing-tests/rule_list.json":"97c45e80fb83abef149a4016c5625a74f053e7ad70a2ce5a95c02fce1c195686","src/css-parsing-tests/stylesheet.json":"05f1e10fc486bfbda2c059c313a74ff78c0063c0768b99737cab41969c0c87ce","src/css-parsing-tests/stylesheet_bytes.json":"890fd856a596e61f82cf7ed77920ffe95df89209fdb5ee0afe0b26bdfdb80a42","src/css-parsing-tests/urange.json":"7ce494811fcb64f20597bd11c88dc99bd72445290582e280bf7774f5d15e1ed3","src/from_bytes.rs":"331fe63af2123ae3675b61928a69461b5ac77799fff3ce9978c55cf2c558f4ff","src/lib.rs":"233ff7a7576512cbcde0221df06256bcd68c495dd2534b582832b4012518d8a8","src/macros/match_byte.rs":"89e8b941af74df2c204abf808672d3ff278bdec75abc918c41a843260b924677","src/macros/mod.rs":"99ffb7c3dbb5a09c7363db84a6ea439ab848439615b37a2e420710c5be3fbde2","src/nth.rs":"9dcabe79ab33c9965cf12fedd649f211e268572fc620f555b5dd28bbc7bea5b2","src/parser.rs":"99739b79e0829d868fc9d3ded5a20d54023dd4ff4bd71f2d0bf8e20df4f395ac","src/rules_and_declarations.rs":"6b66a986e411a56998546ab0e64de5285df3368d7c4018c7230a1b6cf6bcc532","src/serializer.rs":"4521b58389bd57acced55c3c6130831b7f80eff48ef873c48c5363e0eca0a15c","src/tests.rs":"10e2f5358b4bb
bb58ef4ee5fcff5e86db1bbc3462ee892de2171f63cb46125c3","src/tokenizer.rs":"ef1f220224365d46299160191facd2d9e0534e10ef362129cf56cd3dbb87106a","src/unicode_range.rs":"a3accaf00b8e0e93ba9af0863024507b97ddc2646e65c5f7421597a269317ac0"},"package":"693cc9c8d3d0779ff60ff6b8b73497bda2c7151b6489c3a9c1f95f5d4f4497e5"} {"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",".gitignore":"e32812a8f09b0c5b0b972e2e090f8929eb5b600a37ca7aac2ed07ba10c30291e",".travis.yml":"f1fb4b65964c81bc1240544267ea334f554ca38ae7a74d57066f4d47d2b5d568","Cargo.toml":"b8bd480e473642bfe3a7075661ea5f9ddff03b703dddaddb796cfd0d82ee73f3","LICENSE":"fab3dd6bdab226f1c08630b1dd917e11fcb4ec5e1e020e2c16f83a0a13863e85","README.md":"9afe084d70a5d9396674a2624012d6ac749df35f81e322d2d75b042bf208f523","build.rs":"56bfa720a5982d724661a8029315e801258e67245354aabaf2120b73f853cf3c","docs/.nojekyll":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","docs/404.html":"025861f76f8d1f6d67c20ab624c6e418f4f824385e2dd8ad8732c4ea563c6a2e","docs/index.html":"025861f76f8d1f6d67c20ab624c6e418f4f824385e2dd8ad8732c4ea563c6a2e","src/big-data-url.css":"04a8f6197ea1181123bca48bd1ebd016268e1da40f01b8f21055814e44bf62b8","src/color.rs":"4941bb291e3be9537c70bac16cedb57b338daded2a801b19201302bacae2dca5","src/css-parsing-tests/An+B.json":"d24559c1dad55d3da9d1fca29383edefdfc6046988435d6388a9bc0f28850257","src/css-parsing-tests/LICENSE":"5f9019a92f4aa8917aadc8e035aa673c2c1bf08d5ca2e535a0564106599f44eb","src/css-parsing-tests/README.rst":"775c5f957dd1d46d3ce954aaad219c821d2b64b4a9fb93c42e9737a11131ca44","src/css-parsing-tests/color3.json":"008f080f6f2dbae5ee403ff46aaa40a9a16e68a2b8923446ac6374f04da9e868","src/css-parsing-tests/color3_hsl.json":"09a4a1e51fb78276cdbf2e834cc9234f5b97c35426ddc879e35b2b09990327b5","src/css-parsing-tests/color3_keywords.json":"95609bf9fe762c316878a30f371fa375a2e51c21a6fda24fa188a95cd9118f5c","src/css-parsing-tests/component_value_list.json":"dda7244eb3a4fcf6d296762e285f7031028837d987065a09e584e8d973edc7f3","src/css-parsing-tests/declaration_list.json":"0b85cc3f19e945f838432acbfb9edb003abea13debc4ea27bcdcef25d117eac5","src/css-parsing-tests/make_color3_hsl.py":"df6f4c154c098641aab81d030de53c65d75d9bde429e9d1ff7069cc5b1827031","src/css-parsing-tests/make_color3_keywords.py":"66bccab3f1dea18698fcfd854be79b1fd1cd724dd487e25b1f057b522163aad2","src/css-parsing-tests/one_component_value.json":"8798017709002e14cf11e203c9d716f82d308ce6ba0f6e64ee4eea331b8485c6","src/css-parsing-tests/one_declaration.json":"a34c9da56edfff9e2e21615f059e141b0e878e90f794dc8fa58d65b47cd193ed","src/css-parsing-tests/one_rule.json":"88f7b1b6049be88e1e2827673b75fc9261986b216e8ee6bf09621fecbe274e3c","src/css-parsing-tests/rule_list.json":"97c45e80fb83abef149a4016c5625a74f053e7ad70a2ce5a95c02fce1c195686","src/css-parsing-tests/stylesheet.json":"05f1e10fc486bfbda2c059c313a74ff78c0063c0768b99737cab41969c0c87ce","src/css-parsing-tests/stylesheet_bytes.json":"890fd856a596e61f82cf7ed77920ffe95df89209fdb5ee0afe0b26bdfdb80a42","src/css-parsing-tests/urange.json":"7ce494811fcb64f20597bd11c88dc99bd72445290582e280bf7774f5d15e1ed3","src/from_bytes.rs":"331fe63af2123ae3675b61928a69461b5ac77799fff3ce9978c55cf2c558f4ff","src/lib.rs":"718d9ecd62b66ad0b2337f5f4aa89421ef25bfe12a8a98d0f882059fb3e321b0","src/macros/match_byte.rs":"89e8b941af74df2c204abf808672d3ff278bdec75abc918c41a843260b924677","src/macros/mod.rs":"99ffb7c3dbb5a09c7363db84a6ea439ab848439615b37a2e420710c5be3fbde2","src/nth.rs":"0a5e68bd8a597403e184ebf34e69230ae1e955f92b16b99b3f67cf87
30a180a9","src/parser.rs":"99739b79e0829d868fc9d3ded5a20d54023dd4ff4bd71f2d0bf8e20df4f395ac","src/rules_and_declarations.rs":"6b66a986e411a56998546ab0e64de5285df3368d7c4018c7230a1b6cf6bcc532","src/serializer.rs":"4521b58389bd57acced55c3c6130831b7f80eff48ef873c48c5363e0eca0a15c","src/tests.rs":"10e2f5358b4bbbb58ef4ee5fcff5e86db1bbc3462ee892de2171f63cb46125c3","src/tokenizer.rs":"ef1f220224365d46299160191facd2d9e0534e10ef362129cf56cd3dbb87106a","src/unicode_range.rs":"a3accaf00b8e0e93ba9af0863024507b97ddc2646e65c5f7421597a269317ac0"},"package":"d8352ccd22c5ebab558d179e32f6d3dd26eed30252f8420d636bfae5052eb50e"}


@@ -1,11 +1,11 @@
 [package]
 name = "cssparser"
-version = "0.10.0"
+version = "0.11.0"
 authors = [ "Simon Sapin <simon.sapin@exyr.org>" ]
 description = "Rust implementation of CSS Syntax Level 3"
-documentation = "http://servo.github.io/rust-cssparser/cssparser/index.html"
+documentation = "https://docs.rs/cssparser/"
 repository = "https://github.com/servo/rust-cssparser"
 readme = "README.md"
 keywords = ["css", "syntax", "parser"]
@@ -14,23 +14,25 @@ build = "build.rs"
 exclude = ["src/css-parsing-tests"]
+[lib]
+doctest = false
 [dev-dependencies]
 rustc-serialize = "0.3"
 tempdir = "0.3"
-encoding_rs = "0.3.2"
+encoding_rs = "0.5"
 [dependencies]
+cssparser-macros = {path = "./macros", version = "0.1"}
 heapsize = {version = "0.3", optional = true}
 matches = "0.1"
+phf = "0.7"
 serde = {version = "0.9", optional = true}
 [build-dependencies]
-syn = { version = "0.10.6", features = ["full", "visit"]}
+syn = "0.11"
 quote = "0.3"
 [features]
 bench = []
 dummy_match_byte = []
+[workspace]
+members = [".", "./macros"]


@@ -169,162 +169,174 @@ fn rgba(red: u8, green: u8, blue: u8, alpha: u8) -> Result<Color, ()> {
/// (For example, the value of an `Ident` token is fine.)
#[inline]
pub fn parse_color_keyword(ident: &str) -> Result<Color, ()> {
match_ignore_ascii_case! { ident,
"black" => rgb(0, 0, 0),
"silver" => rgb(192, 192, 192),
"gray" => rgb(128, 128, 128),
"white" => rgb(255, 255, 255),
"maroon" => rgb(128, 0, 0),
"red" => rgb(255, 0, 0),
"purple" => rgb(128, 0, 128),
"fuchsia" => rgb(255, 0, 255),
"green" => rgb(0, 128, 0),
"lime" => rgb(0, 255, 0),
"olive" => rgb(128, 128, 0),
"yellow" => rgb(255, 255, 0),
"navy" => rgb(0, 0, 128),
"blue" => rgb(0, 0, 255),
"teal" => rgb(0, 128, 128),
"aqua" => rgb(0, 255, 255),
"aliceblue" => rgb(240, 248, 255),
"antiquewhite" => rgb(250, 235, 215),
"aquamarine" => rgb(127, 255, 212),
"azure" => rgb(240, 255, 255),
"beige" => rgb(245, 245, 220),
"bisque" => rgb(255, 228, 196),
"blanchedalmond" => rgb(255, 235, 205),
"blueviolet" => rgb(138, 43, 226),
"brown" => rgb(165, 42, 42),
"burlywood" => rgb(222, 184, 135),
"cadetblue" => rgb(95, 158, 160),
"chartreuse" => rgb(127, 255, 0),
"chocolate" => rgb(210, 105, 30),
"coral" => rgb(255, 127, 80),
"cornflowerblue" => rgb(100, 149, 237),
"cornsilk" => rgb(255, 248, 220),
"crimson" => rgb(220, 20, 60),
"cyan" => rgb(0, 255, 255),
"darkblue" => rgb(0, 0, 139),
"darkcyan" => rgb(0, 139, 139),
"darkgoldenrod" => rgb(184, 134, 11),
"darkgray" => rgb(169, 169, 169),
"darkgreen" => rgb(0, 100, 0),
"darkgrey" => rgb(169, 169, 169),
"darkkhaki" => rgb(189, 183, 107),
"darkmagenta" => rgb(139, 0, 139),
"darkolivegreen" => rgb(85, 107, 47),
"darkorange" => rgb(255, 140, 0),
"darkorchid" => rgb(153, 50, 204),
"darkred" => rgb(139, 0, 0),
"darksalmon" => rgb(233, 150, 122),
"darkseagreen" => rgb(143, 188, 143),
"darkslateblue" => rgb(72, 61, 139),
"darkslategray" => rgb(47, 79, 79),
"darkslategrey" => rgb(47, 79, 79),
"darkturquoise" => rgb(0, 206, 209),
"darkviolet" => rgb(148, 0, 211),
"deeppink" => rgb(255, 20, 147),
"deepskyblue" => rgb(0, 191, 255),
"dimgray" => rgb(105, 105, 105),
"dimgrey" => rgb(105, 105, 105),
"dodgerblue" => rgb(30, 144, 255),
"firebrick" => rgb(178, 34, 34),
"floralwhite" => rgb(255, 250, 240),
"forestgreen" => rgb(34, 139, 34),
"gainsboro" => rgb(220, 220, 220),
"ghostwhite" => rgb(248, 248, 255),
"gold" => rgb(255, 215, 0),
"goldenrod" => rgb(218, 165, 32),
"greenyellow" => rgb(173, 255, 47),
"grey" => rgb(128, 128, 128),
"honeydew" => rgb(240, 255, 240),
"hotpink" => rgb(255, 105, 180),
"indianred" => rgb(205, 92, 92),
"indigo" => rgb(75, 0, 130),
"ivory" => rgb(255, 255, 240),
"khaki" => rgb(240, 230, 140),
"lavender" => rgb(230, 230, 250),
"lavenderblush" => rgb(255, 240, 245),
"lawngreen" => rgb(124, 252, 0),
"lemonchiffon" => rgb(255, 250, 205),
"lightblue" => rgb(173, 216, 230),
"lightcoral" => rgb(240, 128, 128),
"lightcyan" => rgb(224, 255, 255),
"lightgoldenrodyellow" => rgb(250, 250, 210),
"lightgray" => rgb(211, 211, 211),
"lightgreen" => rgb(144, 238, 144),
"lightgrey" => rgb(211, 211, 211),
"lightpink" => rgb(255, 182, 193),
"lightsalmon" => rgb(255, 160, 122),
"lightseagreen" => rgb(32, 178, 170),
"lightskyblue" => rgb(135, 206, 250),
"lightslategray" => rgb(119, 136, 153),
"lightslategrey" => rgb(119, 136, 153),
"lightsteelblue" => rgb(176, 196, 222),
"lightyellow" => rgb(255, 255, 224),
"limegreen" => rgb(50, 205, 50),
"linen" => rgb(250, 240, 230),
"magenta" => rgb(255, 0, 255),
"mediumaquamarine" => rgb(102, 205, 170),
"mediumblue" => rgb(0, 0, 205),
"mediumorchid" => rgb(186, 85, 211),
"mediumpurple" => rgb(147, 112, 219),
"mediumseagreen" => rgb(60, 179, 113),
"mediumslateblue" => rgb(123, 104, 238),
"mediumspringgreen" => rgb(0, 250, 154),
"mediumturquoise" => rgb(72, 209, 204),
"mediumvioletred" => rgb(199, 21, 133),
"midnightblue" => rgb(25, 25, 112),
"mintcream" => rgb(245, 255, 250),
"mistyrose" => rgb(255, 228, 225),
"moccasin" => rgb(255, 228, 181),
"navajowhite" => rgb(255, 222, 173),
"oldlace" => rgb(253, 245, 230),
"olivedrab" => rgb(107, 142, 35),
"orange" => rgb(255, 165, 0),
"orangered" => rgb(255, 69, 0),
"orchid" => rgb(218, 112, 214),
"palegoldenrod" => rgb(238, 232, 170),
"palegreen" => rgb(152, 251, 152),
"paleturquoise" => rgb(175, 238, 238),
"palevioletred" => rgb(219, 112, 147),
"papayawhip" => rgb(255, 239, 213),
"peachpuff" => rgb(255, 218, 185),
"peru" => rgb(205, 133, 63),
"pink" => rgb(255, 192, 203),
"plum" => rgb(221, 160, 221),
"powderblue" => rgb(176, 224, 230),
"rebeccapurple" => rgb(102, 51, 153),
"rosybrown" => rgb(188, 143, 143),
"royalblue" => rgb(65, 105, 225),
"saddlebrown" => rgb(139, 69, 19),
"salmon" => rgb(250, 128, 114),
"sandybrown" => rgb(244, 164, 96),
"seagreen" => rgb(46, 139, 87),
"seashell" => rgb(255, 245, 238),
"sienna" => rgb(160, 82, 45),
"skyblue" => rgb(135, 206, 235),
"slateblue" => rgb(106, 90, 205),
"slategray" => rgb(112, 128, 144),
"slategrey" => rgb(112, 128, 144),
"snow" => rgb(255, 250, 250),
"springgreen" => rgb(0, 255, 127),
"steelblue" => rgb(70, 130, 180),
"tan" => rgb(210, 180, 140),
"thistle" => rgb(216, 191, 216),
"tomato" => rgb(255, 99, 71),
"turquoise" => rgb(64, 224, 208),
"violet" => rgb(238, 130, 238),
"wheat" => rgb(245, 222, 179),
"whitesmoke" => rgb(245, 245, 245),
"yellowgreen" => rgb(154, 205, 50),
"transparent" => rgba(0, 0, 0, 0),
"currentcolor" => Ok(Color::CurrentColor),
_ => Err(())
}
}
macro_rules! rgb {
($red: expr, $green: expr, $blue: expr) => {
Color::RGBA(RGBA {
red: $red,
green: $green,
blue: $blue,
alpha: 255,
})
}
}
ascii_case_insensitive_phf_map! {
KEYWORDS: Map<Color> = {
"black" => "rgb!(0, 0, 0)",
"silver" => "rgb!(192, 192, 192)",
"gray" => "rgb!(128, 128, 128)",
"white" => "rgb!(255, 255, 255)",
"maroon" => "rgb!(128, 0, 0)",
"red" => "rgb!(255, 0, 0)",
"purple" => "rgb!(128, 0, 128)",
"fuchsia" => "rgb!(255, 0, 255)",
"green" => "rgb!(0, 128, 0)",
"lime" => "rgb!(0, 255, 0)",
"olive" => "rgb!(128, 128, 0)",
"yellow" => "rgb!(255, 255, 0)",
"navy" => "rgb!(0, 0, 128)",
"blue" => "rgb!(0, 0, 255)",
"teal" => "rgb!(0, 128, 128)",
"aqua" => "rgb!(0, 255, 255)",
"aliceblue" => "rgb!(240, 248, 255)",
"antiquewhite" => "rgb!(250, 235, 215)",
"aquamarine" => "rgb!(127, 255, 212)",
"azure" => "rgb!(240, 255, 255)",
"beige" => "rgb!(245, 245, 220)",
"bisque" => "rgb!(255, 228, 196)",
"blanchedalmond" => "rgb!(255, 235, 205)",
"blueviolet" => "rgb!(138, 43, 226)",
"brown" => "rgb!(165, 42, 42)",
"burlywood" => "rgb!(222, 184, 135)",
"cadetblue" => "rgb!(95, 158, 160)",
"chartreuse" => "rgb!(127, 255, 0)",
"chocolate" => "rgb!(210, 105, 30)",
"coral" => "rgb!(255, 127, 80)",
"cornflowerblue" => "rgb!(100, 149, 237)",
"cornsilk" => "rgb!(255, 248, 220)",
"crimson" => "rgb!(220, 20, 60)",
"cyan" => "rgb!(0, 255, 255)",
"darkblue" => "rgb!(0, 0, 139)",
"darkcyan" => "rgb!(0, 139, 139)",
"darkgoldenrod" => "rgb!(184, 134, 11)",
"darkgray" => "rgb!(169, 169, 169)",
"darkgreen" => "rgb!(0, 100, 0)",
"darkgrey" => "rgb!(169, 169, 169)",
"darkkhaki" => "rgb!(189, 183, 107)",
"darkmagenta" => "rgb!(139, 0, 139)",
"darkolivegreen" => "rgb!(85, 107, 47)",
"darkorange" => "rgb!(255, 140, 0)",
"darkorchid" => "rgb!(153, 50, 204)",
"darkred" => "rgb!(139, 0, 0)",
"darksalmon" => "rgb!(233, 150, 122)",
"darkseagreen" => "rgb!(143, 188, 143)",
"darkslateblue" => "rgb!(72, 61, 139)",
"darkslategray" => "rgb!(47, 79, 79)",
"darkslategrey" => "rgb!(47, 79, 79)",
"darkturquoise" => "rgb!(0, 206, 209)",
"darkviolet" => "rgb!(148, 0, 211)",
"deeppink" => "rgb!(255, 20, 147)",
"deepskyblue" => "rgb!(0, 191, 255)",
"dimgray" => "rgb!(105, 105, 105)",
"dimgrey" => "rgb!(105, 105, 105)",
"dodgerblue" => "rgb!(30, 144, 255)",
"firebrick" => "rgb!(178, 34, 34)",
"floralwhite" => "rgb!(255, 250, 240)",
"forestgreen" => "rgb!(34, 139, 34)",
"gainsboro" => "rgb!(220, 220, 220)",
"ghostwhite" => "rgb!(248, 248, 255)",
"gold" => "rgb!(255, 215, 0)",
"goldenrod" => "rgb!(218, 165, 32)",
"greenyellow" => "rgb!(173, 255, 47)",
"grey" => "rgb!(128, 128, 128)",
"honeydew" => "rgb!(240, 255, 240)",
"hotpink" => "rgb!(255, 105, 180)",
"indianred" => "rgb!(205, 92, 92)",
"indigo" => "rgb!(75, 0, 130)",
"ivory" => "rgb!(255, 255, 240)",
"khaki" => "rgb!(240, 230, 140)",
"lavender" => "rgb!(230, 230, 250)",
"lavenderblush" => "rgb!(255, 240, 245)",
"lawngreen" => "rgb!(124, 252, 0)",
"lemonchiffon" => "rgb!(255, 250, 205)",
"lightblue" => "rgb!(173, 216, 230)",
"lightcoral" => "rgb!(240, 128, 128)",
"lightcyan" => "rgb!(224, 255, 255)",
"lightgoldenrodyellow" => "rgb!(250, 250, 210)",
"lightgray" => "rgb!(211, 211, 211)",
"lightgreen" => "rgb!(144, 238, 144)",
"lightgrey" => "rgb!(211, 211, 211)",
"lightpink" => "rgb!(255, 182, 193)",
"lightsalmon" => "rgb!(255, 160, 122)",
"lightseagreen" => "rgb!(32, 178, 170)",
"lightskyblue" => "rgb!(135, 206, 250)",
"lightslategray" => "rgb!(119, 136, 153)",
"lightslategrey" => "rgb!(119, 136, 153)",
"lightsteelblue" => "rgb!(176, 196, 222)",
"lightyellow" => "rgb!(255, 255, 224)",
"limegreen" => "rgb!(50, 205, 50)",
"linen" => "rgb!(250, 240, 230)",
"magenta" => "rgb!(255, 0, 255)",
"mediumaquamarine" => "rgb!(102, 205, 170)",
"mediumblue" => "rgb!(0, 0, 205)",
"mediumorchid" => "rgb!(186, 85, 211)",
"mediumpurple" => "rgb!(147, 112, 219)",
"mediumseagreen" => "rgb!(60, 179, 113)",
"mediumslateblue" => "rgb!(123, 104, 238)",
"mediumspringgreen" => "rgb!(0, 250, 154)",
"mediumturquoise" => "rgb!(72, 209, 204)",
"mediumvioletred" => "rgb!(199, 21, 133)",
"midnightblue" => "rgb!(25, 25, 112)",
"mintcream" => "rgb!(245, 255, 250)",
"mistyrose" => "rgb!(255, 228, 225)",
"moccasin" => "rgb!(255, 228, 181)",
"navajowhite" => "rgb!(255, 222, 173)",
"oldlace" => "rgb!(253, 245, 230)",
"olivedrab" => "rgb!(107, 142, 35)",
"orange" => "rgb!(255, 165, 0)",
"orangered" => "rgb!(255, 69, 0)",
"orchid" => "rgb!(218, 112, 214)",
"palegoldenrod" => "rgb!(238, 232, 170)",
"palegreen" => "rgb!(152, 251, 152)",
"paleturquoise" => "rgb!(175, 238, 238)",
"palevioletred" => "rgb!(219, 112, 147)",
"papayawhip" => "rgb!(255, 239, 213)",
"peachpuff" => "rgb!(255, 218, 185)",
"peru" => "rgb!(205, 133, 63)",
"pink" => "rgb!(255, 192, 203)",
"plum" => "rgb!(221, 160, 221)",
"powderblue" => "rgb!(176, 224, 230)",
"rebeccapurple" => "rgb!(102, 51, 153)",
"rosybrown" => "rgb!(188, 143, 143)",
"royalblue" => "rgb!(65, 105, 225)",
"saddlebrown" => "rgb!(139, 69, 19)",
"salmon" => "rgb!(250, 128, 114)",
"sandybrown" => "rgb!(244, 164, 96)",
"seagreen" => "rgb!(46, 139, 87)",
"seashell" => "rgb!(255, 245, 238)",
"sienna" => "rgb!(160, 82, 45)",
"skyblue" => "rgb!(135, 206, 235)",
"slateblue" => "rgb!(106, 90, 205)",
"slategray" => "rgb!(112, 128, 144)",
"slategrey" => "rgb!(112, 128, 144)",
"snow" => "rgb!(255, 250, 250)",
"springgreen" => "rgb!(0, 255, 127)",
"steelblue" => "rgb!(70, 130, 180)",
"tan" => "rgb!(210, 180, 140)",
"thistle" => "rgb!(216, 191, 216)",
"tomato" => "rgb!(255, 99, 71)",
"turquoise" => "rgb!(64, 224, 208)",
"violet" => "rgb!(238, 130, 238)",
"wheat" => "rgb!(245, 222, 179)",
"whitesmoke" => "rgb!(245, 245, 245)",
"yellowgreen" => "rgb!(154, 205, 50)",
"transparent" => "Color::RGBA(RGBA { red: 0, green: 0, blue: 0, alpha: 0 })",
"currentcolor" => "Color::CurrentColor",
}
}
KEYWORDS::get(ident).cloned().ok_or(())
}
#[inline]


@@ -68,7 +68,9 @@ fn parse_border_spacing(_context: &ParserContext, input: &mut Parser)
 #![recursion_limit="200"] // For color::parse_color_keyword
+#[macro_use] extern crate cssparser_macros;
 #[macro_use] extern crate matches;
+extern crate phf;
 #[cfg(test)] extern crate encoding_rs;
 #[cfg(test)] extern crate tempdir;
 #[cfg(test)] extern crate rustc_serialize;
@@ -87,27 +89,36 @@ pub use serializer::{ToCss, CssStringWriter, serialize_identifier, serialize_str
 pub use parser::{Parser, Delimiter, Delimiters, SourcePosition};
 pub use unicode_range::UnicodeRange;
-/**
-This macro is equivalent to a `match` expression on an `&str` value,
-but matching is case-insensitive in the ASCII range.
-Usage example:
-```{rust,ignore}
-match_ignore_ascii_case! { string,
-    "foo" => Some(Foo),
-    "bar" => Some(Bar),
-    "baz" => Some(Baz),
-    _ => None
-}
-```
-The macro also takes a slice of the value,
-so that a `String` or `CowString` could be passed directly instead of a `&str`.
-*/
+/// Expands to an expression equivalent to a `match` with string patterns,
+/// but matching is case-insensitive in the ASCII range.
+///
+/// Requirements:
+///
+/// * The `cssparser_macros` crate must also be imported at the crate root
+/// * The patterns must not contain ASCII upper case letters. (They must be already be lower-cased.)
+///
+/// # Example
+///
+/// ```rust
+/// #[macro_use] extern crate cssparser;
+/// #[macro_use] extern crate cssparser_macros;
+///
+/// # fn main() {} // Make doctest not wrap everythig in its own main
+/// # fn dummy(function_name: &String) { let _ =
+/// match_ignore_ascii_case! { &function_name,
+/// "rgb" => parse_rgb(..),
+/// "rgba" => parse_rgba(..),
+/// "hsl" => parse_hsl(..),
+/// "hsla" => parse_hsla(..),
+/// _ => Err("unknown function")
+/// }
+/// # ;}
+/// # use std::ops::RangeFull;
+/// # fn parse_rgb(_: RangeFull) -> Result<(), &'static str> { Err("") }
+/// # fn parse_rgba(_: RangeFull) -> Result<(), &'static str> { Err("") }
+/// # fn parse_hsl(_: RangeFull) -> Result<(), &'static str> { Err("") }
+/// # fn parse_hsla(_: RangeFull) -> Result<(), &'static str> { Err("") }
+/// ```
 #[macro_export]
 macro_rules! match_ignore_ascii_case {
     // parse the last case plus the fallback
@@ -123,10 +134,15 @@ macro_rules! match_ignore_ascii_case {
     // finished parsing
     (@inner $value:expr, () -> ($(($string:expr => $result:expr))*) $fallback:expr ) => {
         {
-            use std::ascii::AsciiExt;
-            match &$value[..] {
+            #[derive(cssparser__assert_ascii_lowercase)]
+            #[cssparser__assert_ascii_lowercase__data($(string = $string),+)]
+            #[allow(dead_code)]
+            struct Dummy;
+            _cssparser_internal__to_lowercase!($value => lowercase, $($string),+);
+            match lowercase {
                 $(
-                    s if s.eq_ignore_ascii_case($string) => $result,
+                    Some($string) => $result,
                 )+
                 _ => $fallback
             }
@@ -139,6 +155,120 @@ macro_rules! match_ignore_ascii_case {
};
}
/// Define a placeholder type `$Name`
/// with a method `fn get(input: &str) -> Option<&'static $ValueType>`.
///
/// This method uses finds a match for the input string
/// in a [`phf` map](https://github.com/sfackler/rust-phf).
/// Matching is case-insensitive in the ASCII range.
///
/// Requirements:
///
/// * The `phf` and `cssparser_macros` crates must also be imported at the crate root
/// * The values must be given a strings that contain Rust syntax for a constant expression.
///
/// ## Example:
///
/// ```rust
/// extern crate phf;
/// #[macro_use] extern crate cssparser;
/// #[macro_use] extern crate cssparser_macros;
///
/// # fn main() {} // Make doctest not wrap everythig in its own main
///
/// fn color_rgb(input: &str) -> Option<(u8, u8, u8)> {
/// ascii_case_insensitive_phf_map! {
/// KEYWORDS: Map<(u8, u8, u8)> = {
/// "red" => "(255, 0, 0)",
/// "green" => "(0, 255, 0)",
/// "blue" => "(0, 0, 255)",
/// }
/// }
/// KEYWORDS::get(input).cloned()
/// }
#[macro_export]
macro_rules! ascii_case_insensitive_phf_map {
($Name: ident : Map<$ValueType: ty> = {
$( $key: expr => $value: expr, )*
}) => {
#[derive(cssparser__phf_map)]
#[cssparser__phf_map__kv_pairs(
$(
key = $key,
value = $value
),+
)]
struct $Name($ValueType);
impl $Name {
#[inline]
fn get(input: &str) -> Option<&'static $ValueType> {
_cssparser_internal__to_lowercase!(input => lowercase, $($key),+);
lowercase.and_then(|string| $Name::map().get(string))
}
}
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This macro is not part of the public API. It can change or be removed between any versions.**
///
/// * Check at compile-time that none of the `$string`s contain ASCII uppercase letters
/// * Define a local variable named `$output` to the result of calling `_internal__to_lowercase`
/// with a stack-allocated buffer as long as the longest `$string`.
#[macro_export]
#[doc(hidden)]
macro_rules! _cssparser_internal__to_lowercase {
($input: expr => $output: ident, $($string: expr),+) => {
#[derive(cssparser__max_len)]
#[cssparser__max_len__data($(string = $string),+)]
#[allow(dead_code)]
struct Dummy2;
// mem::uninitialized() is ok because `buffer` is only used in `_internal__to_lowercase`,
// which initializes with `copy_from_slice` the part of the buffer it uses,
// before it uses it.
#[allow(unsafe_code)]
// MAX_LENGTH is generated by cssparser__max_len
let mut buffer: [u8; MAX_LENGTH] = unsafe {
::std::mem::uninitialized()
};
let input: &str = $input;
let $output = $crate::_internal__to_lowercase(&mut buffer, input);
}
}
/// Implementation detail of match_ignore_ascii_case! and ascii_case_insensitive_phf_map! macros.
///
/// **This function is not part of the public API. It can change or be removed between any verisons.**
///
/// Return `input`, lower-cased, unless larger than `buffer`
/// which is used temporary space for lower-casing a copy of `input` if necessary.
#[doc(hidden)]
#[allow(non_snake_case)]
pub fn _internal__to_lowercase<'a>(buffer: &'a mut [u8], input: &'a str) -> Option<&'a str> {
if let Some(buffer) = buffer.get_mut(..input.len()) {
if let Some(first_uppercase) = input.bytes().position(|byte| matches!(byte, b'A'...b'Z')) {
buffer.copy_from_slice(input.as_bytes());
std::ascii::AsciiExt::make_ascii_lowercase(&mut buffer[first_uppercase..]);
// `buffer` was initialized to a copy of `input` (which is &str so well-formed UTF-8)
// then lowercased (which preserves UTF-8 well-formedness)
unsafe {
Some(::std::str::from_utf8_unchecked(buffer))
}
} else {
// Input is already lower-case
Some(input)
}
} else {
// Input is longer than buffer, which has the length of the longest expected string:
// none of the expected strings would match.
None
}
}
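A small illustration (editorial sketch, not part of the vendored file) of the three cases this helper distinguishes; the buffer length and probe strings are arbitrary stand-ins for the generated `MAX_LENGTH` and the expected keywords.

extern crate cssparser;

fn lowercase_demo() {
    // Buffer as long as the longest expected keyword, here 4 bytes (e.g. for {"rgb", "hsla"}).
    let mut buffer = [0u8; 4];
    // Already lowercase: the input is returned as-is without touching the buffer.
    assert_eq!(cssparser::_internal__to_lowercase(&mut buffer, "hsla"), Some("hsla"));
    // Contains uppercase: copied into the buffer, lowercased in place, and returned from there.
    assert_eq!(cssparser::_internal__to_lowercase(&mut buffer, "RGB"), Some("rgb"));
    // Longer than the buffer: no expected string can match, so the helper reports None.
    assert_eq!(cssparser::_internal__to_lowercase(&mut buffer, "translate"), None);
}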
mod rules_and_declarations;
#[cfg(feature = "dummy_match_byte")]


@@ -16,14 +16,14 @@ pub fn parse_nth(input: &mut Parser) -> Result<(i32, i32), ()> {
         Token::Number(value) => Ok((0, try!(value.int_value.ok_or(())) as i32)),
         Token::Dimension(value, unit) => {
             let a = try!(value.int_value.ok_or(())) as i32;
-            match_ignore_ascii_case! { unit,
+            match_ignore_ascii_case! { &unit,
                 "n" => parse_b(input, a),
                 "n-" => parse_signless_b(input, a, -1),
                 _ => Ok((a, try!(parse_n_dash_digits(&*unit))))
             }
         }
         Token::Ident(value) => {
-            match_ignore_ascii_case! { value,
+            match_ignore_ascii_case! { &value,
                 "even" => Ok((2, 0)),
                 "odd" => Ok((2, 1)),
                 "n" => parse_b(input, 1),
@@ -39,7 +39,7 @@ pub fn parse_nth(input: &mut Parser) -> Result<(i32, i32), ()> {
             }
         Token::Delim('+') => match try!(input.next_including_whitespace()) {
             Token::Ident(value) => {
-                match_ignore_ascii_case! { value,
+                match_ignore_ascii_case! { &value,
                     "n" => parse_b(input, 1),
                     "n-" => parse_signless_b(input, 1, -1),
                     _ => Ok((1, try!(parse_n_dash_digits(&*value))))


@@ -0,0 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"f703ce140afaec1a35ce733f6bc3d0ce45a6256095572d0763c815fbf39f4f11","src/aster/generics.rs":"77eb19443af0dff5debb18d064733cc8721a42ad7e993a33352cdeff2b5f9f85","src/aster/ident.rs":"e9d082664f008a56bd854011310b4258ab072740ba82e57495b6e8a868a5f36b","src/aster/invoke.rs":"2b1b993973ab4f5c8fa6d6a286576b2542edce21fe9904f5133c470c072e6d3f","src/aster/lifetime.rs":"d83f4c1a48e3580caa028cfabde6ace232efc95d70af6dc9cfcca48317db9ad7","src/aster/mod.rs":"12220f73b0021e72b4c50f6a513cff174b9c7267209aa23f183043d96ccc9ab7","src/aster/path.rs":"60865b0f952077307c1a66810a4b6dafde43e76a417a433a8343960e7de474e4","src/aster/qpath.rs":"885c94b29ab8ee45c72a682221e241d1f0dd09c659809fe77279b5dd8a4bc645","src/aster/ty.rs":"90649aad98617c09ffc43a38aeb823a3298c41bf5e10f0ef3500b71c81021c2f","src/aster/ty_param.rs":"7ced1e6ca0c98ef468d507d3f07bfcb1171395cd66ff5c3e1b091fe7e8b9a562","src/aster/where_predicate.rs":"5fb8ec3fcb67bcc1d9bb7b64cf2f5beb601aac6502d6db30c0cdf8641fa248d1","src/attr.rs":"2ba436bdd439511be10baf9ad45226ade678176a7fd45a087367e1ad2b43e07a","src/constant.rs":"90535a2320e0dc8ab623a9bffa770bdf697baef2884a7d9224b31daf422ea5a0","src/data.rs":"0119c67821f846e67d792bea638ae7f7f5d7e2f5e5a0c145d8ba8766d6ddb0f9","src/escape.rs":"e035b1f6ce3255e868fddb62ee90a95a2f3caf2db73786a2b179b92e9e337539","src/expr.rs":"02e8d346bef099974d06d74945be92fe6391111b94154df4981d44f1594d5579","src/generics.rs":"a300acff4c6e61d2fe9344db23f5e176e7abb02529bc348d9180f41ad0a4caf6","src/helper.rs":"9693d5c78f2d627a90d689a5d4bee1061eddcb646ae6dff3b2e4fd7cfbb33845","src/ident.rs":"83142b0107baba3137aad3b7d5c7b468ab53bf837bd9544d117d6644080d2705","src/item.rs":"63f2cd9a01c279405196d90a7d1cc530896157352163fb44f6b2a713657058b8","src/krate.rs":"324073a42389eb1c26a9d0f325b4f1cdd37d00a9bcaf07fdee77af54909a452d","src/lib.rs":"ef584db9ac9b7308224798d3983cbf201df7f0da1735fe5ce408f20fb3df763e","src/lit.rs":"2615fc6041f11b67a7cd62012f36eb215fd1fdf6649b6b64d728625148f53c7b","src/mac.rs":"45c44bd7abcbdaea6572bb4721bdc57b02b967ea9865172fe10e029e51e51a42","src/macro_input.rs":"93b999877879076e1f47502d96aa18aad82117d072044ca9de825c8a9bfa60b8","src/nom.rs":"642149bf322b762e02183ac1fed641df7f03ac53334c869a64707de4e9c5e68c","src/op.rs":"232f84ba605ed50e70ee02169dd551548872135cf56f155637917ec3bf810ce1","src/registry.rs":"b709f2a0f372efd8dec8fd46d6d71fb3b56a0261789e6de048a41a5e70144421","src/space.rs":"de9cb71e831c1d66f0bf2f3f219c3455d1979ca89f89b198d3b324e0cd50faf8","src/ty.rs":"97cfcb904a5fd68a42ebd2e5f86466d92e0785b1491d80c2a8d396ccec1b742a","src/visit.rs":"d7dcf429cc1a05821a66a4b38e7856eec45a9b2215f625d95030c3688eda26ca"},"package":"58fd09df59565db3399efbba34ba8a2fec1307511ebd245d0061ff9d42691673"}

third_party/rust/syn-0.10.8/.cargo-ok vendored Normal file

third_party/rust/syn-0.10.8/Cargo.toml vendored Normal file

@@ -0,0 +1,32 @@
[package]
name = "syn"
version = "0.10.8" # don't forget to update version in readme for breaking changes
authors = ["David Tolnay <dtolnay@gmail.com>"]
license = "MIT/Apache-2.0"
description = "Nom parser for Rust source code"
repository = "https://github.com/dtolnay/syn"
documentation = "https://dtolnay.github.io/syn/syn/"
include = ["Cargo.toml", "src/**/*.rs"]
[features]
default = ["parsing", "printing"]
aster = []
expand = ["full", "parsing", "printing"]
full = []
parsing = ["unicode-xid"]
pretty = ["syntex_syntax"]
printing = ["quote"]
visit = []
[dependencies]
clippy = { version = "0.*", optional = true }
quote = { version = "0.3.0", optional = true }
syntex_syntax = { version = "0.50.0", optional = true }
unicode-xid = { version = "0.0.4", optional = true }
[dev-dependencies]
syntex_pos = "0.50.0"
syntex_syntax = "0.50.0"
tempdir = "0.3.5"
time = "0.1.35"
walkdir = "1.0.1"


@@ -0,0 +1,233 @@
use {Generics, Ident, LifetimeDef, TyParam, WhereClause, WherePredicate};
use aster::invoke::{Identity, Invoke};
use aster::lifetime::{IntoLifetime, LifetimeDefBuilder, IntoLifetimeDef};
use aster::path::IntoPath;
use aster::ty_param::TyParamBuilder;
use aster::where_predicate::WherePredicateBuilder;
pub struct GenericsBuilder<F = Identity> {
callback: F,
lifetimes: Vec<LifetimeDef>,
ty_params: Vec<TyParam>,
predicates: Vec<WherePredicate>,
}
impl GenericsBuilder {
pub fn new() -> Self {
GenericsBuilder::with_callback(Identity)
}
pub fn from_generics(generics: Generics) -> Self {
GenericsBuilder::from_generics_with_callback(generics, Identity)
}
}
impl<F> GenericsBuilder<F>
where F: Invoke<Generics>
{
pub fn with_callback(callback: F) -> Self {
GenericsBuilder {
callback: callback,
lifetimes: Vec::new(),
ty_params: Vec::new(),
predicates: Vec::new(),
}
}
pub fn from_generics_with_callback(generics: Generics, callback: F) -> Self {
GenericsBuilder {
callback: callback,
lifetimes: generics.lifetimes,
ty_params: generics.ty_params,
predicates: generics.where_clause.predicates,
}
}
pub fn with(self, generics: Generics) -> Self {
self.with_lifetimes(generics.lifetimes.into_iter())
.with_ty_params(generics.ty_params.into_iter())
.with_predicates(generics.where_clause.predicates.into_iter())
}
pub fn with_lifetimes<I, L>(mut self, iter: I) -> Self
where I: IntoIterator<Item = L>,
L: IntoLifetimeDef
{
let iter = iter.into_iter().map(|lifetime_def| lifetime_def.into_lifetime_def());
self.lifetimes.extend(iter);
self
}
pub fn with_lifetime_names<I, N>(mut self, iter: I) -> Self
where I: IntoIterator<Item = N>,
N: Into<Ident>
{
for name in iter {
self = self.lifetime_name(name);
}
self
}
pub fn with_lifetime(mut self, lifetime: LifetimeDef) -> Self {
self.lifetimes.push(lifetime);
self
}
pub fn lifetime_name<N>(self, name: N) -> Self
where N: Into<Ident>
{
self.lifetime(name).build()
}
pub fn lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self>
where N: Into<Ident>
{
LifetimeDefBuilder::with_callback(name, self)
}
pub fn with_ty_params<I>(mut self, iter: I) -> Self
where I: IntoIterator<Item = TyParam>
{
self.ty_params.extend(iter);
self
}
pub fn with_ty_param_ids<I, T>(mut self, iter: I) -> Self
where I: IntoIterator<Item = T>,
T: Into<Ident>
{
for id in iter {
self = self.ty_param_id(id);
}
self
}
pub fn with_ty_param(mut self, ty_param: TyParam) -> Self {
self.ty_params.push(ty_param);
self
}
pub fn ty_param_id<I>(self, id: I) -> Self
where I: Into<Ident>
{
self.ty_param(id).build()
}
pub fn ty_param<I>(self, id: I) -> TyParamBuilder<Self>
where I: Into<Ident>
{
TyParamBuilder::with_callback(id, self)
}
pub fn with_predicates<I>(mut self, iter: I) -> Self
where I: IntoIterator<Item = WherePredicate>
{
self.predicates.extend(iter);
self
}
pub fn with_predicate(mut self, predicate: WherePredicate) -> Self {
self.predicates.push(predicate);
self
}
pub fn predicate(self) -> WherePredicateBuilder<Self> {
WherePredicateBuilder::with_callback(self)
}
pub fn add_lifetime_bound<L>(mut self, lifetime: L) -> Self
where L: IntoLifetime
{
let lifetime = lifetime.into_lifetime();
for lifetime_def in &mut self.lifetimes {
lifetime_def.bounds.push(lifetime.clone());
}
for ty_param in &mut self.ty_params {
*ty_param = TyParamBuilder::from_ty_param(ty_param.clone())
.lifetime_bound(lifetime.clone())
.build();
}
self
}
pub fn add_ty_param_bound<P>(mut self, path: P) -> Self
where P: IntoPath
{
let path = path.into_path();
for ty_param in &mut self.ty_params {
*ty_param = TyParamBuilder::from_ty_param(ty_param.clone())
.trait_bound(path.clone())
.build()
.build();
}
self
}
pub fn strip_bounds(self) -> Self {
self.strip_lifetimes()
.strip_ty_params()
.strip_predicates()
}
pub fn strip_lifetimes(mut self) -> Self {
for lifetime in &mut self.lifetimes {
lifetime.bounds = vec![];
}
self
}
pub fn strip_ty_params(mut self) -> Self {
for ty_param in &mut self.ty_params {
ty_param.bounds = vec![];
}
self
}
pub fn strip_predicates(mut self) -> Self {
self.predicates = vec![];
self
}
pub fn build(self) -> F::Result {
self.callback.invoke(Generics {
lifetimes: self.lifetimes,
ty_params: self.ty_params,
where_clause: WhereClause { predicates: self.predicates },
})
}
}
impl<F> Invoke<LifetimeDef> for GenericsBuilder<F>
where F: Invoke<Generics>
{
type Result = Self;
fn invoke(self, lifetime: LifetimeDef) -> Self {
self.with_lifetime(lifetime)
}
}
impl<F> Invoke<TyParam> for GenericsBuilder<F>
where F: Invoke<Generics>
{
type Result = Self;
fn invoke(self, ty_param: TyParam) -> Self {
self.with_ty_param(ty_param)
}
}
impl<F> Invoke<WherePredicate> for GenericsBuilder<F>
where F: Invoke<Generics>
{
type Result = Self;
fn invoke(self, predicate: WherePredicate) -> Self {
self.with_predicate(predicate)
}
}
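A brief, hypothetical usage sketch of the builder above (editorial illustration, not part of the vendored crate), assuming the vendored syn 0.10.8 is compiled with its `aster` feature so the module is reachable as `syn::aster::generics`; with the default `Identity` callback, `build()` returns the finished `syn::Generics` directly.

extern crate syn;

use syn::aster::generics::GenericsBuilder;

fn example_generics() -> syn::Generics {
    // Roughly equivalent to writing `<'a, T: Clone>`:
    // one lifetime, one type parameter, and a trait bound added to every type parameter.
    GenericsBuilder::new()
        .lifetime_name("'a")
        .ty_param_id("T")
        .add_ty_param_bound("Clone")
        .build()
}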


@@ -0,0 +1,39 @@
use Ident;
pub trait ToIdent {
fn to_ident(&self) -> Ident;
}
impl ToIdent for Ident {
fn to_ident(&self) -> Ident {
self.clone()
}
}
impl<'a> ToIdent for &'a str {
fn to_ident(&self) -> Ident {
(**self).into()
}
}
impl ToIdent for String {
fn to_ident(&self) -> Ident {
self.clone().into()
}
}
impl<'a, T> ToIdent for &'a T
where T: ToIdent
{
fn to_ident(&self) -> Ident {
(**self).to_ident()
}
}
impl<'a, T> ToIdent for &'a mut T
where T: ToIdent
{
fn to_ident(&self) -> Ident {
(**self).to_ident()
}
}


@@ -0,0 +1,16 @@
pub trait Invoke<A> {
type Result;
fn invoke(self, arg: A) -> Self::Result;
}
#[derive(Copy, Clone)]
pub struct Identity;
impl<A> Invoke<A> for Identity {
type Result = A;
fn invoke(self, arg: A) -> A {
arg
}
}
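This `Invoke`/`Identity` pair is the glue behind every `with_callback` constructor in these aster modules: a nested builder hands its finished value to its parent through `Invoke`, and `Identity` is the no-op callback used at the top level so that `build()` simply returns the value. A hypothetical stand-alone illustration (not part of the vendored crate), again assuming syn's `aster` feature:

extern crate syn;

use syn::aster::invoke::{Identity, Invoke};

// A toy callback that consumes the value a child builder would produce.
struct Doubler;

impl Invoke<i32> for Doubler {
    type Result = i32;
    fn invoke(self, arg: i32) -> i32 {
        arg * 2
    }
}

fn demo() {
    assert_eq!(Identity.invoke(21), 21); // top level: the value passes through unchanged
    assert_eq!(Doubler.invoke(21), 42);  // parent builder: transforms/absorbs the child's result
}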


@@ -0,0 +1,103 @@
use {Ident, Lifetime, LifetimeDef};
use aster::invoke::{Invoke, Identity};
// ////////////////////////////////////////////////////////////////////////////
pub trait IntoLifetime {
fn into_lifetime(self) -> Lifetime;
}
impl IntoLifetime for Lifetime {
fn into_lifetime(self) -> Lifetime {
self
}
}
impl<'a> IntoLifetime for &'a str {
fn into_lifetime(self) -> Lifetime {
Lifetime { ident: self.into() }
}
}
// ////////////////////////////////////////////////////////////////////////////
pub trait IntoLifetimeDef {
fn into_lifetime_def(self) -> LifetimeDef;
}
impl IntoLifetimeDef for LifetimeDef {
fn into_lifetime_def(self) -> LifetimeDef {
self
}
}
impl IntoLifetimeDef for Lifetime {
fn into_lifetime_def(self) -> LifetimeDef {
LifetimeDef {
attrs: vec![],
lifetime: self,
bounds: vec![],
}
}
}
impl<'a> IntoLifetimeDef for &'a str {
fn into_lifetime_def(self) -> LifetimeDef {
self.into_lifetime().into_lifetime_def()
}
}
impl IntoLifetimeDef for String {
fn into_lifetime_def(self) -> LifetimeDef {
(*self).into_lifetime().into_lifetime_def()
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct LifetimeDefBuilder<F = Identity> {
callback: F,
lifetime: Lifetime,
bounds: Vec<Lifetime>,
}
impl LifetimeDefBuilder {
pub fn new<N>(name: N) -> Self
where N: Into<Ident>
{
LifetimeDefBuilder::with_callback(name, Identity)
}
}
impl<F> LifetimeDefBuilder<F>
where F: Invoke<LifetimeDef>
{
pub fn with_callback<N>(name: N, callback: F) -> Self
where N: Into<Ident>
{
let lifetime = Lifetime { ident: name.into() };
LifetimeDefBuilder {
callback: callback,
lifetime: lifetime,
bounds: Vec::new(),
}
}
pub fn bound<N>(mut self, name: N) -> Self
where N: Into<Ident>
{
let lifetime = Lifetime { ident: name.into() };
self.bounds.push(lifetime);
self
}
pub fn build(self) -> F::Result {
self.callback.invoke(LifetimeDef {
attrs: vec![],
lifetime: self.lifetime,
bounds: self.bounds,
})
}
}
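A short, hypothetical sketch for the lifetime builder above (not part of the vendored file), under the same `aster`-feature assumption; with `Identity`, `build()` returns the `syn::LifetimeDef` itself.

extern crate syn;

use syn::aster::lifetime::LifetimeDefBuilder;

fn example_lifetime() -> syn::LifetimeDef {
    // Equivalent to the source text `'a: 'b + 'static`.
    LifetimeDefBuilder::new("'a")
        .bound("'b")
        .bound("'static")
        .build()
}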


@@ -0,0 +1,33 @@
use super::*;
pub mod generics;
pub mod ident;
pub mod invoke;
pub mod lifetime;
pub mod path;
pub mod qpath;
pub mod ty;
pub mod ty_param;
pub mod where_predicate;
pub fn id<I>(id: I) -> Ident
where I: Into<Ident>
{
id.into()
}
pub fn from_generics(generics: Generics) -> generics::GenericsBuilder {
generics::GenericsBuilder::from_generics(generics)
}
pub fn where_predicate() -> where_predicate::WherePredicateBuilder {
where_predicate::WherePredicateBuilder::new()
}
pub fn ty() -> ty::TyBuilder {
ty::TyBuilder::new()
}
pub fn path() -> path::PathBuilder {
path::PathBuilder::new()
}


@@ -0,0 +1,331 @@
use {AngleBracketedParameterData, Generics, Ident, Lifetime, ParenthesizedParameterData, Path,
PathParameters, PathSegment, Ty, TypeBinding};
use aster::ident::ToIdent;
use aster::invoke::{Invoke, Identity};
use aster::lifetime::IntoLifetime;
use aster::ty::TyBuilder;
// ////////////////////////////////////////////////////////////////////////////
pub trait IntoPath {
fn into_path(self) -> Path;
}
impl IntoPath for Path {
fn into_path(self) -> Path {
self
}
}
impl IntoPath for Ident {
fn into_path(self) -> Path {
PathBuilder::new().id(self).build()
}
}
impl<'a> IntoPath for &'a str {
fn into_path(self) -> Path {
PathBuilder::new().id(self).build()
}
}
impl IntoPath for String {
fn into_path(self) -> Path {
(&*self).into_path()
}
}
impl<'a, T> IntoPath for &'a [T]
where T: ToIdent
{
fn into_path(self) -> Path {
PathBuilder::new().ids(self).build()
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct PathBuilder<F = Identity> {
callback: F,
global: bool,
}
impl PathBuilder {
pub fn new() -> Self {
PathBuilder::with_callback(Identity)
}
}
impl<F> PathBuilder<F>
where F: Invoke<Path>
{
pub fn with_callback(callback: F) -> Self {
PathBuilder {
callback: callback,
global: false,
}
}
pub fn build(self, path: Path) -> F::Result {
self.callback.invoke(path)
}
pub fn global(mut self) -> Self {
self.global = true;
self
}
pub fn ids<I, T>(self, ids: I) -> PathSegmentsBuilder<F>
where I: IntoIterator<Item = T>,
T: ToIdent
{
let mut ids = ids.into_iter();
let id = ids.next().expect("passed path with no id");
self.id(id).ids(ids)
}
pub fn id<I>(self, id: I) -> PathSegmentsBuilder<F>
where I: ToIdent
{
self.segment(id).build()
}
pub fn segment<I>(self, id: I) -> PathSegmentBuilder<PathSegmentsBuilder<F>>
where I: ToIdent
{
PathSegmentBuilder::with_callback(id,
PathSegmentsBuilder {
callback: self.callback,
global: self.global,
segments: Vec::new(),
})
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct PathSegmentsBuilder<F = Identity> {
callback: F,
global: bool,
segments: Vec<PathSegment>,
}
impl<F> PathSegmentsBuilder<F>
where F: Invoke<Path>
{
pub fn ids<I, T>(mut self, ids: I) -> PathSegmentsBuilder<F>
where I: IntoIterator<Item = T>,
T: ToIdent
{
for id in ids {
self = self.id(id);
}
self
}
pub fn id<T>(self, id: T) -> PathSegmentsBuilder<F>
where T: ToIdent
{
self.segment(id).build()
}
pub fn segment<T>(self, id: T) -> PathSegmentBuilder<Self>
where T: ToIdent
{
PathSegmentBuilder::with_callback(id, self)
}
pub fn build(self) -> F::Result {
self.callback.invoke(Path {
global: self.global,
segments: self.segments,
})
}
}
impl<F> Invoke<PathSegment> for PathSegmentsBuilder<F> {
type Result = Self;
fn invoke(mut self, segment: PathSegment) -> Self {
self.segments.push(segment);
self
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct PathSegmentBuilder<F = Identity> {
callback: F,
id: Ident,
lifetimes: Vec<Lifetime>,
tys: Vec<Ty>,
bindings: Vec<TypeBinding>,
}
impl<F> PathSegmentBuilder<F>
where F: Invoke<PathSegment>
{
pub fn with_callback<I>(id: I, callback: F) -> Self
where I: ToIdent
{
PathSegmentBuilder {
callback: callback,
id: id.to_ident(),
lifetimes: Vec::new(),
tys: Vec::new(),
bindings: Vec::new(),
}
}
pub fn with_generics(self, generics: Generics) -> Self {
// Strip off the bounds.
let lifetimes = generics.lifetimes
.iter()
.map(|lifetime_def| lifetime_def.lifetime.clone());
let tys = generics.ty_params
.iter()
.map(|ty_param| TyBuilder::new().id(ty_param.ident.clone()));
self.with_lifetimes(lifetimes)
.with_tys(tys)
}
pub fn with_lifetimes<I, L>(mut self, iter: I) -> Self
where I: IntoIterator<Item = L>,
L: IntoLifetime
{
let iter = iter.into_iter().map(|lifetime| lifetime.into_lifetime());
self.lifetimes.extend(iter);
self
}
pub fn with_lifetime<L>(mut self, lifetime: L) -> Self
where L: IntoLifetime
{
self.lifetimes.push(lifetime.into_lifetime());
self
}
pub fn lifetime<N>(self, name: N) -> Self
where N: ToIdent
{
let lifetime = Lifetime { ident: name.to_ident() };
self.with_lifetime(lifetime)
}
pub fn with_tys<I>(mut self, iter: I) -> Self
where I: IntoIterator<Item = Ty>
{
self.tys.extend(iter);
self
}
pub fn with_ty(mut self, ty: Ty) -> Self {
self.tys.push(ty);
self
}
pub fn ty(self) -> TyBuilder<Self> {
TyBuilder::with_callback(self)
}
pub fn with_binding(mut self, binding: TypeBinding) -> Self {
self.bindings.push(binding);
self
}
pub fn binding<T>(self, id: T) -> TyBuilder<TypeBindingBuilder<F>>
where T: ToIdent
{
TyBuilder::with_callback(TypeBindingBuilder {
id: id.to_ident(),
builder: self,
})
}
pub fn no_return(self) -> F::Result {
self.build_return(None)
}
pub fn return_(self) -> TyBuilder<PathSegmentReturnBuilder<F>> {
TyBuilder::with_callback(PathSegmentReturnBuilder(self))
}
pub fn build_return(self, output: Option<Ty>) -> F::Result {
let data = ParenthesizedParameterData {
inputs: self.tys,
output: output,
};
let parameters = PathParameters::Parenthesized(data);
self.callback.invoke(PathSegment {
ident: self.id,
parameters: parameters,
})
}
pub fn build(self) -> F::Result {
let data = AngleBracketedParameterData {
lifetimes: self.lifetimes,
types: self.tys,
bindings: self.bindings,
};
let parameters = PathParameters::AngleBracketed(data);
self.callback.invoke(PathSegment {
ident: self.id,
parameters: parameters,
})
}
}
impl<F> Invoke<Ty> for PathSegmentBuilder<F>
where F: Invoke<PathSegment>
{
type Result = Self;
fn invoke(self, ty: Ty) -> Self {
self.with_ty(ty)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TypeBindingBuilder<F> {
id: Ident,
builder: PathSegmentBuilder<F>,
}
impl<F> Invoke<Ty> for TypeBindingBuilder<F>
where F: Invoke<PathSegment>
{
type Result = PathSegmentBuilder<F>;
fn invoke(self, ty: Ty) -> Self::Result {
let id = self.id;
self.builder.with_binding(TypeBinding {
ident: id,
ty: ty,
})
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct PathSegmentReturnBuilder<F>(PathSegmentBuilder<F>);
impl<F> Invoke<Ty> for PathSegmentReturnBuilder<F>
where F: Invoke<PathSegment>
{
type Result = F::Result;
fn invoke(self, ty: Ty) -> Self::Result {
self.0.build_return(Some(ty))
}
}
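A hypothetical sketch of the path builders above (not part of the vendored file), same `aster`-feature assumption; each `segment(..)` opens a `PathSegmentBuilder`, its inner `build()` returns to the segment list, and the outer `build()` yields the assembled `syn::Path`.

extern crate syn;

use syn::aster::path::PathBuilder;

fn example_path() -> syn::Path {
    // Builds a path equivalent to `::std::collections::HashMap<K, V>`.
    PathBuilder::new()
        .global()
        .id("std")
        .id("collections")
        .segment("HashMap")
            .ty().id("K")
            .ty().id("V")
            .build()   // closes the `HashMap` segment
        .build()       // closes the path
}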


@@ -0,0 +1,146 @@
use {Path, PathSegment, QSelf, Ty};
use aster::ident::ToIdent;
use aster::invoke::{Invoke, Identity};
use aster::path::{PathBuilder, PathSegmentBuilder};
use aster::ty::TyBuilder;
// ////////////////////////////////////////////////////////////////////////////
pub struct QPathBuilder<F = Identity> {
callback: F,
}
impl QPathBuilder {
pub fn new() -> Self {
QPathBuilder::with_callback(Identity)
}
}
impl<F> QPathBuilder<F>
where F: Invoke<(QSelf, Path)>
{
/// Construct a `QPathBuilder` that will call the `callback` with a constructed `QSelf`
/// and `Path`.
pub fn with_callback(callback: F) -> Self {
QPathBuilder { callback: callback }
}
/// Build a qualified path first by starting with a type builder.
pub fn with_ty(self, ty: Ty) -> QPathTyBuilder<F> {
QPathTyBuilder {
builder: self,
ty: ty,
}
}
/// Build a qualified path first by starting with a type builder.
pub fn ty(self) -> TyBuilder<Self> {
TyBuilder::with_callback(self)
}
/// Build a qualified path with a concrete type and path.
pub fn build(self, qself: QSelf, path: Path) -> F::Result {
self.callback.invoke((qself, path))
}
}
impl<F> Invoke<Ty> for QPathBuilder<F>
where F: Invoke<(QSelf, Path)>
{
type Result = QPathTyBuilder<F>;
fn invoke(self, ty: Ty) -> QPathTyBuilder<F> {
self.with_ty(ty)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct QPathTyBuilder<F> {
builder: QPathBuilder<F>,
ty: Ty,
}
impl<F> QPathTyBuilder<F>
where F: Invoke<(QSelf, Path)>
{
/// Build a qualified path with a path builder.
// Clippy false positive
// https://github.com/Manishearth/rust-clippy/issues/1285
#[cfg_attr(feature = "clippy", allow(wrong_self_convention))]
pub fn as_(self) -> PathBuilder<Self> {
PathBuilder::with_callback(self)
}
pub fn id<T>(self, id: T) -> F::Result
where T: ToIdent
{
let path = Path {
global: false,
segments: vec![],
};
self.as_().build(path).id(id)
}
pub fn segment<T>(self, id: T) -> PathSegmentBuilder<QPathQSelfBuilder<F>>
where T: ToIdent
{
let path = Path {
global: false,
segments: vec![],
};
self.as_().build(path).segment(id)
}
}
impl<F> Invoke<Path> for QPathTyBuilder<F>
where F: Invoke<(QSelf, Path)>
{
type Result = QPathQSelfBuilder<F>;
fn invoke(self, path: Path) -> QPathQSelfBuilder<F> {
QPathQSelfBuilder {
builder: self.builder,
qself: QSelf {
ty: Box::new(self.ty),
position: path.segments.len(),
},
path: path,
}
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct QPathQSelfBuilder<F> {
builder: QPathBuilder<F>,
qself: QSelf,
path: Path,
}
impl<F> QPathQSelfBuilder<F>
where F: Invoke<(QSelf, Path)>
{
pub fn id<T>(self, id: T) -> F::Result
where T: ToIdent
{
self.segment(id).build()
}
pub fn segment<T>(self, id: T) -> PathSegmentBuilder<QPathQSelfBuilder<F>>
where T: ToIdent
{
PathSegmentBuilder::with_callback(id, self)
}
}
impl<F> Invoke<PathSegment> for QPathQSelfBuilder<F>
where F: Invoke<(QSelf, Path)>
{
type Result = F::Result;
fn invoke(mut self, segment: PathSegment) -> F::Result {
self.path.segments.push(segment);
self.builder.build(self.qself, self.path)
}
}
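// Editorial usage sketch, not part of the vendored file: it assumes syn is
// built with the "aster" feature, that this module is reachable from outside
// the crate as `syn::aster::qpath` (inferred from the internal `use` lines
// above), and that aster's `Identity` callback returns the built value as-is.
fn qpath_builder_sketch() -> syn::Ty {
    use syn::aster::qpath::QPathBuilder;
    // `<T>::Output` as a `(QSelf, Path)` pair; the trait path stays empty,
    // exactly as `QPathTyBuilder::id` constructs it above.
    let (qself, path) = QPathBuilder::new()
        .ty().id("T")   // the self type
        .id("Output");  // the associated item
    syn::Ty::Path(Some(qself), path)
}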


@@ -0,0 +1,578 @@
use {Generics, Lifetime, MutTy, Mutability, Path, QSelf, Ty, TyParamBound};
use aster::ident::ToIdent;
use aster::invoke::{Invoke, Identity};
use aster::lifetime::IntoLifetime;
use aster::path::PathBuilder;
use aster::qpath::QPathBuilder;
use aster::ty_param::TyParamBoundBuilder;
// ////////////////////////////////////////////////////////////////////////////
pub struct TyBuilder<F = Identity> {
callback: F,
}
impl TyBuilder {
pub fn new() -> Self {
TyBuilder::with_callback(Identity)
}
}
impl<F> TyBuilder<F>
where F: Invoke<Ty>
{
pub fn with_callback(callback: F) -> Self {
TyBuilder { callback: callback }
}
pub fn build(self, ty: Ty) -> F::Result {
self.callback.invoke(ty)
}
pub fn id<I>(self, id: I) -> F::Result
where I: ToIdent
{
self.path().id(id).build()
}
pub fn build_path(self, path: Path) -> F::Result {
self.build(Ty::Path(None, path))
}
pub fn build_qpath(self, qself: QSelf, path: Path) -> F::Result {
self.build(Ty::Path(Some(qself), path))
}
pub fn path(self) -> PathBuilder<TyPathBuilder<F>> {
PathBuilder::with_callback(TyPathBuilder(self))
}
pub fn qpath(self) -> QPathBuilder<TyQPathBuilder<F>> {
QPathBuilder::with_callback(TyQPathBuilder(self))
}
pub fn isize(self) -> F::Result {
self.id("isize")
}
pub fn i8(self) -> F::Result {
self.id("i8")
}
pub fn i16(self) -> F::Result {
self.id("i16")
}
pub fn i32(self) -> F::Result {
self.id("i32")
}
pub fn i64(self) -> F::Result {
self.id("i64")
}
pub fn usize(self) -> F::Result {
self.id("usize")
}
pub fn u8(self) -> F::Result {
self.id("u8")
}
pub fn u16(self) -> F::Result {
self.id("u16")
}
pub fn u32(self) -> F::Result {
self.id("u32")
}
pub fn u64(self) -> F::Result {
self.id("u64")
}
pub fn f32(self) -> F::Result {
self.id("f32")
}
pub fn f64(self) -> F::Result {
self.id("f64")
}
pub fn bool(self) -> F::Result {
self.id("bool")
}
pub fn unit(self) -> F::Result {
self.tuple().build()
}
pub fn tuple(self) -> TyTupleBuilder<F> {
TyTupleBuilder {
builder: self,
tys: vec![],
}
}
pub fn build_slice(self, ty: Ty) -> F::Result {
self.build(Ty::Slice(Box::new(ty)))
}
pub fn slice(self) -> TyBuilder<TySliceBuilder<F>> {
TyBuilder::with_callback(TySliceBuilder(self))
}
pub fn ref_(self) -> TyRefBuilder<F> {
TyRefBuilder {
builder: self,
lifetime: None,
mutability: Mutability::Immutable,
}
}
pub fn never(self) -> F::Result {
self.build(Ty::Never)
}
pub fn infer(self) -> F::Result {
self.build(Ty::Infer)
}
pub fn option(self) -> TyBuilder<TyOptionBuilder<F>> {
TyBuilder::with_callback(TyOptionBuilder(self))
}
pub fn result(self) -> TyBuilder<TyResultOkBuilder<F>> {
TyBuilder::with_callback(TyResultOkBuilder(self))
}
pub fn phantom_data(self) -> TyBuilder<TyPhantomDataBuilder<F>> {
TyBuilder::with_callback(TyPhantomDataBuilder(self))
}
pub fn box_(self) -> TyBuilder<TyBoxBuilder<F>> {
TyBuilder::with_callback(TyBoxBuilder(self))
}
pub fn iterator(self) -> TyBuilder<TyIteratorBuilder<F>> {
TyBuilder::with_callback(TyIteratorBuilder(self))
}
pub fn object_sum(self) -> TyBuilder<TyObjectSumBuilder<F>> {
TyBuilder::with_callback(TyObjectSumBuilder { builder: self })
}
pub fn impl_trait(self) -> TyImplTraitTyBuilder<F> {
TyImplTraitTyBuilder {
builder: self,
bounds: Vec::new(),
}
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyPathBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Path> for TyPathBuilder<F>
where F: Invoke<Ty>
{
type Result = F::Result;
fn invoke(self, path: Path) -> F::Result {
self.0.build_path(path)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyQPathBuilder<F>(TyBuilder<F>);
impl<F> Invoke<(QSelf, Path)> for TyQPathBuilder<F>
where F: Invoke<Ty>
{
type Result = F::Result;
fn invoke(self, (qself, path): (QSelf, Path)) -> F::Result {
self.0.build_qpath(qself, path)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TySliceBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TySliceBuilder<F>
where F: Invoke<Ty>
{
type Result = F::Result;
fn invoke(self, ty: Ty) -> F::Result {
self.0.build_slice(ty)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyRefBuilder<F> {
builder: TyBuilder<F>,
lifetime: Option<Lifetime>,
mutability: Mutability,
}
impl<F> TyRefBuilder<F>
where F: Invoke<Ty>
{
pub fn mut_(mut self) -> Self {
self.mutability = Mutability::Mutable;
self
}
pub fn lifetime<N>(mut self, name: N) -> Self
where N: ToIdent
{
self.lifetime = Some(Lifetime { ident: name.to_ident() });
self
}
pub fn build_ty(self, ty: Ty) -> F::Result {
let ty = MutTy {
ty: ty,
mutability: self.mutability,
};
self.builder.build(Ty::Rptr(self.lifetime, Box::new(ty)))
}
pub fn ty(self) -> TyBuilder<Self> {
TyBuilder::with_callback(self)
}
}
impl<F> Invoke<Ty> for TyRefBuilder<F>
where F: Invoke<Ty>
{
type Result = F::Result;
fn invoke(self, ty: Ty) -> F::Result {
self.build_ty(ty)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyOptionBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyOptionBuilder<F>
where F: Invoke<Ty>
{
type Result = F::Result;
fn invoke(self, ty: Ty) -> F::Result {
let path = PathBuilder::new()
.global()
.id("std")
.id("option")
.segment("Option")
.with_ty(ty)
.build()
.build();
self.0.build_path(path)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyResultOkBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyResultOkBuilder<F>
where F: Invoke<Ty>
{
type Result = TyBuilder<TyResultErrBuilder<F>>;
fn invoke(self, ty: Ty) -> TyBuilder<TyResultErrBuilder<F>> {
TyBuilder::with_callback(TyResultErrBuilder(self.0, ty))
}
}
pub struct TyResultErrBuilder<F>(TyBuilder<F>, Ty);
impl<F> Invoke<Ty> for TyResultErrBuilder<F>
where F: Invoke<Ty>
{
type Result = F::Result;
fn invoke(self, ty: Ty) -> F::Result {
let path = PathBuilder::new()
.global()
.id("std")
.id("result")
.segment("Result")
.with_ty(self.1)
.with_ty(ty)
.build()
.build();
self.0.build_path(path)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyPhantomDataBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyPhantomDataBuilder<F>
where F: Invoke<Ty>
{
type Result = F::Result;
fn invoke(self, ty: Ty) -> F::Result {
let path = PathBuilder::new()
.global()
.id("std")
.id("marker")
.segment("PhantomData")
.with_ty(ty)
.build()
.build();
self.0.build_path(path)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyBoxBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyBoxBuilder<F>
where F: Invoke<Ty>
{
type Result = F::Result;
fn invoke(self, ty: Ty) -> F::Result {
let path = PathBuilder::new()
.global()
.id("std")
.id("boxed")
.segment("Box")
.with_ty(ty)
.build()
.build();
self.0.build_path(path)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyIteratorBuilder<F>(TyBuilder<F>);
impl<F> Invoke<Ty> for TyIteratorBuilder<F>
where F: Invoke<Ty>
{
type Result = F::Result;
fn invoke(self, ty: Ty) -> F::Result {
let path = PathBuilder::new()
.global()
.id("std")
.id("iter")
.segment("Iterator")
.binding("Item")
.build(ty.clone())
.build()
.build();
self.0.build_path(path)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyObjectSumBuilder<F> {
builder: TyBuilder<F>,
}
impl<F> Invoke<Ty> for TyObjectSumBuilder<F>
where F: Invoke<Ty>
{
type Result = TyObjectSumTyBuilder<F>;
fn invoke(self, ty: Ty) -> Self::Result {
TyObjectSumTyBuilder {
builder: self.builder,
ty: ty,
bounds: Vec::new(),
}
}
}
pub struct TyObjectSumTyBuilder<F> {
builder: TyBuilder<F>,
ty: Ty,
bounds: Vec<TyParamBound>,
}
impl<F> TyObjectSumTyBuilder<F>
where F: Invoke<Ty>
{
pub fn with_bounds<I>(mut self, iter: I) -> Self
where I: Iterator<Item = TyParamBound>
{
self.bounds.extend(iter);
self
}
pub fn with_bound(mut self, bound: TyParamBound) -> Self {
self.bounds.push(bound);
self
}
pub fn bound(self) -> TyParamBoundBuilder<Self> {
TyParamBoundBuilder::with_callback(self)
}
pub fn with_generics(self, generics: Generics) -> Self {
self.with_lifetimes(generics.lifetimes
.into_iter()
.map(|def| def.lifetime))
}
pub fn with_lifetimes<I, L>(mut self, lifetimes: I) -> Self
where I: Iterator<Item = L>,
L: IntoLifetime
{
for lifetime in lifetimes {
self = self.lifetime(lifetime);
}
self
}
pub fn lifetime<L>(self, lifetime: L) -> Self
where L: IntoLifetime
{
self.bound().lifetime(lifetime)
}
pub fn build(self) -> F::Result {
let bounds = self.bounds;
self.builder.build(Ty::ObjectSum(Box::new(self.ty), bounds))
}
}
impl<F> Invoke<TyParamBound> for TyObjectSumTyBuilder<F>
where F: Invoke<Ty>
{
type Result = Self;
fn invoke(self, bound: TyParamBound) -> Self {
self.with_bound(bound)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyImplTraitTyBuilder<F> {
builder: TyBuilder<F>,
bounds: Vec<TyParamBound>,
}
impl<F> TyImplTraitTyBuilder<F>
where F: Invoke<Ty>
{
pub fn with_bounds<I>(mut self, iter: I) -> Self
where I: Iterator<Item = TyParamBound>
{
self.bounds.extend(iter);
self
}
pub fn with_bound(mut self, bound: TyParamBound) -> Self {
self.bounds.push(bound);
self
}
pub fn bound(self) -> TyParamBoundBuilder<Self> {
TyParamBoundBuilder::with_callback(self)
}
pub fn with_generics(self, generics: Generics) -> Self {
self.with_lifetimes(generics.lifetimes
.into_iter()
.map(|def| def.lifetime))
}
pub fn with_lifetimes<I, L>(mut self, lifetimes: I) -> Self
where I: Iterator<Item = L>,
L: IntoLifetime
{
for lifetime in lifetimes {
self = self.lifetime(lifetime);
}
self
}
pub fn lifetime<L>(self, lifetime: L) -> Self
where L: IntoLifetime
{
self.bound().lifetime(lifetime)
}
pub fn build(self) -> F::Result {
let bounds = self.bounds;
self.builder.build(Ty::ImplTrait(bounds))
}
}
impl<F> Invoke<TyParamBound> for TyImplTraitTyBuilder<F>
where F: Invoke<Ty>
{
type Result = Self;
fn invoke(self, bound: TyParamBound) -> Self {
self.with_bound(bound)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyTupleBuilder<F> {
builder: TyBuilder<F>,
tys: Vec<Ty>,
}
impl<F> TyTupleBuilder<F>
where F: Invoke<Ty>
{
pub fn with_tys<I>(mut self, iter: I) -> Self
where I: IntoIterator<Item = Ty>
{
self.tys.extend(iter);
self
}
pub fn with_ty(mut self, ty: Ty) -> Self {
self.tys.push(ty);
self
}
pub fn ty(self) -> TyBuilder<Self> {
TyBuilder::with_callback(self)
}
pub fn build(self) -> F::Result {
self.builder.build(Ty::Tup(self.tys))
}
}
impl<F> Invoke<Ty> for TyTupleBuilder<F>
where F: Invoke<Ty>
{
type Result = Self;
fn invoke(self, ty: Ty) -> Self {
self.with_ty(ty)
}
}
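// Editorial usage sketch, not part of the vendored file: the module path
// `syn::aster::ty` and the "aster" feature are assumed, and `Identity` is
// assumed to hand back the finished `Ty` unchanged, as the impls above imply.
fn ty_builder_sketch() -> (syn::Ty, syn::Ty, syn::Ty) {
    use syn::aster::ty::TyBuilder;
    // `::std::option::Option<i32>`
    let opt = TyBuilder::new().option().i32();
    // `&'a mut str`
    let str_ref = TyBuilder::new().ref_().lifetime("'a").mut_().ty().id("str");
    // `(u8, bool)`
    let pair = TyBuilder::new().tuple().ty().u8().ty().bool().build();
    (opt, str_ref, pair)
}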


@@ -0,0 +1,262 @@
use {Ident, LifetimeDef, Path, PolyTraitRef, TraitBoundModifier, Ty, TyParam, TyParamBound};
use aster::invoke::{Invoke, Identity};
use aster::lifetime::{IntoLifetime, IntoLifetimeDef, LifetimeDefBuilder};
use aster::path::{IntoPath, PathBuilder};
use aster::ty::TyBuilder;
// ////////////////////////////////////////////////////////////////////////////
pub struct TyParamBuilder<F = Identity> {
callback: F,
id: Ident,
bounds: Vec<TyParamBound>,
default: Option<Ty>,
}
impl TyParamBuilder {
pub fn new<I>(id: I) -> Self
where I: Into<Ident>
{
TyParamBuilder::with_callback(id, Identity)
}
pub fn from_ty_param(ty_param: TyParam) -> Self {
TyParamBuilder::from_ty_param_with_callback(Identity, ty_param)
}
}
impl<F> TyParamBuilder<F>
where F: Invoke<TyParam>
{
pub fn with_callback<I>(id: I, callback: F) -> Self
where I: Into<Ident>
{
TyParamBuilder {
callback: callback,
id: id.into(),
bounds: Vec::new(),
default: None,
}
}
pub fn from_ty_param_with_callback(callback: F, ty_param: TyParam) -> Self {
TyParamBuilder {
callback: callback,
id: ty_param.ident,
bounds: ty_param.bounds,
default: ty_param.default,
}
}
pub fn with_default(mut self, ty: Ty) -> Self {
self.default = Some(ty);
self
}
pub fn default(self) -> TyBuilder<Self> {
TyBuilder::with_callback(self)
}
pub fn with_bound(mut self, bound: TyParamBound) -> Self {
self.bounds.push(bound);
self
}
pub fn bound(self) -> TyParamBoundBuilder<Self> {
TyParamBoundBuilder::with_callback(self)
}
pub fn with_trait_bound(self, trait_ref: PolyTraitRef) -> Self {
self.bound().build_trait(trait_ref, TraitBoundModifier::None)
}
pub fn trait_bound<P>(self, path: P) -> PolyTraitRefBuilder<Self>
where P: IntoPath
{
PolyTraitRefBuilder::with_callback(path, self)
}
pub fn lifetime_bound<L>(mut self, lifetime: L) -> Self
where L: IntoLifetime
{
let lifetime = lifetime.into_lifetime();
self.bounds.push(TyParamBound::Region(lifetime));
self
}
pub fn build(self) -> F::Result {
self.callback.invoke(TyParam {
attrs: vec![],
ident: self.id,
bounds: self.bounds,
default: self.default,
})
}
}
impl<F> Invoke<Ty> for TyParamBuilder<F>
where F: Invoke<TyParam>
{
type Result = Self;
fn invoke(self, ty: Ty) -> Self {
self.with_default(ty)
}
}
impl<F> Invoke<TyParamBound> for TyParamBuilder<F>
where F: Invoke<TyParam>
{
type Result = Self;
fn invoke(self, bound: TyParamBound) -> Self {
self.with_bound(bound)
}
}
impl<F> Invoke<PolyTraitRef> for TyParamBuilder<F>
where F: Invoke<TyParam>
{
type Result = Self;
fn invoke(self, trait_ref: PolyTraitRef) -> Self {
self.with_trait_bound(trait_ref)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyParamBoundBuilder<F = Identity> {
callback: F,
}
impl TyParamBoundBuilder {
pub fn new() -> Self {
TyParamBoundBuilder::with_callback(Identity)
}
}
impl<F> TyParamBoundBuilder<F>
where F: Invoke<TyParamBound>
{
pub fn with_callback(callback: F) -> Self {
TyParamBoundBuilder { callback: callback }
}
pub fn build_trait(self, poly_trait: PolyTraitRef, modifier: TraitBoundModifier) -> F::Result {
let bound = TyParamBound::Trait(poly_trait, modifier);
self.callback.invoke(bound)
}
pub fn trait_<P>(self, path: P) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>>
where P: IntoPath
{
let builder = TraitTyParamBoundBuilder {
builder: self,
modifier: TraitBoundModifier::None,
};
PolyTraitRefBuilder::with_callback(path, builder)
}
pub fn maybe_trait<P>(self, path: P) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>>
where P: IntoPath
{
let builder = TraitTyParamBoundBuilder {
builder: self,
modifier: TraitBoundModifier::Maybe,
};
PolyTraitRefBuilder::with_callback(path, builder)
}
pub fn iterator(self, ty: Ty) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<F>> {
let path = PathBuilder::new()
.global()
.id("std")
.id("iter")
.segment("Iterator")
.binding("Item")
.build(ty)
.build()
.build();
self.trait_(path)
}
pub fn lifetime<L>(self, lifetime: L) -> F::Result
where L: IntoLifetime
{
let lifetime = lifetime.into_lifetime();
self.callback.invoke(TyParamBound::Region(lifetime))
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TraitTyParamBoundBuilder<F> {
builder: TyParamBoundBuilder<F>,
modifier: TraitBoundModifier,
}
impl<F> Invoke<PolyTraitRef> for TraitTyParamBoundBuilder<F>
where F: Invoke<TyParamBound>
{
type Result = F::Result;
fn invoke(self, poly_trait: PolyTraitRef) -> Self::Result {
self.builder.build_trait(poly_trait, self.modifier)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct PolyTraitRefBuilder<F> {
callback: F,
trait_ref: Path,
lifetimes: Vec<LifetimeDef>,
}
impl<F> PolyTraitRefBuilder<F>
where F: Invoke<PolyTraitRef>
{
pub fn with_callback<P>(path: P, callback: F) -> Self
where P: IntoPath
{
PolyTraitRefBuilder {
callback: callback,
trait_ref: path.into_path(),
lifetimes: Vec::new(),
}
}
pub fn with_lifetime<L>(mut self, lifetime: L) -> Self
where L: IntoLifetimeDef
{
self.lifetimes.push(lifetime.into_lifetime_def());
self
}
pub fn lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self>
where N: Into<Ident>
{
LifetimeDefBuilder::with_callback(name, self)
}
pub fn build(self) -> F::Result {
self.callback.invoke(PolyTraitRef {
bound_lifetimes: self.lifetimes,
trait_ref: self.trait_ref,
})
}
}
impl<F> Invoke<LifetimeDef> for PolyTraitRefBuilder<F>
where F: Invoke<PolyTraitRef>
{
type Result = Self;
fn invoke(self, lifetime: LifetimeDef) -> Self {
self.with_lifetime(lifetime)
}
}
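// Editorial usage sketch, not part of the vendored file: the module path
// `syn::aster::ty_param` is assumed, as are an `IntoPath` impl for `&str`,
// an `IntoLifetime` impl for `Lifetime`, and the pass-through `Identity`.
fn ty_param_sketch() -> syn::TyParam {
    use syn::aster::ty_param::TyParamBuilder;
    // The declaration `T: Clone + 'a`
    TyParamBuilder::new("T")
        .trait_bound("Clone").build()             // the `Clone` bound
        .lifetime_bound(syn::Lifetime::new("'a")) // the `'a` bound
        .build()
}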


@@ -0,0 +1,259 @@
use {Ident, Lifetime, LifetimeDef, Ty, TyParamBound, WhereBoundPredicate, WherePredicate,
WhereRegionPredicate};
use aster::invoke::{Invoke, Identity};
use aster::lifetime::{IntoLifetime, IntoLifetimeDef, LifetimeDefBuilder};
use aster::path::IntoPath;
use aster::ty::TyBuilder;
use aster::ty_param::{TyParamBoundBuilder, PolyTraitRefBuilder, TraitTyParamBoundBuilder};
// ////////////////////////////////////////////////////////////////////////////
pub struct WherePredicateBuilder<F = Identity> {
callback: F,
}
impl WherePredicateBuilder {
pub fn new() -> Self {
WherePredicateBuilder::with_callback(Identity)
}
}
impl<F> WherePredicateBuilder<F>
where F: Invoke<WherePredicate>
{
pub fn with_callback(callback: F) -> Self {
WherePredicateBuilder { callback: callback }
}
pub fn bound(self) -> TyBuilder<Self> {
TyBuilder::with_callback(self)
}
pub fn lifetime<L>(self, lifetime: L) -> WhereRegionPredicateBuilder<F>
where L: IntoLifetime
{
WhereRegionPredicateBuilder {
callback: self.callback,
lifetime: lifetime.into_lifetime(),
bounds: Vec::new(),
}
}
}
impl<F> Invoke<Ty> for WherePredicateBuilder<F>
where F: Invoke<WherePredicate>
{
type Result = WhereBoundPredicateTyBuilder<F>;
fn invoke(self, ty: Ty) -> Self::Result {
WhereBoundPredicateTyBuilder {
callback: self.callback,
ty: ty,
bound_lifetimes: Vec::new(),
}
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct WhereBoundPredicateBuilder<F> {
callback: F,
}
impl<F> Invoke<Ty> for WhereBoundPredicateBuilder<F>
where F: Invoke<WherePredicate>
{
type Result = WhereBoundPredicateTyBuilder<F>;
fn invoke(self, ty: Ty) -> Self::Result {
WhereBoundPredicateTyBuilder {
callback: self.callback,
ty: ty,
bound_lifetimes: Vec::new(),
}
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct WhereBoundPredicateTyBuilder<F> {
callback: F,
ty: Ty,
bound_lifetimes: Vec<LifetimeDef>,
}
impl<F> WhereBoundPredicateTyBuilder<F>
where F: Invoke<WherePredicate>
{
pub fn with_for_lifetime<L>(mut self, lifetime: L) -> Self
where L: IntoLifetimeDef
{
self.bound_lifetimes.push(lifetime.into_lifetime_def());
self
}
pub fn for_lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self>
where N: Into<Ident>
{
LifetimeDefBuilder::with_callback(name, self)
}
pub fn with_bound(self, bound: TyParamBound) -> WhereBoundPredicateTyBoundsBuilder<F> {
WhereBoundPredicateTyBoundsBuilder {
callback: self.callback,
ty: self.ty,
bound_lifetimes: self.bound_lifetimes,
bounds: vec![bound],
}
}
pub fn bound(self) -> TyParamBoundBuilder<WhereBoundPredicateTyBoundsBuilder<F>> {
let builder = WhereBoundPredicateTyBoundsBuilder {
callback: self.callback,
ty: self.ty,
bound_lifetimes: self.bound_lifetimes,
bounds: vec![],
};
TyParamBoundBuilder::with_callback(builder)
}
pub fn trait_<P>
(self,
path: P)
-> PolyTraitRefBuilder<TraitTyParamBoundBuilder<WhereBoundPredicateTyBoundsBuilder<F>>>
where P: IntoPath
{
self.bound().trait_(path)
}
pub fn lifetime<L>(self, lifetime: L) -> WhereBoundPredicateTyBoundsBuilder<F>
where L: IntoLifetime
{
self.bound().lifetime(lifetime)
}
}
impl<F> Invoke<LifetimeDef> for WhereBoundPredicateTyBuilder<F>
where F: Invoke<WherePredicate>
{
type Result = Self;
fn invoke(self, lifetime: LifetimeDef) -> Self {
self.with_for_lifetime(lifetime)
}
}
impl<F> Invoke<TyParamBound> for WhereBoundPredicateTyBuilder<F>
where F: Invoke<WherePredicate>
{
type Result = WhereBoundPredicateTyBoundsBuilder<F>;
fn invoke(self, bound: TyParamBound) -> Self::Result {
self.with_bound(bound)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct WhereBoundPredicateTyBoundsBuilder<F> {
callback: F,
ty: Ty,
bound_lifetimes: Vec<LifetimeDef>,
bounds: Vec<TyParamBound>,
}
impl<F> WhereBoundPredicateTyBoundsBuilder<F>
where F: Invoke<WherePredicate>
{
pub fn with_for_lifetime<L>(mut self, lifetime: L) -> Self
where L: IntoLifetimeDef
{
self.bound_lifetimes.push(lifetime.into_lifetime_def());
self
}
pub fn for_lifetime<N>(self, name: N) -> LifetimeDefBuilder<Self>
where N: Into<Ident>
{
LifetimeDefBuilder::with_callback(name, self)
}
pub fn with_bound(mut self, bound: TyParamBound) -> Self {
self.bounds.push(bound);
self
}
pub fn bound(self) -> TyParamBoundBuilder<Self> {
TyParamBoundBuilder::with_callback(self)
}
pub fn trait_<P>(self, path: P) -> PolyTraitRefBuilder<TraitTyParamBoundBuilder<Self>>
where P: IntoPath
{
self.bound().trait_(path)
}
pub fn lifetime<L>(self, lifetime: L) -> Self
where L: IntoLifetime
{
self.bound().lifetime(lifetime)
}
pub fn build(self) -> F::Result {
let predicate = WhereBoundPredicate {
bound_lifetimes: self.bound_lifetimes,
bounded_ty: self.ty,
bounds: self.bounds,
};
self.callback.invoke(WherePredicate::BoundPredicate(predicate))
}
}
impl<F> Invoke<LifetimeDef> for WhereBoundPredicateTyBoundsBuilder<F>
where F: Invoke<WherePredicate>
{
type Result = Self;
fn invoke(self, lifetime: LifetimeDef) -> Self {
self.with_for_lifetime(lifetime)
}
}
impl<F> Invoke<TyParamBound> for WhereBoundPredicateTyBoundsBuilder<F>
where F: Invoke<WherePredicate>
{
type Result = Self;
fn invoke(self, bound: TyParamBound) -> Self {
self.with_bound(bound)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct WhereRegionPredicateBuilder<F> {
callback: F,
lifetime: Lifetime,
bounds: Vec<Lifetime>,
}
impl<F> WhereRegionPredicateBuilder<F>
where F: Invoke<WherePredicate>
{
pub fn bound<L>(mut self, lifetime: L) -> Self
where L: IntoLifetime
{
self.bounds.push(lifetime.into_lifetime());
self
}
pub fn build(self) -> F::Result {
let predicate = WhereRegionPredicate {
lifetime: self.lifetime,
bounds: self.bounds,
};
self.callback.invoke(WherePredicate::RegionPredicate(predicate))
}
}
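// Editorial usage sketch, not part of the vendored file: the module name
// (`syn::aster::where_predicate`) is a guess, and an `IntoPath` impl for
// `&str` plus the pass-through `Identity` callback are assumed.
fn where_predicate_sketch() -> syn::WherePredicate {
    use syn::aster::where_predicate::WherePredicateBuilder;
    // The predicate `T: Clone`, as it would appear in a `where` clause
    WherePredicateBuilder::new()
        .bound().id("T")          // the bounded type
        .trait_("Clone").build()  // the trait bound
        .build()
}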

third_party/rust/syn-0.10.8/src/attr.rs vendored Normal file

@@ -0,0 +1,293 @@
use super::*;
use std::iter;
/// Doc-comments are promoted to attributes that have `is_sugared_doc` = true
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Attribute {
pub style: AttrStyle,
pub value: MetaItem,
pub is_sugared_doc: bool,
}
impl Attribute {
pub fn name(&self) -> &str {
self.value.name()
}
}
/// Distinguishes between Attributes that decorate items and Attributes that
/// are contained as statements within items. These two cases need to be
/// distinguished for pretty-printing.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum AttrStyle {
Outer,
Inner,
}
/// A compile-time attribute item.
///
/// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum MetaItem {
/// Word meta item.
///
/// E.g. `test` as in `#[test]`
Word(Ident),
/// List meta item.
///
/// E.g. `derive(..)` as in `#[derive(..)]`
List(Ident, Vec<NestedMetaItem>),
/// Name value meta item.
///
/// E.g. `feature = "foo"` as in `#[feature = "foo"]`
NameValue(Ident, Lit),
}
impl MetaItem {
pub fn name(&self) -> &str {
match *self {
MetaItem::Word(ref name) |
MetaItem::List(ref name, _) |
MetaItem::NameValue(ref name, _) => name.as_ref(),
}
}
}
/// Possible values inside of compile-time attribute lists.
///
/// E.g. the '..' in `#[name(..)]`.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum NestedMetaItem {
/// A full MetaItem, for recursive meta items.
MetaItem(MetaItem),
/// A literal.
///
/// E.g. "foo", 64, true
Literal(Lit),
}
pub trait FilterAttrs<'a> {
type Ret: Iterator<Item = &'a Attribute>;
fn outer(self) -> Self::Ret;
fn inner(self) -> Self::Ret;
}
impl<'a, T> FilterAttrs<'a> for T
where T: IntoIterator<Item = &'a Attribute>
{
type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
fn outer(self) -> Self::Ret {
fn is_outer(attr: &&Attribute) -> bool {
attr.style == AttrStyle::Outer
}
self.into_iter().filter(is_outer)
}
fn inner(self) -> Self::Ret {
fn is_inner(attr: &&Attribute) -> bool {
attr.style == AttrStyle::Inner
}
self.into_iter().filter(is_inner)
}
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use ident::parsing::ident;
use lit::parsing::lit;
use space::{block_comment, whitespace};
#[cfg(feature = "full")]
named!(pub inner_attr -> Attribute, alt!(
do_parse!(
punct!("#") >>
punct!("!") >>
punct!("[") >>
meta_item: meta_item >>
punct!("]") >>
(Attribute {
style: AttrStyle::Inner,
value: meta_item,
is_sugared_doc: false,
})
)
|
do_parse!(
punct!("//!") >>
content: take_until!("\n") >>
(Attribute {
style: AttrStyle::Inner,
value: MetaItem::NameValue(
"doc".into(),
format!("//!{}", content).into(),
),
is_sugared_doc: true,
})
)
|
do_parse!(
option!(whitespace) >>
peek!(tag!("/*!")) >>
com: block_comment >>
(Attribute {
style: AttrStyle::Inner,
value: MetaItem::NameValue(
"doc".into(),
com.into(),
),
is_sugared_doc: true,
})
)
));
named!(pub outer_attr -> Attribute, alt!(
do_parse!(
punct!("#") >>
punct!("[") >>
meta_item: meta_item >>
punct!("]") >>
(Attribute {
style: AttrStyle::Outer,
value: meta_item,
is_sugared_doc: false,
})
)
|
do_parse!(
punct!("///") >>
not!(peek!(tag!("/"))) >>
content: take_until!("\n") >>
(Attribute {
style: AttrStyle::Outer,
value: MetaItem::NameValue(
"doc".into(),
format!("///{}", content).into(),
),
is_sugared_doc: true,
})
)
|
do_parse!(
option!(whitespace) >>
peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
com: block_comment >>
(Attribute {
style: AttrStyle::Outer,
value: MetaItem::NameValue(
"doc".into(),
com.into(),
),
is_sugared_doc: true,
})
)
));
named!(meta_item -> MetaItem, alt!(
do_parse!(
id: ident >>
punct!("(") >>
inner: terminated_list!(punct!(","), nested_meta_item) >>
punct!(")") >>
(MetaItem::List(id, inner))
)
|
do_parse!(
name: ident >>
punct!("=") >>
value: lit >>
(MetaItem::NameValue(name, value))
)
|
map!(ident, MetaItem::Word)
));
named!(nested_meta_item -> NestedMetaItem, alt!(
meta_item => { NestedMetaItem::MetaItem }
|
lit => { NestedMetaItem::Literal }
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use lit::{Lit, StrStyle};
use quote::{Tokens, ToTokens};
impl ToTokens for Attribute {
fn to_tokens(&self, tokens: &mut Tokens) {
if let Attribute { style,
value: MetaItem::NameValue(ref name,
Lit::Str(ref value, StrStyle::Cooked)),
is_sugared_doc: true } = *self {
if name == "doc" {
match style {
AttrStyle::Inner if value.starts_with("//!") => {
tokens.append(&format!("{}\n", value));
return;
}
AttrStyle::Inner if value.starts_with("/*!") => {
tokens.append(value);
return;
}
AttrStyle::Outer if value.starts_with("///") => {
tokens.append(&format!("{}\n", value));
return;
}
AttrStyle::Outer if value.starts_with("/**") => {
tokens.append(value);
return;
}
_ => {}
}
}
}
tokens.append("#");
if let AttrStyle::Inner = self.style {
tokens.append("!");
}
tokens.append("[");
self.value.to_tokens(tokens);
tokens.append("]");
}
}
impl ToTokens for MetaItem {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
MetaItem::Word(ref ident) => {
ident.to_tokens(tokens);
}
MetaItem::List(ref ident, ref inner) => {
ident.to_tokens(tokens);
tokens.append("(");
tokens.append_separated(inner, ",");
tokens.append(")");
}
MetaItem::NameValue(ref name, ref value) => {
name.to_tokens(tokens);
tokens.append("=");
value.to_tokens(tokens);
}
}
}
}
impl ToTokens for NestedMetaItem {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
NestedMetaItem::MetaItem(ref nested) => {
nested.to_tokens(tokens);
}
NestedMetaItem::Literal(ref lit) => {
lit.to_tokens(tokens);
}
}
}
}
}
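// Editorial sketch, not part of the vendored file: the `MetaItem` tree the
// parsers above produce for `#[derive(Debug)]`, built by hand from the types
// re-exported at the crate root.
fn derive_debug_attr() -> syn::Attribute {
    let attr = syn::Attribute {
        style: syn::AttrStyle::Outer,
        value: syn::MetaItem::List(
            "derive".into(),
            vec![syn::NestedMetaItem::MetaItem(syn::MetaItem::Word("Debug".into()))],
        ),
        is_sugared_doc: false,
    };
    assert_eq!(attr.name(), "derive"); // `name()` reads through to the MetaItem
    attr
}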


@@ -0,0 +1,167 @@
use super::*;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum ConstExpr {
/// A function call
///
/// The first field resolves to the function itself,
/// and the second field is the list of arguments
Call(Box<ConstExpr>, Vec<ConstExpr>),
/// A binary operation (For example: `a + b`, `a * b`)
Binary(BinOp, Box<ConstExpr>, Box<ConstExpr>),
/// A unary operation (For example: `!x`, `*x`)
Unary(UnOp, Box<ConstExpr>),
/// A literal (For example: `1`, `"foo"`)
Lit(Lit),
/// A cast (`foo as f64`)
Cast(Box<ConstExpr>, Box<Ty>),
/// Variable reference, possibly containing `::` and/or type
/// parameters, e.g. foo::bar::<baz>.
Path(Path),
/// An indexing operation (`foo[2]`)
Index(Box<ConstExpr>, Box<ConstExpr>),
/// No-op: used solely so we can pretty-print faithfully
Paren(Box<ConstExpr>),
/// If compiling with full support for expression syntax, any expression is
/// allowed
Other(Other),
}
#[cfg(not(feature = "full"))]
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Other {
_private: (),
}
#[cfg(feature = "full")]
pub type Other = Expr;
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use {BinOp, Ty};
use lit::parsing::lit;
use op::parsing::{binop, unop};
use ty::parsing::{path, ty};
named!(pub const_expr -> ConstExpr, do_parse!(
mut e: alt!(
expr_unary
|
expr_lit
|
expr_path
|
expr_paren
) >>
many0!(alt!(
tap!(args: and_call => {
e = ConstExpr::Call(Box::new(e), args);
})
|
tap!(more: and_binary => {
let (op, other) = more;
e = ConstExpr::Binary(op, Box::new(e), Box::new(other));
})
|
tap!(ty: and_cast => {
e = ConstExpr::Cast(Box::new(e), Box::new(ty));
})
|
tap!(i: and_index => {
e = ConstExpr::Index(Box::new(e), Box::new(i));
})
)) >>
(e)
));
named!(and_call -> Vec<ConstExpr>, do_parse!(
punct!("(") >>
args: terminated_list!(punct!(","), const_expr) >>
punct!(")") >>
(args)
));
named!(and_binary -> (BinOp, ConstExpr), tuple!(binop, const_expr));
named!(expr_unary -> ConstExpr, do_parse!(
operator: unop >>
operand: const_expr >>
(ConstExpr::Unary(operator, Box::new(operand)))
));
named!(expr_lit -> ConstExpr, map!(lit, ConstExpr::Lit));
named!(expr_path -> ConstExpr, map!(path, ConstExpr::Path));
named!(and_index -> ConstExpr, delimited!(punct!("["), const_expr, punct!("]")));
named!(expr_paren -> ConstExpr, do_parse!(
punct!("(") >>
e: const_expr >>
punct!(")") >>
(ConstExpr::Paren(Box::new(e)))
));
named!(and_cast -> Ty, do_parse!(
keyword!("as") >>
ty: ty >>
(ty)
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
impl ToTokens for ConstExpr {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
ConstExpr::Call(ref func, ref args) => {
func.to_tokens(tokens);
tokens.append("(");
tokens.append_separated(args, ",");
tokens.append(")");
}
ConstExpr::Binary(op, ref left, ref right) => {
left.to_tokens(tokens);
op.to_tokens(tokens);
right.to_tokens(tokens);
}
ConstExpr::Unary(op, ref expr) => {
op.to_tokens(tokens);
expr.to_tokens(tokens);
}
ConstExpr::Lit(ref lit) => lit.to_tokens(tokens),
ConstExpr::Cast(ref expr, ref ty) => {
expr.to_tokens(tokens);
tokens.append("as");
ty.to_tokens(tokens);
}
ConstExpr::Path(ref path) => path.to_tokens(tokens),
ConstExpr::Index(ref expr, ref index) => {
expr.to_tokens(tokens);
tokens.append("[");
index.to_tokens(tokens);
tokens.append("]");
}
ConstExpr::Paren(ref expr) => {
tokens.append("(");
expr.to_tokens(tokens);
tokens.append(")");
}
ConstExpr::Other(ref other) => {
other.to_tokens(tokens);
}
}
}
}
#[cfg(not(feature = "full"))]
impl ToTokens for Other {
fn to_tokens(&self, _tokens: &mut Tokens) {
unreachable!()
}
}
}
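// Editorial sketch, not part of the vendored file: the constant expression
// `1 + 2` built by hand. The `Lit::Int(u64, IntTy)` shape lives in lit.rs,
// which is not shown here, and is assumed.
fn one_plus_two() -> syn::ConstExpr {
    use syn::{BinOp, ConstExpr, IntTy, Lit};
    ConstExpr::Binary(
        BinOp::Add,
        Box::new(ConstExpr::Lit(Lit::Int(1, IntTy::Unsuffixed))),
        Box::new(ConstExpr::Lit(Lit::Int(2, IntTy::Unsuffixed))),
    )
}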

third_party/rust/syn-0.10.8/src/data.rs vendored Normal file

@@ -0,0 +1,245 @@
use super::*;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Variant {
pub ident: Ident,
pub attrs: Vec<Attribute>,
pub data: VariantData,
/// Explicit discriminant, e.g. `Foo = 1`
pub discriminant: Option<ConstExpr>,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum VariantData {
Struct(Vec<Field>),
Tuple(Vec<Field>),
Unit,
}
impl VariantData {
pub fn fields(&self) -> &[Field] {
match *self {
VariantData::Struct(ref fields) |
VariantData::Tuple(ref fields) => fields,
VariantData::Unit => &[],
}
}
pub fn fields_mut(&mut self) -> &mut [Field] {
match *self {
VariantData::Struct(ref mut fields) |
VariantData::Tuple(ref mut fields) => fields,
VariantData::Unit => &mut [],
}
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Field {
pub ident: Option<Ident>,
pub vis: Visibility,
pub attrs: Vec<Attribute>,
pub ty: Ty,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Visibility {
Public,
Crate,
Restricted(Box<Path>),
Inherited,
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use WhereClause;
use attr::parsing::outer_attr;
use constant::parsing::const_expr;
use generics::parsing::where_clause;
use ident::parsing::ident;
use ty::parsing::{path, ty};
named!(pub struct_body -> (WhereClause, VariantData), alt!(
do_parse!(
wh: where_clause >>
body: struct_like_body >>
(wh, VariantData::Struct(body))
)
|
do_parse!(
body: tuple_like_body >>
wh: where_clause >>
punct!(";") >>
(wh, VariantData::Tuple(body))
)
|
do_parse!(
wh: where_clause >>
punct!(";") >>
(wh, VariantData::Unit)
)
));
named!(pub enum_body -> (WhereClause, Vec<Variant>), do_parse!(
wh: where_clause >>
punct!("{") >>
variants: terminated_list!(punct!(","), variant) >>
punct!("}") >>
(wh, variants)
));
named!(variant -> Variant, do_parse!(
attrs: many0!(outer_attr) >>
id: ident >>
data: alt!(
struct_like_body => { VariantData::Struct }
|
tuple_like_body => { VariantData::Tuple }
|
epsilon!() => { |_| VariantData::Unit }
) >>
disr: option!(preceded!(punct!("="), const_expr)) >>
(Variant {
ident: id,
attrs: attrs,
data: data,
discriminant: disr,
})
));
named!(pub struct_like_body -> Vec<Field>, do_parse!(
punct!("{") >>
fields: terminated_list!(punct!(","), struct_field) >>
punct!("}") >>
(fields)
));
named!(tuple_like_body -> Vec<Field>, do_parse!(
punct!("(") >>
fields: terminated_list!(punct!(","), tuple_field) >>
punct!(")") >>
(fields)
));
named!(struct_field -> Field, do_parse!(
attrs: many0!(outer_attr) >>
vis: visibility >>
id: ident >>
punct!(":") >>
ty: ty >>
(Field {
ident: Some(id),
vis: vis,
attrs: attrs,
ty: ty,
})
));
named!(tuple_field -> Field, do_parse!(
attrs: many0!(outer_attr) >>
vis: visibility >>
ty: ty >>
(Field {
ident: None,
vis: vis,
attrs: attrs,
ty: ty,
})
));
named!(pub visibility -> Visibility, alt!(
do_parse!(
keyword!("pub") >>
punct!("(") >>
keyword!("crate") >>
punct!(")") >>
(Visibility::Crate)
)
|
do_parse!(
keyword!("pub") >>
punct!("(") >>
restricted: path >>
punct!(")") >>
(Visibility::Restricted(Box::new(restricted)))
)
|
keyword!("pub") => { |_| Visibility::Public }
|
epsilon!() => { |_| Visibility::Inherited }
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
impl ToTokens for Variant {
fn to_tokens(&self, tokens: &mut Tokens) {
for attr in &self.attrs {
attr.to_tokens(tokens);
}
self.ident.to_tokens(tokens);
self.data.to_tokens(tokens);
if let Some(ref disr) = self.discriminant {
tokens.append("=");
disr.to_tokens(tokens);
}
}
}
impl ToTokens for VariantData {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
VariantData::Struct(ref fields) => {
tokens.append("{");
tokens.append_separated(fields, ",");
tokens.append("}");
}
VariantData::Tuple(ref fields) => {
tokens.append("(");
tokens.append_separated(fields, ",");
tokens.append(")");
}
VariantData::Unit => {}
}
}
}
impl ToTokens for Field {
fn to_tokens(&self, tokens: &mut Tokens) {
for attr in &self.attrs {
attr.to_tokens(tokens);
}
self.vis.to_tokens(tokens);
if let Some(ref ident) = self.ident {
ident.to_tokens(tokens);
tokens.append(":");
}
self.ty.to_tokens(tokens);
}
}
impl ToTokens for Visibility {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
Visibility::Public => tokens.append("pub"),
Visibility::Crate => {
tokens.append("pub");
tokens.append("(");
tokens.append("crate");
tokens.append(")");
}
Visibility::Restricted(ref path) => {
tokens.append("pub");
tokens.append("(");
path.to_tokens(tokens);
tokens.append(")");
}
Visibility::Inherited => {}
}
}
}
}
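// Editorial sketch, not part of the vendored file: a unit enum variant with an
// explicit discriminant, `Foo = 1`, using the types above (the `Lit::Int`
// shape is assumed from lit.rs, which is not shown here).
fn unit_variant() -> syn::Variant {
    let variant = syn::Variant {
        ident: "Foo".into(),
        attrs: Vec::new(),
        data: syn::VariantData::Unit,
        discriminant: Some(syn::ConstExpr::Lit(syn::Lit::Int(1, syn::IntTy::Unsuffixed))),
    };
    assert!(variant.data.fields().is_empty()); // `Unit` variants have no fields
    variant
}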


@@ -0,0 +1,292 @@
use std::{char, str};
use std::num::ParseIntError;
use nom::IResult;
pub fn cooked_string(input: &str) -> IResult<&str, String> {
let mut s = String::new();
let mut chars = input.char_indices().peekable();
while let Some((byte_offset, ch)) = chars.next() {
match ch {
'"' => {
return IResult::Done(&input[byte_offset..], s);
}
'\r' => {
if let Some((_, '\n')) = chars.next() {
s.push('\n');
} else {
break;
}
}
'\\' => {
match chars.next() {
Some((_, 'x')) => {
match backslash_x_char(&mut chars) {
Some(ch) => s.push(ch),
None => break,
}
}
Some((_, 'n')) => s.push('\n'),
Some((_, 'r')) => s.push('\r'),
Some((_, 't')) => s.push('\t'),
Some((_, '\\')) => s.push('\\'),
Some((_, '0')) => s.push('\0'),
Some((_, 'u')) => {
match backslash_u(&mut chars) {
Some(ch) => s.push(ch),
None => break,
}
}
Some((_, '\'')) => s.push('\''),
Some((_, '"')) => s.push('"'),
Some((_, '\n')) | Some((_, '\r')) => {
while let Some(&(_, ch)) = chars.peek() {
if ch.is_whitespace() {
chars.next();
} else {
break;
}
}
}
_ => break,
}
}
ch => {
s.push(ch);
}
}
}
IResult::Error
}
pub fn cooked_byte_string(mut input: &str) -> IResult<&str, Vec<u8>> {
let mut vec = Vec::new();
let mut bytes = input.bytes().enumerate();
'outer: while let Some((offset, b)) = bytes.next() {
match b {
b'"' => {
return IResult::Done(&input[offset..], vec);
}
b'\r' => {
if let Some((_, b'\n')) = bytes.next() {
vec.push(b'\n');
} else {
break;
}
}
b'\\' => {
match bytes.next() {
Some((_, b'x')) => {
match backslash_x_byte(&mut bytes) {
Some(b) => vec.push(b),
None => break,
}
}
Some((_, b'n')) => vec.push(b'\n'),
Some((_, b'r')) => vec.push(b'\r'),
Some((_, b't')) => vec.push(b'\t'),
Some((_, b'\\')) => vec.push(b'\\'),
Some((_, b'0')) => vec.push(b'\0'),
Some((_, b'\'')) => vec.push(b'\''),
Some((_, b'"')) => vec.push(b'"'),
Some((newline, b'\n')) |
Some((newline, b'\r')) => {
let rest = &input[newline + 1..];
for (offset, ch) in rest.char_indices() {
if !ch.is_whitespace() {
input = &rest[offset..];
bytes = input.bytes().enumerate();
continue 'outer;
}
}
break;
}
_ => break,
}
}
b if b < 0x80 => {
vec.push(b);
}
_ => break,
}
}
IResult::Error
}
pub fn cooked_char(input: &str) -> IResult<&str, char> {
let mut chars = input.char_indices();
let ch = match chars.next().map(|(_, ch)| ch) {
Some('\\') => {
match chars.next().map(|(_, ch)| ch) {
Some('x') => backslash_x_char(&mut chars),
Some('n') => Some('\n'),
Some('r') => Some('\r'),
Some('t') => Some('\t'),
Some('\\') => Some('\\'),
Some('0') => Some('\0'),
Some('u') => backslash_u(&mut chars),
Some('\'') => Some('\''),
Some('"') => Some('"'),
_ => None,
}
}
ch => ch,
};
match ch {
Some(ch) => IResult::Done(chars.as_str(), ch),
None => IResult::Error,
}
}
pub fn cooked_byte(input: &str) -> IResult<&str, u8> {
let mut bytes = input.bytes().enumerate();
let b = match bytes.next().map(|(_, b)| b) {
Some(b'\\') => {
match bytes.next().map(|(_, b)| b) {
Some(b'x') => backslash_x_byte(&mut bytes),
Some(b'n') => Some(b'\n'),
Some(b'r') => Some(b'\r'),
Some(b't') => Some(b'\t'),
Some(b'\\') => Some(b'\\'),
Some(b'0') => Some(b'\0'),
Some(b'\'') => Some(b'\''),
Some(b'"') => Some(b'"'),
_ => None,
}
}
b => b,
};
match b {
Some(b) => {
match bytes.next() {
Some((offset, _)) => IResult::Done(&input[offset..], b),
None => IResult::Done("", b),
}
}
None => IResult::Error,
}
}
pub fn raw_string(input: &str) -> IResult<&str, (String, usize)> {
let mut chars = input.char_indices();
let mut n = 0;
while let Some((byte_offset, ch)) = chars.next() {
match ch {
'"' => {
n = byte_offset;
break;
}
'#' => {}
_ => return IResult::Error,
}
}
let mut s = String::new();
for (byte_offset, ch) in chars {
match ch {
'"' if input[byte_offset + 1..].starts_with(&input[..n]) => {
let rest = &input[byte_offset + 1 + n..];
return IResult::Done(rest, (s, n));
}
'\r' => {}
_ => s.push(ch),
}
}
IResult::Error
}
macro_rules! next_ch {
($chars:ident @ $pat:pat $(| $rest:pat)*) => {
match $chars.next() {
Some((_, ch)) => match ch {
$pat $(| $rest)* => ch,
_ => return None,
},
None => return None,
}
};
}
trait FromStrRadix: Sized {
fn from_str_radix(src: &str, radix: u32) -> Result<Self, ParseIntError>;
}
impl FromStrRadix for u8 {
fn from_str_radix(src: &str, radix: u32) -> Result<Self, ParseIntError> {
u8::from_str_radix(src, radix)
}
}
impl FromStrRadix for u32 {
fn from_str_radix(src: &str, radix: u32) -> Result<Self, ParseIntError> {
u32::from_str_radix(src, radix)
}
}
macro_rules! from_hex {
($($ch:ident)+) => {{
let hex_bytes = &[$($ch as u8),*];
let hex_str = str::from_utf8(hex_bytes).unwrap();
FromStrRadix::from_str_radix(hex_str, 16).unwrap()
}};
}
#[cfg_attr(feature = "clippy", allow(diverging_sub_expression))]
fn backslash_x_char<I>(chars: &mut I) -> Option<char>
where I: Iterator<Item = (usize, char)>
{
let a = next_ch!(chars @ '0'...'7');
let b = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
char::from_u32(from_hex!(a b))
}
#[cfg_attr(feature = "clippy", allow(diverging_sub_expression))]
fn backslash_x_byte<I>(chars: &mut I) -> Option<u8>
where I: Iterator<Item = (usize, u8)>
{
let a = next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
let b = next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
Some(from_hex!(a b))
}
#[cfg_attr(feature = "clippy", allow(diverging_sub_expression, many_single_char_names))]
fn backslash_u<I>(chars: &mut I) -> Option<char>
where I: Iterator<Item = (usize, char)>
{
next_ch!(chars @ '{');
let a = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
let b = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
if b == '}' {
return char::from_u32(from_hex!(a));
}
let c = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
if c == '}' {
return char::from_u32(from_hex!(a b));
}
let d = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
if d == '}' {
return char::from_u32(from_hex!(a b c));
}
let e = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
if e == '}' {
return char::from_u32(from_hex!(a b c d));
}
let f = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '}');
if f == '}' {
return char::from_u32(from_hex!(a b c d e));
}
next_ch!(chars @ '}');
char::from_u32(from_hex!(a b c d e f))
}
#[test]
fn test_cooked_string() {
let input = "\\x62 \\\n \\u{7} \\u{64} \\u{bf5} \\u{12ba} \\u{1F395} \\u{102345}\"";
let expected = "\x62 \u{7} \u{64} \u{bf5} \u{12ba} \u{1F395} \u{102345}";
assert_eq!(cooked_string(input), IResult::Done("\"", expected.to_string()));
}
#[test]
fn test_cooked_byte_string() {
let input = "\\x62 \\\n \\xEF\"";
let expected = b"\x62 \xEF";
assert_eq!(cooked_byte_string(input), IResult::Done("\"", expected.to_vec()));
}

third_party/rust/syn-0.10.8/src/expr.rs vendored Normal file

File diff suppressed because it is too large.

@@ -0,0 +1,446 @@
use super::*;
/// Represents lifetimes and type parameters attached to a declaration
/// of a function, enum, trait, etc.
#[derive(Debug, Clone, Eq, PartialEq, Default, Hash)]
pub struct Generics {
pub lifetimes: Vec<LifetimeDef>,
pub ty_params: Vec<TyParam>,
pub where_clause: WhereClause,
}
#[cfg(feature = "printing")]
/// Returned by `Generics::split_for_impl`.
#[derive(Debug)]
pub struct ImplGenerics<'a>(&'a Generics);
#[cfg(feature = "printing")]
/// Returned by `Generics::split_for_impl`.
#[derive(Debug)]
pub struct TyGenerics<'a>(&'a Generics);
impl Generics {
#[cfg(feature = "printing")]
/// Split a type's generics into the pieces required for impl'ing a trait
/// for that type.
///
/// ```
/// # extern crate syn;
/// # #[macro_use]
/// # extern crate quote;
/// # fn main() {
/// # let generics: syn::Generics = Default::default();
/// # let name = syn::Ident::new("MyType");
/// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
/// quote! {
/// impl #impl_generics MyTrait for #name #ty_generics #where_clause {
/// // ...
/// }
/// }
/// # ;
/// # }
/// ```
pub fn split_for_impl(&self) -> (ImplGenerics, TyGenerics, &WhereClause) {
(ImplGenerics(self), TyGenerics(self), &self.where_clause)
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Lifetime {
pub ident: Ident,
}
impl Lifetime {
pub fn new<T: Into<Ident>>(t: T) -> Self {
let id = Ident::new(t);
if !id.as_ref().starts_with('\'') {
panic!("lifetime name must start with apostrophe as in \"'a\", \
got {:?}",
id.as_ref());
}
Lifetime { ident: id }
}
}
/// A lifetime definition, e.g. `'a: 'b+'c+'d`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct LifetimeDef {
pub attrs: Vec<Attribute>,
pub lifetime: Lifetime,
pub bounds: Vec<Lifetime>,
}
impl LifetimeDef {
pub fn new<T: Into<Ident>>(t: T) -> Self {
LifetimeDef {
attrs: Vec::new(),
lifetime: Lifetime::new(t),
bounds: Vec::new(),
}
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct TyParam {
pub attrs: Vec<Attribute>,
pub ident: Ident,
pub bounds: Vec<TyParamBound>,
pub default: Option<Ty>,
}
/// The AST represents all type param bounds as types.
/// `typeck::collect::compute_bounds` matches these against
/// the "special" built-in traits (see `middle::lang_items`) and
/// detects Copy, Send and Sync.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum TyParamBound {
Trait(PolyTraitRef, TraitBoundModifier),
Region(Lifetime),
}
/// A modifier on a bound, currently this is only used for `?Sized`, where the
/// modifier is `Maybe`. Negative bounds should also be handled here.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum TraitBoundModifier {
None,
Maybe,
}
/// A `where` clause in a definition
#[derive(Debug, Clone, Eq, PartialEq, Default, Hash)]
pub struct WhereClause {
pub predicates: Vec<WherePredicate>,
}
impl WhereClause {
pub fn none() -> Self {
WhereClause { predicates: Vec::new() }
}
}
/// A single predicate in a `where` clause
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum WherePredicate {
/// A type binding, e.g. `for<'c> Foo: Send+Clone+'c`
BoundPredicate(WhereBoundPredicate),
/// A lifetime predicate, e.g. `'a: 'b+'c`
RegionPredicate(WhereRegionPredicate),
}
/// A type bound.
///
/// E.g. `for<'c> Foo: Send+Clone+'c`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct WhereBoundPredicate {
/// Any lifetimes from a `for` binding
pub bound_lifetimes: Vec<LifetimeDef>,
/// The type being bounded
pub bounded_ty: Ty,
/// Trait and lifetime bounds (`Clone+Send+'static`)
pub bounds: Vec<TyParamBound>,
}
/// A lifetime predicate.
///
/// E.g. `'a: 'b+'c`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct WhereRegionPredicate {
pub lifetime: Lifetime,
pub bounds: Vec<Lifetime>,
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use attr::parsing::outer_attr;
use ident::parsing::ident;
use ty::parsing::{ty, poly_trait_ref};
named!(pub generics -> Generics, map!(
alt!(
do_parse!(
punct!("<") >>
lifetimes: separated_list!(punct!(","), lifetime_def) >>
ty_params: opt_vec!(preceded!(
cond!(!lifetimes.is_empty(), punct!(",")),
separated_nonempty_list!(punct!(","), ty_param)
)) >>
cond!(!lifetimes.is_empty() || !ty_params.is_empty(), option!(punct!(","))) >>
punct!(">") >>
(lifetimes, ty_params)
)
|
epsilon!() => { |_| (Vec::new(), Vec::new()) }
),
|(lifetimes, ty_params)| Generics {
lifetimes: lifetimes,
ty_params: ty_params,
where_clause: Default::default(),
}
));
named!(pub lifetime -> Lifetime, preceded!(
punct!("'"),
alt!(
map!(ident, |id| Lifetime {
ident: format!("'{}", id).into(),
})
|
map!(keyword!("static"), |_| Lifetime {
ident: "'static".into(),
})
)
));
named!(pub lifetime_def -> LifetimeDef, do_parse!(
attrs: many0!(outer_attr) >>
life: lifetime >>
bounds: opt_vec!(preceded!(
punct!(":"),
separated_list!(punct!("+"), lifetime)
)) >>
(LifetimeDef {
attrs: attrs,
lifetime: life,
bounds: bounds,
})
));
named!(pub bound_lifetimes -> Vec<LifetimeDef>, opt_vec!(do_parse!(
keyword!("for") >>
punct!("<") >>
lifetimes: terminated_list!(punct!(","), lifetime_def) >>
punct!(">") >>
(lifetimes)
)));
named!(ty_param -> TyParam, do_parse!(
attrs: many0!(outer_attr) >>
id: ident >>
bounds: opt_vec!(preceded!(
punct!(":"),
separated_nonempty_list!(punct!("+"), ty_param_bound)
)) >>
default: option!(preceded!(
punct!("="),
ty
)) >>
(TyParam {
attrs: attrs,
ident: id,
bounds: bounds,
default: default,
})
));
named!(pub ty_param_bound -> TyParamBound, alt!(
preceded!(punct!("?"), poly_trait_ref) => {
|poly| TyParamBound::Trait(poly, TraitBoundModifier::Maybe)
}
|
lifetime => { TyParamBound::Region }
|
poly_trait_ref => {
|poly| TyParamBound::Trait(poly, TraitBoundModifier::None)
}
));
named!(pub where_clause -> WhereClause, alt!(
do_parse!(
keyword!("where") >>
predicates: separated_nonempty_list!(punct!(","), where_predicate) >>
option!(punct!(",")) >>
(WhereClause { predicates: predicates })
)
|
epsilon!() => { |_| Default::default() }
));
named!(where_predicate -> WherePredicate, alt!(
do_parse!(
ident: lifetime >>
bounds: opt_vec!(preceded!(
punct!(":"),
separated_list!(punct!("+"), lifetime)
)) >>
(WherePredicate::RegionPredicate(WhereRegionPredicate {
lifetime: ident,
bounds: bounds,
}))
)
|
do_parse!(
bound_lifetimes: bound_lifetimes >>
bounded_ty: ty >>
punct!(":") >>
bounds: separated_nonempty_list!(punct!("+"), ty_param_bound) >>
(WherePredicate::BoundPredicate(WhereBoundPredicate {
bound_lifetimes: bound_lifetimes,
bounded_ty: bounded_ty,
bounds: bounds,
}))
)
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use attr::FilterAttrs;
use quote::{Tokens, ToTokens};
impl ToTokens for Generics {
fn to_tokens(&self, tokens: &mut Tokens) {
let has_lifetimes = !self.lifetimes.is_empty();
let has_ty_params = !self.ty_params.is_empty();
if has_lifetimes || has_ty_params {
tokens.append("<");
tokens.append_separated(&self.lifetimes, ",");
if has_lifetimes && has_ty_params {
tokens.append(",");
}
tokens.append_separated(&self.ty_params, ",");
tokens.append(">");
}
}
}
impl<'a> ToTokens for ImplGenerics<'a> {
fn to_tokens(&self, tokens: &mut Tokens) {
let has_lifetimes = !self.0.lifetimes.is_empty();
let has_ty_params = !self.0.ty_params.is_empty();
if has_lifetimes || has_ty_params {
tokens.append("<");
tokens.append_separated(&self.0.lifetimes, ",");
// Leave off the type parameter defaults
for (i, ty_param) in self.0.ty_params.iter().enumerate() {
if i > 0 || has_lifetimes {
tokens.append(",");
}
tokens.append_all(ty_param.attrs.outer());
ty_param.ident.to_tokens(tokens);
if !ty_param.bounds.is_empty() {
tokens.append(":");
tokens.append_separated(&ty_param.bounds, "+");
}
}
tokens.append(">");
}
}
}
impl<'a> ToTokens for TyGenerics<'a> {
fn to_tokens(&self, tokens: &mut Tokens) {
let has_lifetimes = !self.0.lifetimes.is_empty();
let has_ty_params = !self.0.ty_params.is_empty();
if has_lifetimes || has_ty_params {
tokens.append("<");
// Leave off the lifetime bounds and attributes
let lifetimes = self.0.lifetimes.iter().map(|ld| &ld.lifetime);
tokens.append_separated(lifetimes, ",");
if has_lifetimes && has_ty_params {
tokens.append(",");
}
// Leave off the type parameter bounds, defaults, and attributes
let ty_params = self.0.ty_params.iter().map(|tp| &tp.ident);
tokens.append_separated(ty_params, ",");
tokens.append(">");
}
}
}
impl ToTokens for Lifetime {
fn to_tokens(&self, tokens: &mut Tokens) {
self.ident.to_tokens(tokens);
}
}
impl ToTokens for LifetimeDef {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append_all(self.attrs.outer());
self.lifetime.to_tokens(tokens);
if !self.bounds.is_empty() {
tokens.append(":");
tokens.append_separated(&self.bounds, "+");
}
}
}
impl ToTokens for TyParam {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append_all(self.attrs.outer());
self.ident.to_tokens(tokens);
if !self.bounds.is_empty() {
tokens.append(":");
tokens.append_separated(&self.bounds, "+");
}
if let Some(ref default) = self.default {
tokens.append("=");
default.to_tokens(tokens);
}
}
}
impl ToTokens for TyParamBound {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
TyParamBound::Region(ref lifetime) => lifetime.to_tokens(tokens),
TyParamBound::Trait(ref trait_ref, modifier) => {
match modifier {
TraitBoundModifier::None => {}
TraitBoundModifier::Maybe => tokens.append("?"),
}
trait_ref.to_tokens(tokens);
}
}
}
}
impl ToTokens for WhereClause {
fn to_tokens(&self, tokens: &mut Tokens) {
if !self.predicates.is_empty() {
tokens.append("where");
tokens.append_separated(&self.predicates, ",");
}
}
}
impl ToTokens for WherePredicate {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
WherePredicate::BoundPredicate(ref predicate) => {
predicate.to_tokens(tokens);
}
WherePredicate::RegionPredicate(ref predicate) => {
predicate.to_tokens(tokens);
}
}
}
}
impl ToTokens for WhereBoundPredicate {
fn to_tokens(&self, tokens: &mut Tokens) {
if !self.bound_lifetimes.is_empty() {
tokens.append("for");
tokens.append("<");
tokens.append_separated(&self.bound_lifetimes, ",");
tokens.append(">");
}
self.bounded_ty.to_tokens(tokens);
if !self.bounds.is_empty() {
tokens.append(":");
tokens.append_separated(&self.bounds, "+");
}
}
}
impl ToTokens for WhereRegionPredicate {
fn to_tokens(&self, tokens: &mut Tokens) {
self.lifetime.to_tokens(tokens);
if !self.bounds.is_empty() {
tokens.append(":");
tokens.append_separated(&self.bounds, "+");
}
}
}
}
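// Editorial sketch, not part of the vendored file: `Lifetime::new` insists on
// the leading apostrophe (it panics otherwise, see above), and a fresh
// `LifetimeDef` carries no bounds or attributes.
fn lifetime_sketch() -> syn::LifetimeDef {
    let lt = syn::Lifetime::new("'a");
    assert_eq!(lt.ident.as_ref(), "'a");
    let def = syn::LifetimeDef::new("'a"); // `'a` with no bounds yet
    assert!(def.bounds.is_empty() && def.attrs.is_empty());
    def
}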

third_party/rust/syn-0.10.8/src/ident.rs vendored Normal file

@@ -0,0 +1,129 @@
use std::borrow::Cow;
use std::fmt::{self, Display};
#[derive(Debug, Clone, Eq, Hash)]
pub struct Ident(String);
impl Ident {
pub fn new<T: Into<Ident>>(t: T) -> Self {
t.into()
}
}
impl<'a> From<&'a str> for Ident {
fn from(s: &str) -> Self {
Ident(s.to_owned())
}
}
impl<'a> From<Cow<'a, str>> for Ident {
fn from(s: Cow<'a, str>) -> Self {
Ident(s.into_owned())
}
}
impl From<String> for Ident {
fn from(s: String) -> Self {
Ident(s)
}
}
impl From<usize> for Ident {
fn from(u: usize) -> Self {
Ident(u.to_string())
}
}
impl AsRef<str> for Ident {
fn as_ref(&self) -> &str {
&self.0
}
}
impl Display for Ident {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
self.0.fmt(formatter)
}
}
impl<T: ?Sized> PartialEq<T> for Ident
where T: AsRef<str>
{
fn eq(&self, other: &T) -> bool {
self.0 == other.as_ref()
}
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use nom::IResult;
use space::skip_whitespace;
use unicode_xid::UnicodeXID;
pub fn ident(input: &str) -> IResult<&str, Ident> {
let (rest, id) = match word(input) {
IResult::Done(rest, id) => (rest, id),
IResult::Error => return IResult::Error,
};
match id.as_ref() {
// From https://doc.rust-lang.org/grammar.html#keywords
"abstract" | "alignof" | "as" | "become" | "box" | "break" | "const" | "continue" |
"crate" | "do" | "else" | "enum" | "extern" | "false" | "final" | "fn" | "for" |
"if" | "impl" | "in" | "let" | "loop" | "macro" | "match" | "mod" | "move" |
"mut" | "offsetof" | "override" | "priv" | "proc" | "pub" | "pure" | "ref" |
"return" | "Self" | "self" | "sizeof" | "static" | "struct" | "super" | "trait" |
"true" | "type" | "typeof" | "unsafe" | "unsized" | "use" | "virtual" | "where" |
"while" | "yield" => IResult::Error,
_ => IResult::Done(rest, id),
}
}
pub fn word(mut input: &str) -> IResult<&str, Ident> {
input = skip_whitespace(input);
let mut chars = input.char_indices();
match chars.next() {
Some((_, ch)) if UnicodeXID::is_xid_start(ch) || ch == '_' => {}
_ => return IResult::Error,
}
while let Some((i, ch)) = chars.next() {
if !UnicodeXID::is_xid_continue(ch) {
return IResult::Done(&input[i..], input[..i].into());
}
}
IResult::Done("", input.into())
}
#[cfg(feature = "full")]
pub fn wordlike(mut input: &str) -> IResult<&str, Ident> {
input = skip_whitespace(input);
for (i, ch) in input.char_indices() {
if !UnicodeXID::is_xid_start(ch) && !UnicodeXID::is_xid_continue(ch) {
return if i == 0 {
IResult::Error
} else {
IResult::Done(&input[i..], input[..i].into())
};
}
}
IResult::Done("", input.into())
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
impl ToTokens for Ident {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.as_ref())
}
}
}
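// Editorial sketch, not part of the vendored file, exercising the conversion
// and comparison impls defined above.
fn ident_sketch() {
    let id = syn::Ident::new("my_field");
    assert_eq!(id, "my_field");           // PartialEq<T: AsRef<str>>
    assert_eq!(id.as_ref(), "my_field");  // AsRef<str>
    let index = syn::Ident::from(0usize); // tuple-struct fields are numeric idents
    assert_eq!(index.to_string(), "0");   // Display
}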

1477
third_party/rust/syn-0.10.8/src/item.rs vendored Normal file

File diff suppressed because it is too large Load Diff

57
third_party/rust/syn-0.10.8/src/krate.rs vendored Normal file

@ -0,0 +1,57 @@
use super::*;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Crate {
pub shebang: Option<String>,
pub attrs: Vec<Attribute>,
pub items: Vec<Item>,
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use attr::parsing::inner_attr;
use item::parsing::items;
named!(pub krate -> Crate, do_parse!(
option!(byte_order_mark) >>
shebang: option!(shebang) >>
attrs: many0!(inner_attr) >>
items: items >>
(Crate {
shebang: shebang,
attrs: attrs,
items: items,
})
));
named!(byte_order_mark -> &str, tag!("\u{feff}"));
named!(shebang -> String, do_parse!(
tag!("#!") >>
not!(peek!(tag!("["))) >>
content: take_until!("\n") >>
(format!("#!{}", content))
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use attr::FilterAttrs;
use quote::{Tokens, ToTokens};
impl ToTokens for Crate {
fn to_tokens(&self, tokens: &mut Tokens) {
if let Some(ref shebang) = self.shebang {
tokens.append(&format!("{}\n", shebang));
}
for attr in self.attrs.inner() {
attr.to_tokens(tokens);
}
for item in &self.items {
item.to_tokens(tokens);
}
}
}
}
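Under the "full" feature this parser backs `syn::parse_crate` (see src/lib.rs later in this diff). An illustrative sketch of the shebang and inner-attribute handling above (the sample source and assertions are made up):

// Requires building syn with the "full" feature.
extern crate syn;

fn main() {
    let src = "#!/usr/bin/env run-cargo-script\n#![allow(dead_code)]\nstruct S;";
    let krate = syn::parse_crate(src).expect("failed to parse crate");
    assert_eq!(krate.shebang, Some("#!/usr/bin/env run-cargo-script".to_string()));
    assert_eq!(krate.attrs.len(), 1); // the inner #![allow(dead_code)] attribute
    assert_eq!(krate.items.len(), 1); // struct S;
}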

170
third_party/rust/syn-0.10.8/src/lib.rs vendored Normal file

@ -0,0 +1,170 @@
#![cfg_attr(feature = "clippy", feature(plugin))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#[cfg(feature = "printing")]
#[macro_use]
extern crate quote;
#[cfg(feature = "pretty")]
extern crate syntex_syntax as syntax;
#[cfg(feature = "parsing")]
extern crate unicode_xid;
#[cfg(feature = "parsing")]
#[macro_use]
mod nom;
#[cfg(feature = "parsing")]
#[macro_use]
mod helper;
#[cfg(feature = "aster")]
pub mod aster;
mod attr;
pub use attr::{Attribute, AttrStyle, MetaItem, NestedMetaItem};
mod constant;
pub use constant::ConstExpr;
mod data;
pub use data::{Field, Variant, VariantData, Visibility};
#[cfg(feature = "parsing")]
mod escape;
#[cfg(feature = "full")]
mod expr;
#[cfg(feature = "full")]
pub use expr::{Arm, BindingMode, Block, BlockCheckMode, CaptureBy, Expr, ExprKind, FieldPat,
FieldValue, Local, MacStmtStyle, Pat, RangeLimits, Stmt};
mod generics;
pub use generics::{Generics, Lifetime, LifetimeDef, TraitBoundModifier, TyParam, TyParamBound,
WhereBoundPredicate, WhereClause, WherePredicate, WhereRegionPredicate};
#[cfg(feature = "printing")]
pub use generics::{ImplGenerics, TyGenerics};
mod ident;
pub use ident::Ident;
#[cfg(feature = "full")]
mod item;
#[cfg(feature = "full")]
pub use item::{Constness, Defaultness, FnArg, FnDecl, ForeignItemKind, ForeignItem, ForeignMod,
ImplItem, ImplItemKind, ImplPolarity, Item, ItemKind, MethodSig, PathListItem,
TraitItem, TraitItemKind, ViewPath};
#[cfg(feature = "full")]
mod krate;
#[cfg(feature = "full")]
pub use krate::Crate;
mod lit;
pub use lit::{FloatTy, IntTy, Lit, StrStyle};
#[cfg(feature = "full")]
mod mac;
#[cfg(feature = "full")]
pub use mac::{BinOpToken, DelimToken, Delimited, Mac, Token, TokenTree};
mod macro_input;
pub use macro_input::{Body, MacroInput};
mod op;
pub use op::{BinOp, UnOp};
#[cfg(feature = "expand")]
mod registry;
#[cfg(feature = "expand")]
pub use registry::{CustomDerive, Expanded, Registry};
#[cfg(feature = "parsing")]
mod space;
mod ty;
pub use ty::{Abi, AngleBracketedParameterData, BareFnArg, BareFnTy, FunctionRetTy, MutTy,
Mutability, ParenthesizedParameterData, Path, PathParameters, PathSegment,
PolyTraitRef, QSelf, Ty, TypeBinding, Unsafety};
#[cfg(feature = "visit")]
pub mod visit;
#[cfg(feature = "parsing")]
pub use parsing::*;
#[cfg(feature = "parsing")]
mod parsing {
use super::*;
use {generics, ident, macro_input, space, ty};
use nom::IResult;
#[cfg(feature = "full")]
use {expr, item, krate, mac};
pub fn parse_macro_input(input: &str) -> Result<MacroInput, String> {
unwrap("macro input", macro_input::parsing::macro_input, input)
}
#[cfg(feature = "full")]
pub fn parse_crate(input: &str) -> Result<Crate, String> {
unwrap("crate", krate::parsing::krate, input)
}
#[cfg(feature = "full")]
pub fn parse_item(input: &str) -> Result<Item, String> {
unwrap("item", item::parsing::item, input)
}
#[cfg(feature = "full")]
pub fn parse_items(input: &str) -> Result<Vec<Item>, String> {
unwrap("items", item::parsing::items, input)
}
#[cfg(feature = "full")]
pub fn parse_expr(input: &str) -> Result<Expr, String> {
unwrap("expression", expr::parsing::expr, input)
}
pub fn parse_type(input: &str) -> Result<Ty, String> {
unwrap("type", ty::parsing::ty, input)
}
pub fn parse_path(input: &str) -> Result<Path, String> {
unwrap("path", ty::parsing::path, input)
}
pub fn parse_where_clause(input: &str) -> Result<WhereClause, String> {
unwrap("where clause", generics::parsing::where_clause, input)
}
#[cfg(feature = "full")]
pub fn parse_token_trees(input: &str) -> Result<Vec<TokenTree>, String> {
unwrap("token trees", mac::parsing::token_trees, input)
}
pub fn parse_ident(input: &str) -> Result<Ident, String> {
unwrap("identifier", ident::parsing::ident, input)
}
fn unwrap<T>(name: &'static str,
f: fn(&str) -> IResult<&str, T>,
input: &str)
-> Result<T, String> {
match f(input) {
IResult::Done(mut rest, t) => {
rest = space::skip_whitespace(rest);
if rest.is_empty() {
Ok(t)
} else if rest.len() == input.len() {
// parsed nothing
Err(format!("failed to parse {}: {:?}", name, rest))
} else {
Err(format!("failed to parse tokens after {}: {:?}", name, rest))
}
}
IResult::Error => Err(format!("failed to parse {}: {:?}", name, input)),
}
}
}
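A short sketch of the string-in, AST-out entry points defined above (illustrative only; default features and the sample inputs are assumptions, the function names come from the code above):

extern crate syn;

fn main() {
    // The whole input must be consumed; otherwise an Err describing the leftover is returned.
    let ty = syn::parse_type("Vec<String>").unwrap();
    println!("{:?}", ty);

    let err = syn::parse_ident("not an ident").unwrap_err();
    // "failed to parse tokens after identifier: ..." per the unwrap() helper above.
    println!("{}", err);
}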

455
third_party/rust/syn-0.10.8/src/lit.rs vendored Normal file

@ -0,0 +1,455 @@
/// Literal kind.
///
/// E.g. `"foo"`, `42`, `12.34` or `bool`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Lit {
/// A string literal (`"foo"`)
Str(String, StrStyle),
/// A byte string (`b"foo"`)
ByteStr(Vec<u8>, StrStyle),
/// A byte char (`b'f'`)
Byte(u8),
/// A character literal (`'a'`)
Char(char),
/// An integer literal (`1`)
Int(u64, IntTy),
/// A float literal (`1f64` or `1E10f64` or `1.0E10`)
Float(String, FloatTy),
/// A boolean literal
Bool(bool),
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum StrStyle {
/// A regular string, like `"foo"`
Cooked,
/// A raw string, like `r##"foo"##`
///
/// The uint is the number of `#` symbols used
Raw(usize),
}
impl From<String> for Lit {
fn from(input: String) -> Lit {
Lit::Str(input, StrStyle::Cooked)
}
}
impl<'a> From<&'a str> for Lit {
fn from(input: &str) -> Lit {
Lit::Str(input.into(), StrStyle::Cooked)
}
}
impl From<Vec<u8>> for Lit {
fn from(input: Vec<u8>) -> Lit {
Lit::ByteStr(input, StrStyle::Cooked)
}
}
impl<'a> From<&'a [u8]> for Lit {
fn from(input: &[u8]) -> Lit {
Lit::ByteStr(input.into(), StrStyle::Cooked)
}
}
impl From<char> for Lit {
fn from(input: char) -> Lit {
Lit::Char(input)
}
}
impl From<bool> for Lit {
fn from(input: bool) -> Lit {
Lit::Bool(input)
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum IntTy {
Isize,
I8,
I16,
I32,
I64,
Usize,
U8,
U16,
U32,
U64,
Unsuffixed,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum FloatTy {
F32,
F64,
Unsuffixed,
}
macro_rules! impl_from_for_lit {
(Int, [$($rust_type:ty => $syn_type:expr),+]) => {
$(
impl From<$rust_type> for Lit {
fn from(input: $rust_type) -> Lit {
Lit::Int(input as u64, $syn_type)
}
}
)+
};
(Float, [$($rust_type:ty => $syn_type:expr),+]) => {
$(
impl From<$rust_type> for Lit {
fn from(input: $rust_type) -> Lit {
Lit::Float(format!("{}", input), $syn_type)
}
}
)+
};
}
impl_from_for_lit! {Int, [
isize => IntTy::Isize,
i8 => IntTy::I8,
i16 => IntTy::I16,
i32 => IntTy::I32,
i64 => IntTy::I64,
usize => IntTy::Usize,
u8 => IntTy::U8,
u16 => IntTy::U16,
u32 => IntTy::U32,
u64 => IntTy::U64
]}
impl_from_for_lit! {Float, [
f32 => FloatTy::F32,
f64 => FloatTy::F64
]}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use escape::{cooked_byte, cooked_byte_string, cooked_char, cooked_string, raw_string};
use space::skip_whitespace;
use nom::IResult;
use unicode_xid::UnicodeXID;
named!(pub lit -> Lit, alt!(
string
|
byte_string
|
byte
|
character
|
float // must be before int
|
int => { |(value, ty)| Lit::Int(value, ty) }
|
boolean
));
named!(string -> Lit, alt!(
quoted_string => { |s| Lit::Str(s, StrStyle::Cooked) }
|
preceded!(
punct!("r"),
raw_string
) => { |(s, n)| Lit::Str(s, StrStyle::Raw(n)) }
));
named!(pub quoted_string -> String, delimited!(
punct!("\""),
cooked_string,
tag!("\"")
));
named!(byte_string -> Lit, alt!(
delimited!(
punct!("b\""),
cooked_byte_string,
tag!("\"")
) => { |vec| Lit::ByteStr(vec, StrStyle::Cooked) }
|
preceded!(
punct!("br"),
raw_string
) => { |(s, n): (String, _)| Lit::ByteStr(s.into_bytes(), StrStyle::Raw(n)) }
));
named!(byte -> Lit, do_parse!(
punct!("b") >>
tag!("'") >>
b: cooked_byte >>
tag!("'") >>
(Lit::Byte(b))
));
named!(character -> Lit, do_parse!(
punct!("'") >>
ch: cooked_char >>
tag!("'") >>
(Lit::Char(ch))
));
named!(float -> Lit, do_parse!(
value: float_string >>
suffix: alt!(
tag!("f32") => { |_| FloatTy::F32 }
|
tag!("f64") => { |_| FloatTy::F64 }
|
epsilon!() => { |_| FloatTy::Unsuffixed }
) >>
(Lit::Float(value, suffix))
));
named!(pub int -> (u64, IntTy), tuple!(
digits,
alt!(
tag!("isize") => { |_| IntTy::Isize }
|
tag!("i8") => { |_| IntTy::I8 }
|
tag!("i16") => { |_| IntTy::I16 }
|
tag!("i32") => { |_| IntTy::I32 }
|
tag!("i64") => { |_| IntTy::I64 }
|
tag!("usize") => { |_| IntTy::Usize }
|
tag!("u8") => { |_| IntTy::U8 }
|
tag!("u16") => { |_| IntTy::U16 }
|
tag!("u32") => { |_| IntTy::U32 }
|
tag!("u64") => { |_| IntTy::U64 }
|
epsilon!() => { |_| IntTy::Unsuffixed }
)
));
named!(boolean -> Lit, alt!(
keyword!("true") => { |_| Lit::Bool(true) }
|
keyword!("false") => { |_| Lit::Bool(false) }
));
fn float_string(mut input: &str) -> IResult<&str, String> {
input = skip_whitespace(input);
let mut chars = input.chars().peekable();
match chars.next() {
Some(ch) if ch >= '0' && ch <= '9' => {}
_ => return IResult::Error,
}
let mut len = 1;
let mut has_dot = false;
let mut has_exp = false;
while let Some(&ch) = chars.peek() {
match ch {
'0'...'9' | '_' => {
chars.next();
len += 1;
}
'.' => {
if has_dot {
break;
}
chars.next();
if chars.peek()
.map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
.unwrap_or(false) {
return IResult::Error;
}
len += 1;
has_dot = true;
}
'e' | 'E' => {
chars.next();
len += 1;
has_exp = true;
break;
}
_ => break,
}
}
let rest = &input[len..];
if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
return IResult::Error;
}
if has_exp {
let mut has_exp_value = false;
while let Some(&ch) = chars.peek() {
match ch {
'+' | '-' => {
if has_exp_value {
break;
}
chars.next();
len += 1;
}
'0'...'9' => {
chars.next();
len += 1;
has_exp_value = true;
}
'_' => {
chars.next();
len += 1;
}
_ => break,
}
}
if !has_exp_value {
return IResult::Error;
}
}
IResult::Done(&input[len..], input[..len].replace("_", ""))
}
pub fn digits(mut input: &str) -> IResult<&str, u64> {
input = skip_whitespace(input);
let base = if input.starts_with("0x") {
input = &input[2..];
16
} else if input.starts_with("0o") {
input = &input[2..];
8
} else if input.starts_with("0b") {
input = &input[2..];
2
} else {
10
};
let mut value = 0u64;
let mut len = 0;
let mut empty = true;
for b in input.bytes() {
let digit = match b {
b'0'...b'9' => (b - b'0') as u64,
b'a'...b'f' => 10 + (b - b'a') as u64,
b'A'...b'F' => 10 + (b - b'A') as u64,
b'_' => {
if empty && base == 10 {
return IResult::Error;
}
len += 1;
continue;
}
_ => break,
};
if digit >= base {
return IResult::Error;
}
value = match value.checked_mul(base) {
Some(value) => value,
None => return IResult::Error,
};
value = match value.checked_add(digit) {
Some(value) => value,
None => return IResult::Error,
};
len += 1;
empty = false;
}
if empty {
IResult::Error
} else {
IResult::Done(&input[len..], value)
}
}
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
use std::{ascii, iter};
use std::fmt::{self, Display};
use std::str;
impl ToTokens for Lit {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
Lit::Str(ref s, StrStyle::Cooked) => s.to_tokens(tokens),
Lit::Str(ref s, StrStyle::Raw(n)) => {
tokens.append(&format!("r{delim}\"{string}\"{delim}",
delim = iter::repeat("#").take(n).collect::<String>(),
string = s));
}
Lit::ByteStr(ref v, StrStyle::Cooked) => {
let mut escaped = "b\"".to_string();
for &ch in v.iter() {
match ch {
0 => escaped.push_str(r"\0"),
b'\'' => escaped.push('\''),
_ => escaped.extend(ascii::escape_default(ch).map(|c| c as char)),
}
}
escaped.push('"');
tokens.append(&escaped);
}
Lit::ByteStr(ref vec, StrStyle::Raw(n)) => {
tokens.append(&format!("br{delim}\"{string}\"{delim}",
delim = iter::repeat("#").take(n).collect::<String>(),
string = str::from_utf8(vec).unwrap()));
}
Lit::Byte(b) => {
match b {
0 => tokens.append(r"b'\0'"),
b'\"' => tokens.append("b'\"'"),
_ => {
let mut escaped = "b'".to_string();
escaped.extend(ascii::escape_default(b).map(|c| c as char));
escaped.push('\'');
tokens.append(&escaped);
}
}
}
Lit::Char(ch) => ch.to_tokens(tokens),
Lit::Int(value, ty) => tokens.append(&format!("{}{}", value, ty)),
Lit::Float(ref value, ty) => tokens.append(&format!("{}{}", value, ty)),
Lit::Bool(true) => tokens.append("true"),
Lit::Bool(false) => tokens.append("false"),
}
}
}
impl Display for IntTy {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
IntTy::Isize => formatter.write_str("isize"),
IntTy::I8 => formatter.write_str("i8"),
IntTy::I16 => formatter.write_str("i16"),
IntTy::I32 => formatter.write_str("i32"),
IntTy::I64 => formatter.write_str("i64"),
IntTy::Usize => formatter.write_str("usize"),
IntTy::U8 => formatter.write_str("u8"),
IntTy::U16 => formatter.write_str("u16"),
IntTy::U32 => formatter.write_str("u32"),
IntTy::U64 => formatter.write_str("u64"),
IntTy::Unsuffixed => Ok(()),
}
}
}
impl Display for FloatTy {
fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self {
FloatTy::F32 => formatter.write_str("f32"),
FloatTy::F64 => formatter.write_str("f64"),
FloatTy::Unsuffixed => Ok(()),
}
}
}
}
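The `From` impls and the `Display` impls above cooperate so that a suffix chosen at construction time is re-emitted when printing. An illustrative sketch (assumes the default "parsing" and "printing" features and quote 0.3; the sample values are made up):

extern crate quote;
extern crate syn;

use quote::{ToTokens, Tokens};
use syn::{IntTy, Lit};

fn main() {
    // The From impls above pick the integer suffix for you:
    assert_eq!(Lit::from(7u8), Lit::Int(7, IntTy::U8));
    assert_eq!(Lit::from(true), Lit::Bool(true));

    // Printing re-attaches the suffix via Display for IntTy.
    let mut tokens = Tokens::new();
    Lit::from(7u8).to_tokens(&mut tokens);
    assert_eq!(tokens.as_str(), "7u8");
}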

429
third_party/rust/syn-0.10.8/src/mac.rs vendored Normal file

@ -0,0 +1,429 @@
use super::*;
/// Represents a macro invocation. The Path indicates which macro
/// is being invoked, and the vector of token-trees contains the source
/// of the macro invocation.
///
/// NB: the additional ident for a `macro_rules`-style macro is actually
/// stored in the enclosing item. Oog.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Mac {
pub path: Path,
pub tts: Vec<TokenTree>,
}
/// When the main rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token-tree. This is a very
/// loose structure, such that all sorts of different AST-fragments can
/// be passed to syntax extensions using a uniform type.
///
/// If the syntax extension is an MBE macro, it will attempt to match its
/// LHS token tree against the provided token tree, and if it finds a
/// match, will transcribe the RHS token tree, splicing in any captured
/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
///
/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
/// Nothing special happens to misnamed or misplaced `SubstNt`s.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum TokenTree {
/// A single token
Token(Token),
/// A delimited sequence of token trees
Delimited(Delimited),
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Delimited {
/// The type of delimiter
pub delim: DelimToken,
/// The delimited sequence of token trees
pub tts: Vec<TokenTree>,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Token {
// Expression-operator symbols.
Eq,
Lt,
Le,
EqEq,
Ne,
Ge,
Gt,
AndAnd,
OrOr,
Not,
Tilde,
BinOp(BinOpToken),
BinOpEq(BinOpToken),
// Structural symbols
At,
Dot,
DotDot,
DotDotDot,
Comma,
Semi,
Colon,
ModSep,
RArrow,
LArrow,
FatArrow,
Pound,
Dollar,
Question,
// Literals
Literal(Lit),
// Name components
Ident(Ident),
Underscore,
Lifetime(Ident),
DocComment(String),
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum BinOpToken {
Plus,
Minus,
Star,
Slash,
Percent,
Caret,
And,
Or,
Shl,
Shr,
}
/// A delimiter token
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum DelimToken {
/// A round parenthesis: `(` or `)`
Paren,
/// A square bracket: `[` or `]`
Bracket,
/// A curly brace: `{` or `}`
Brace,
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use Lifetime;
use generics::parsing::lifetime;
use ident::parsing::{ident, word};
use lit::parsing::lit;
use space::{block_comment, whitespace};
named!(pub mac -> Mac, do_parse!(
name: ident >>
punct!("!") >>
body: delimited >>
(Mac {
path: name.into(),
tts: vec![TokenTree::Delimited(body)],
})
));
named!(pub token_trees -> Vec<TokenTree>, many0!(token_tree));
named!(pub delimited -> Delimited, alt!(
delimited!(
punct!("("),
token_trees,
punct!(")")
) => { |tts| Delimited { delim: DelimToken::Paren, tts: tts } }
|
delimited!(
punct!("["),
token_trees,
punct!("]")
) => { |tts| Delimited { delim: DelimToken::Bracket, tts: tts } }
|
delimited!(
punct!("{"),
token_trees,
punct!("}")
) => { |tts| Delimited { delim: DelimToken::Brace, tts: tts } }
));
named!(token_tree -> TokenTree, alt!(
map!(token, TokenTree::Token)
|
map!(delimited, TokenTree::Delimited)
));
named!(token -> Token, alt!(
keyword!("_") => { |_| Token::Underscore }
|
punct!("&&") => { |_| Token::AndAnd } // must be before BinOp
|
punct!("||") => { |_| Token::OrOr } // must be before BinOp
|
punct!("->") => { |_| Token::RArrow } // must be before BinOp
|
punct!("<-") => { |_| Token::LArrow } // must be before Lt
|
punct!("=>") => { |_| Token::FatArrow } // must be before Eq
|
punct!("...") => { |_| Token::DotDotDot } // must be before DotDot
|
punct!("..") => { |_| Token::DotDot } // must be before Dot
|
punct!(".") => { |_| Token::Dot }
|
map!(doc_comment, Token::DocComment) // must be before bin_op
|
map!(bin_op_eq, Token::BinOpEq) // must be before bin_op
|
map!(bin_op, Token::BinOp)
|
map!(lit, Token::Literal)
|
map!(word, Token::Ident)
|
map!(lifetime, |lt: Lifetime| Token::Lifetime(lt.ident))
|
punct!("<=") => { |_| Token::Le }
|
punct!("==") => { |_| Token::EqEq }
|
punct!("!=") => { |_| Token::Ne }
|
punct!(">=") => { |_| Token::Ge }
|
punct!("::") => { |_| Token::ModSep }
|
punct!("=") => { |_| Token::Eq }
|
punct!("<") => { |_| Token::Lt }
|
punct!(">") => { |_| Token::Gt }
|
punct!("!") => { |_| Token::Not }
|
punct!("~") => { |_| Token::Tilde }
|
punct!("@") => { |_| Token::At }
|
punct!(",") => { |_| Token::Comma }
|
punct!(";") => { |_| Token::Semi }
|
punct!(":") => { |_| Token::Colon }
|
punct!("#") => { |_| Token::Pound }
|
punct!("$") => { |_| Token::Dollar }
|
punct!("?") => { |_| Token::Question }
));
named!(bin_op -> BinOpToken, alt!(
punct!("+") => { |_| BinOpToken::Plus }
|
punct!("-") => { |_| BinOpToken::Minus }
|
punct!("*") => { |_| BinOpToken::Star }
|
punct!("/") => { |_| BinOpToken::Slash }
|
punct!("%") => { |_| BinOpToken::Percent }
|
punct!("^") => { |_| BinOpToken::Caret }
|
punct!("&") => { |_| BinOpToken::And }
|
punct!("|") => { |_| BinOpToken::Or }
|
punct!("<<") => { |_| BinOpToken::Shl }
|
punct!(">>") => { |_| BinOpToken::Shr }
));
named!(bin_op_eq -> BinOpToken, alt!(
punct!("+=") => { |_| BinOpToken::Plus }
|
punct!("-=") => { |_| BinOpToken::Minus }
|
punct!("*=") => { |_| BinOpToken::Star }
|
punct!("/=") => { |_| BinOpToken::Slash }
|
punct!("%=") => { |_| BinOpToken::Percent }
|
punct!("^=") => { |_| BinOpToken::Caret }
|
punct!("&=") => { |_| BinOpToken::And }
|
punct!("|=") => { |_| BinOpToken::Or }
|
punct!("<<=") => { |_| BinOpToken::Shl }
|
punct!(">>=") => { |_| BinOpToken::Shr }
));
named!(doc_comment -> String, alt!(
do_parse!(
punct!("//!") >>
content: take_until!("\n") >>
(format!("//!{}", content))
)
|
do_parse!(
option!(whitespace) >>
peek!(tag!("/*!")) >>
com: block_comment >>
(com.to_owned())
)
|
do_parse!(
punct!("///") >>
not!(peek!(tag!("/"))) >>
content: take_until!("\n") >>
(format!("///{}", content))
)
|
do_parse!(
option!(whitespace) >>
peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
com: block_comment >>
(com.to_owned())
)
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
impl ToTokens for Mac {
fn to_tokens(&self, tokens: &mut Tokens) {
self.path.to_tokens(tokens);
tokens.append("!");
for tt in &self.tts {
tt.to_tokens(tokens);
}
}
}
impl ToTokens for TokenTree {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
TokenTree::Token(ref token) => token.to_tokens(tokens),
TokenTree::Delimited(ref delimited) => delimited.to_tokens(tokens),
}
}
}
impl DelimToken {
fn open(&self) -> &'static str {
match *self {
DelimToken::Paren => "(",
DelimToken::Bracket => "[",
DelimToken::Brace => "{",
}
}
fn close(&self) -> &'static str {
match *self {
DelimToken::Paren => ")",
DelimToken::Bracket => "]",
DelimToken::Brace => "}",
}
}
}
impl ToTokens for Delimited {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.delim.open());
for tt in &self.tts {
tt.to_tokens(tokens);
}
tokens.append(self.delim.close());
}
}
impl ToTokens for Token {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
Token::Eq => tokens.append("="),
Token::Lt => tokens.append("<"),
Token::Le => tokens.append("<="),
Token::EqEq => tokens.append("=="),
Token::Ne => tokens.append("!="),
Token::Ge => tokens.append(">="),
Token::Gt => tokens.append(">"),
Token::AndAnd => tokens.append("&&"),
Token::OrOr => tokens.append("||"),
Token::Not => tokens.append("!"),
Token::Tilde => tokens.append("~"),
Token::BinOp(binop) => tokens.append(binop.op()),
Token::BinOpEq(binop) => tokens.append(binop.assign_op()),
Token::At => tokens.append("@"),
Token::Dot => tokens.append("."),
Token::DotDot => tokens.append(".."),
Token::DotDotDot => tokens.append("..."),
Token::Comma => tokens.append(","),
Token::Semi => tokens.append(";"),
Token::Colon => tokens.append(":"),
Token::ModSep => tokens.append("::"),
Token::RArrow => tokens.append("->"),
Token::LArrow => tokens.append("<-"),
Token::FatArrow => tokens.append("=>"),
Token::Pound => tokens.append("#"),
Token::Dollar => tokens.append("$"),
Token::Question => tokens.append("?"),
Token::Literal(ref lit) => lit.to_tokens(tokens),
Token::Ident(ref ident) |
Token::Lifetime(ref ident) => ident.to_tokens(tokens),
Token::Underscore => tokens.append("_"),
Token::DocComment(ref com) => {
tokens.append(&format!("{}\n", com));
}
}
}
}
impl BinOpToken {
fn op(&self) -> &'static str {
match *self {
BinOpToken::Plus => "+",
BinOpToken::Minus => "-",
BinOpToken::Star => "*",
BinOpToken::Slash => "/",
BinOpToken::Percent => "%",
BinOpToken::Caret => "^",
BinOpToken::And => "&",
BinOpToken::Or => "|",
BinOpToken::Shl => "<<",
BinOpToken::Shr => ">>",
}
}
fn assign_op(&self) -> &'static str {
match *self {
BinOpToken::Plus => "+=",
BinOpToken::Minus => "-=",
BinOpToken::Star => "*=",
BinOpToken::Slash => "/=",
BinOpToken::Percent => "%=",
BinOpToken::Caret => "^=",
BinOpToken::And => "&=",
BinOpToken::Or => "|=",
BinOpToken::Shl => "<<=",
BinOpToken::Shr => ">>=",
}
}
}
impl ToTokens for BinOpToken {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.op());
}
}
}
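With the "full" feature the parsers above back `syn::parse_token_trees`. An illustrative sketch of the resulting token-tree shape (the sample input is made up):

// Requires building syn with the "full" feature.
extern crate syn;

use syn::{DelimToken, Token, TokenTree};

fn main() {
    let tts = syn::parse_token_trees("foo ! ( 1 , 2 )").unwrap();
    assert_eq!(tts.len(), 3); // ident `foo`, `!`, and one delimited group
    assert_eq!(tts[1], TokenTree::Token(Token::Not));
    match tts[2] {
        TokenTree::Delimited(ref d) => {
            assert_eq!(d.delim, DelimToken::Paren);
            assert_eq!(d.tts.len(), 3); // literal 1, comma, literal 2
        }
        _ => panic!("expected a delimited group"),
    }
}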

192
third_party/rust/syn-0.10.8/src/op.rs vendored Normal file

@ -0,0 +1,192 @@
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum BinOp {
/// The `+` operator (addition)
Add,
/// The `-` operator (subtraction)
Sub,
/// The `*` operator (multiplication)
Mul,
/// The `/` operator (division)
Div,
/// The `%` operator (modulus)
Rem,
/// The `&&` operator (logical and)
And,
/// The `||` operator (logical or)
Or,
/// The `^` operator (bitwise xor)
BitXor,
/// The `&` operator (bitwise and)
BitAnd,
/// The `|` operator (bitwise or)
BitOr,
/// The `<<` operator (shift left)
Shl,
/// The `>>` operator (shift right)
Shr,
/// The `==` operator (equality)
Eq,
/// The `<` operator (less than)
Lt,
/// The `<=` operator (less than or equal to)
Le,
/// The `!=` operator (not equal to)
Ne,
/// The `>=` operator (greater than or equal to)
Ge,
/// The `>` operator (greater than)
Gt,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum UnOp {
/// The `*` operator for dereferencing
Deref,
/// The `!` operator for logical inversion
Not,
/// The `-` operator for negation
Neg,
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
named!(pub binop -> BinOp, alt!(
punct!("&&") => { |_| BinOp::And }
|
punct!("||") => { |_| BinOp::Or }
|
punct!("<<") => { |_| BinOp::Shl }
|
punct!(">>") => { |_| BinOp::Shr }
|
punct!("==") => { |_| BinOp::Eq }
|
punct!("<=") => { |_| BinOp::Le }
|
punct!("!=") => { |_| BinOp::Ne }
|
punct!(">=") => { |_| BinOp::Ge }
|
punct!("+") => { |_| BinOp::Add }
|
punct!("-") => { |_| BinOp::Sub }
|
punct!("*") => { |_| BinOp::Mul }
|
punct!("/") => { |_| BinOp::Div }
|
punct!("%") => { |_| BinOp::Rem }
|
punct!("^") => { |_| BinOp::BitXor }
|
punct!("&") => { |_| BinOp::BitAnd }
|
punct!("|") => { |_| BinOp::BitOr }
|
punct!("<") => { |_| BinOp::Lt }
|
punct!(">") => { |_| BinOp::Gt }
));
#[cfg(feature = "full")]
named!(pub assign_op -> BinOp, alt!(
punct!("+=") => { |_| BinOp::Add }
|
punct!("-=") => { |_| BinOp::Sub }
|
punct!("*=") => { |_| BinOp::Mul }
|
punct!("/=") => { |_| BinOp::Div }
|
punct!("%=") => { |_| BinOp::Rem }
|
punct!("^=") => { |_| BinOp::BitXor }
|
punct!("&=") => { |_| BinOp::BitAnd }
|
punct!("|=") => { |_| BinOp::BitOr }
|
punct!("<<=") => { |_| BinOp::Shl }
|
punct!(">>=") => { |_| BinOp::Shr }
));
named!(pub unop -> UnOp, alt!(
punct!("*") => { |_| UnOp::Deref }
|
punct!("!") => { |_| UnOp::Not }
|
punct!("-") => { |_| UnOp::Neg }
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
impl BinOp {
pub fn op(&self) -> &'static str {
match *self {
BinOp::Add => "+",
BinOp::Sub => "-",
BinOp::Mul => "*",
BinOp::Div => "/",
BinOp::Rem => "%",
BinOp::And => "&&",
BinOp::Or => "||",
BinOp::BitXor => "^",
BinOp::BitAnd => "&",
BinOp::BitOr => "|",
BinOp::Shl => "<<",
BinOp::Shr => ">>",
BinOp::Eq => "==",
BinOp::Lt => "<",
BinOp::Le => "<=",
BinOp::Ne => "!=",
BinOp::Ge => ">=",
BinOp::Gt => ">",
}
}
pub fn assign_op(&self) -> Option<&'static str> {
match *self {
BinOp::Add => Some("+="),
BinOp::Sub => Some("-="),
BinOp::Mul => Some("*="),
BinOp::Div => Some("/="),
BinOp::Rem => Some("%="),
BinOp::BitXor => Some("^="),
BinOp::BitAnd => Some("&="),
BinOp::BitOr => Some("|="),
BinOp::Shl => Some("<<="),
BinOp::Shr => Some(">>="),
_ => None,
}
}
}
impl ToTokens for BinOp {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.op());
}
}
impl UnOp {
pub fn op(&self) -> &'static str {
match *self {
UnOp::Deref => "*",
UnOp::Not => "!",
UnOp::Neg => "-",
}
}
}
impl ToTokens for UnOp {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append(self.op());
}
}
}
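A small illustrative sketch of the operator tables above (the default "printing" feature provides `op()` and `assign_op()`; the assertions are mine):

extern crate syn;

use syn::BinOp;

fn main() {
    assert_eq!(BinOp::Add.op(), "+");
    assert_eq!(BinOp::Shl.assign_op(), Some("<<="));
    // Comparison and lazy-boolean operators have no compound-assignment form.
    assert_eq!(BinOp::And.assign_op(), None);
    assert_eq!(BinOp::Eq.assign_op(), None);
}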

826
third_party/rust/syn-0.10.8/src/ty.rs vendored Normal file

@ -0,0 +1,826 @@
use super::*;
/// The different kinds of types recognized by the compiler
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Ty {
/// A variable-length array (`[T]`)
Slice(Box<Ty>),
/// A fixed length array (`[T; n]`)
Array(Box<Ty>, ConstExpr),
/// A raw pointer (`*const T` or `*mut T`)
Ptr(Box<MutTy>),
/// A reference (`&'a T` or `&'a mut T`)
Rptr(Option<Lifetime>, Box<MutTy>),
/// A bare function (e.g. `fn(usize) -> bool`)
BareFn(Box<BareFnTy>),
/// The never type (`!`)
Never,
/// A tuple (`(A, B, C, D, ...)`)
Tup(Vec<Ty>),
/// A path (`module::module::...::Type`), optionally
/// "qualified", e.g. `<Vec<T> as SomeTrait>::SomeType`.
///
/// Type parameters are stored in the Path itself
Path(Option<QSelf>, Path),
/// Something like `A+B`. Note that `B` must always be a path.
ObjectSum(Box<Ty>, Vec<TyParamBound>),
/// A type like `for<'a> Foo<&'a Bar>`
PolyTraitRef(Vec<TyParamBound>),
/// An `impl TraitA+TraitB` type.
ImplTrait(Vec<TyParamBound>),
/// No-op; kept solely so that we can pretty-print faithfully
Paren(Box<Ty>),
/// TyKind::Infer means the type should be inferred instead of it having been
/// specified. This can appear anywhere in a type.
Infer,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct MutTy {
pub ty: Ty,
pub mutability: Mutability,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Mutability {
Mutable,
Immutable,
}
/// A "Path" is essentially Rust's notion of a name.
///
/// It's represented as a sequence of identifiers,
/// along with a bunch of supporting information.
///
/// E.g. `std::cmp::PartialEq`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Path {
pub global: bool,
pub segments: Vec<PathSegment>,
}
impl<T> From<T> for Path
where T: Into<PathSegment>
{
fn from(segment: T) -> Self {
Path {
global: false,
segments: vec![segment.into()],
}
}
}
/// A segment of a path: an identifier, an optional lifetime, and a set of types.
///
/// E.g. `std`, `String` or `Box<T>`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PathSegment {
pub ident: Ident,
pub parameters: PathParameters,
}
impl<T> From<T> for PathSegment
where T: Into<Ident>
{
fn from(ident: T) -> Self {
PathSegment {
ident: ident.into(),
parameters: PathParameters::none(),
}
}
}
/// Parameters of a path segment.
///
/// E.g. `<A, B>` as in `Foo<A, B>` or `(A, B)` as in `Foo(A, B)`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum PathParameters {
/// The `<'a, A, B, C>` in `foo::bar::baz::<'a, A, B, C>`
AngleBracketed(AngleBracketedParameterData),
/// The `(A, B)` and `C` in `Foo(A, B) -> C`
Parenthesized(ParenthesizedParameterData),
}
impl PathParameters {
pub fn none() -> Self {
PathParameters::AngleBracketed(AngleBracketedParameterData::default())
}
pub fn is_empty(&self) -> bool {
match *self {
PathParameters::AngleBracketed(ref bracketed) => {
bracketed.lifetimes.is_empty() && bracketed.types.is_empty() &&
bracketed.bindings.is_empty()
}
PathParameters::Parenthesized(_) => false,
}
}
}
/// A path like `Foo<'a, T>`
#[derive(Debug, Clone, Eq, PartialEq, Default, Hash)]
pub struct AngleBracketedParameterData {
/// The lifetime parameters for this path segment.
pub lifetimes: Vec<Lifetime>,
/// The type parameters for this path segment, if present.
pub types: Vec<Ty>,
/// Bindings (equality constraints) on associated types, if present.
///
/// E.g., `Foo<A=Bar>`.
pub bindings: Vec<TypeBinding>,
}
/// Bind a type to an associated type: `A=Foo`.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct TypeBinding {
pub ident: Ident,
pub ty: Ty,
}
/// A path like `Foo(A,B) -> C`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct ParenthesizedParameterData {
/// `(A, B)`
pub inputs: Vec<Ty>,
/// `C`
pub output: Option<Ty>,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PolyTraitRef {
/// The `'a` in `<'a> Foo<&'a T>`
pub bound_lifetimes: Vec<LifetimeDef>,
/// The `Foo<&'a T>` in `<'a> Foo<&'a T>`
pub trait_ref: Path,
}
/// The explicit Self type in a "qualified path". The actual
/// path, including the trait and the associated item, is stored
/// separately. `position` represents the index of the associated
/// item qualified with this Self type.
///
/// ```rust,ignore
/// <Vec<T> as a::b::Trait>::AssociatedItem
/// ^~~~~ ~~~~~~~~~~~~~~^
/// ty position = 3
///
/// <Vec<T>>::AssociatedItem
/// ^~~~~ ^
/// ty position = 0
/// ```
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct QSelf {
pub ty: Box<Ty>,
pub position: usize,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct BareFnTy {
pub unsafety: Unsafety,
pub abi: Option<Abi>,
pub lifetimes: Vec<LifetimeDef>,
pub inputs: Vec<BareFnArg>,
pub output: FunctionRetTy,
pub variadic: bool,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Unsafety {
Unsafe,
Normal,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Abi {
Named(String),
Rust,
}
/// An argument in a function type.
///
/// E.g. `bar: usize` as in `fn foo(bar: usize)`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct BareFnArg {
pub name: Option<Ident>,
pub ty: Ty,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum FunctionRetTy {
/// Return type is not specified.
///
/// Functions default to `()` and
/// closures default to inference. Span points to where return
/// type would be inserted.
Default,
/// Everything else
Ty(Ty),
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
#[cfg(feature = "full")]
use ConstExpr;
use constant::parsing::const_expr;
#[cfg(feature = "full")]
use expr::parsing::expr;
use generics::parsing::{lifetime, lifetime_def, ty_param_bound, bound_lifetimes};
use ident::parsing::ident;
use lit::parsing::quoted_string;
use std::str;
named!(pub ty -> Ty, alt!(
ty_paren // must be before ty_tup
|
ty_path // must be before ty_poly_trait_ref
|
ty_vec
|
ty_array
|
ty_ptr
|
ty_rptr
|
ty_bare_fn
|
ty_never
|
ty_tup
|
ty_poly_trait_ref
|
ty_impl_trait
));
named!(ty_vec -> Ty, do_parse!(
punct!("[") >>
elem: ty >>
punct!("]") >>
(Ty::Slice(Box::new(elem)))
));
#[cfg(not(feature = "full"))]
named!(ty_array -> Ty, do_parse!(
punct!("[") >>
elem: ty >>
punct!(";") >>
len: const_expr >>
punct!("]") >>
(Ty::Array(Box::new(elem), len))
));
#[cfg(feature = "full")]
named!(ty_array -> Ty, do_parse!(
punct!("[") >>
elem: ty >>
punct!(";") >>
len: alt!(
terminated!(const_expr, punct!("]"))
|
terminated!(expr, punct!("]")) => { ConstExpr::Other }
) >>
(Ty::Array(Box::new(elem), len))
));
named!(ty_ptr -> Ty, do_parse!(
punct!("*") >>
mutability: alt!(
keyword!("const") => { |_| Mutability::Immutable }
|
keyword!("mut") => { |_| Mutability::Mutable }
) >>
target: ty >>
(Ty::Ptr(Box::new(MutTy {
ty: target,
mutability: mutability,
})))
));
named!(ty_rptr -> Ty, do_parse!(
punct!("&") >>
life: option!(lifetime) >>
mutability: mutability >>
target: ty >>
(Ty::Rptr(life, Box::new(MutTy {
ty: target,
mutability: mutability,
})))
));
named!(ty_bare_fn -> Ty, do_parse!(
lifetimes: opt_vec!(do_parse!(
keyword!("for") >>
punct!("<") >>
lifetimes: terminated_list!(punct!(","), lifetime_def) >>
punct!(">") >>
(lifetimes)
)) >>
unsafety: unsafety >>
abi: option!(abi) >>
keyword!("fn") >>
punct!("(") >>
inputs: separated_list!(punct!(","), fn_arg) >>
trailing_comma: option!(punct!(",")) >>
variadic: option!(cond_reduce!(trailing_comma.is_some(), punct!("..."))) >>
punct!(")") >>
output: option!(preceded!(
punct!("->"),
ty
)) >>
(Ty::BareFn(Box::new(BareFnTy {
unsafety: unsafety,
abi: abi,
lifetimes: lifetimes,
inputs: inputs,
output: match output {
Some(ty) => FunctionRetTy::Ty(ty),
None => FunctionRetTy::Default,
},
variadic: variadic.is_some(),
})))
));
named!(ty_never -> Ty, map!(punct!("!"), |_| Ty::Never));
named!(ty_tup -> Ty, do_parse!(
punct!("(") >>
elems: terminated_list!(punct!(","), ty) >>
punct!(")") >>
(Ty::Tup(elems))
));
named!(ty_path -> Ty, do_parse!(
qpath: qpath >>
parenthesized: cond!(
qpath.1.segments.last().unwrap().parameters == PathParameters::none(),
option!(parenthesized_parameter_data)
) >>
bounds: many0!(preceded!(punct!("+"), ty_param_bound)) >>
({
let (qself, mut path) = qpath;
if let Some(Some(parenthesized)) = parenthesized {
path.segments.last_mut().unwrap().parameters = parenthesized;
}
let path = Ty::Path(qself, path);
if bounds.is_empty() {
path
} else {
Ty::ObjectSum(Box::new(path), bounds)
}
})
));
named!(parenthesized_parameter_data -> PathParameters, do_parse!(
punct!("(") >>
inputs: terminated_list!(punct!(","), ty) >>
punct!(")") >>
output: option!(preceded!(
punct!("->"),
ty
)) >>
(PathParameters::Parenthesized(
ParenthesizedParameterData {
inputs: inputs,
output: output,
},
))
));
named!(pub qpath -> (Option<QSelf>, Path), alt!(
map!(path, |p| (None, p))
|
do_parse!(
punct!("<") >>
this: map!(ty, Box::new) >>
path: option!(preceded!(
keyword!("as"),
path
)) >>
punct!(">") >>
punct!("::") >>
rest: separated_nonempty_list!(punct!("::"), path_segment) >>
({
match path {
Some(mut path) => {
let pos = path.segments.len();
path.segments.extend(rest);
(Some(QSelf { ty: this, position: pos }), path)
}
None => {
(Some(QSelf { ty: this, position: 0 }), Path {
global: false,
segments: rest,
})
}
}
})
)
|
map!(keyword!("self"), |_| (None, "self".into()))
));
named!(ty_poly_trait_ref -> Ty, map!(
separated_nonempty_list!(punct!("+"), ty_param_bound),
Ty::PolyTraitRef
));
named!(ty_impl_trait -> Ty, do_parse!(
keyword!("impl") >>
elem: separated_nonempty_list!(punct!("+"), ty_param_bound) >>
(Ty::ImplTrait(elem))
));
named!(ty_paren -> Ty, do_parse!(
punct!("(") >>
elem: ty >>
punct!(")") >>
(Ty::Paren(Box::new(elem)))
));
named!(pub mutability -> Mutability, alt!(
keyword!("mut") => { |_| Mutability::Mutable }
|
epsilon!() => { |_| Mutability::Immutable }
));
named!(pub path -> Path, do_parse!(
global: option!(punct!("::")) >>
segments: separated_nonempty_list!(punct!("::"), path_segment) >>
(Path {
global: global.is_some(),
segments: segments,
})
));
named!(path_segment -> PathSegment, alt!(
do_parse!(
id: option!(ident) >>
punct!("<") >>
lifetimes: separated_list!(punct!(","), lifetime) >>
types: opt_vec!(preceded!(
cond!(!lifetimes.is_empty(), punct!(",")),
separated_nonempty_list!(
punct!(","),
terminated!(ty, not!(peek!(punct!("="))))
)
)) >>
bindings: opt_vec!(preceded!(
cond!(!lifetimes.is_empty() || !types.is_empty(), punct!(",")),
separated_nonempty_list!(punct!(","), type_binding)
)) >>
cond!(!lifetimes.is_empty() || !types.is_empty() || !bindings.is_empty(), option!(punct!(","))) >>
punct!(">") >>
(PathSegment {
ident: id.unwrap_or_else(|| "".into()),
parameters: PathParameters::AngleBracketed(
AngleBracketedParameterData {
lifetimes: lifetimes,
types: types,
bindings: bindings,
}
),
})
)
|
map!(ident, Into::into)
|
map!(alt!(
keyword!("super")
|
keyword!("self")
|
keyword!("Self")
), Into::into)
));
named!(type_binding -> TypeBinding, do_parse!(
id: ident >>
punct!("=") >>
ty: ty >>
(TypeBinding {
ident: id,
ty: ty,
})
));
named!(pub poly_trait_ref -> PolyTraitRef, do_parse!(
bound_lifetimes: bound_lifetimes >>
trait_ref: path >>
parenthesized: option!(cond_reduce!(
trait_ref.segments.last().unwrap().parameters == PathParameters::none(),
parenthesized_parameter_data
)) >>
({
let mut trait_ref = trait_ref;
if let Some(parenthesized) = parenthesized {
trait_ref.segments.last_mut().unwrap().parameters = parenthesized;
}
PolyTraitRef {
bound_lifetimes: bound_lifetimes,
trait_ref: trait_ref,
}
})
));
named!(pub fn_arg -> BareFnArg, do_parse!(
name: option!(do_parse!(
name: ident >>
punct!(":") >>
not!(peek!(tag!(":"))) >> // not ::
(name)
)) >>
ty: ty >>
(BareFnArg {
name: name,
ty: ty,
})
));
named!(pub unsafety -> Unsafety, alt!(
keyword!("unsafe") => { |_| Unsafety::Unsafe }
|
epsilon!() => { |_| Unsafety::Normal }
));
named!(pub abi -> Abi, do_parse!(
keyword!("extern") >>
name: option!(quoted_string) >>
(match name {
Some(name) => Abi::Named(name),
None => Abi::Rust,
})
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use quote::{Tokens, ToTokens};
impl ToTokens for Ty {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
Ty::Slice(ref inner) => {
tokens.append("[");
inner.to_tokens(tokens);
tokens.append("]");
}
Ty::Array(ref inner, ref len) => {
tokens.append("[");
inner.to_tokens(tokens);
tokens.append(";");
len.to_tokens(tokens);
tokens.append("]");
}
Ty::Ptr(ref target) => {
tokens.append("*");
match target.mutability {
Mutability::Mutable => tokens.append("mut"),
Mutability::Immutable => tokens.append("const"),
}
target.ty.to_tokens(tokens);
}
Ty::Rptr(ref lifetime, ref target) => {
tokens.append("&");
lifetime.to_tokens(tokens);
target.mutability.to_tokens(tokens);
target.ty.to_tokens(tokens);
}
Ty::BareFn(ref func) => {
func.to_tokens(tokens);
}
Ty::Never => {
tokens.append("!");
}
Ty::Tup(ref elems) => {
tokens.append("(");
tokens.append_separated(elems, ",");
if elems.len() == 1 {
tokens.append(",");
}
tokens.append(")");
}
Ty::Path(None, ref path) => {
path.to_tokens(tokens);
}
Ty::Path(Some(ref qself), ref path) => {
tokens.append("<");
qself.ty.to_tokens(tokens);
if qself.position > 0 {
tokens.append("as");
for (i, segment) in path.segments
.iter()
.take(qself.position)
.enumerate() {
if i > 0 || path.global {
tokens.append("::");
}
segment.to_tokens(tokens);
}
}
tokens.append(">");
for segment in path.segments.iter().skip(qself.position) {
tokens.append("::");
segment.to_tokens(tokens);
}
}
Ty::ObjectSum(ref ty, ref bounds) => {
ty.to_tokens(tokens);
for bound in bounds {
tokens.append("+");
bound.to_tokens(tokens);
}
}
Ty::PolyTraitRef(ref bounds) => {
tokens.append_separated(bounds, "+");
}
Ty::ImplTrait(ref bounds) => {
tokens.append("impl");
tokens.append_separated(bounds, "+");
}
Ty::Paren(ref inner) => {
tokens.append("(");
inner.to_tokens(tokens);
tokens.append(")");
}
Ty::Infer => {
tokens.append("_");
}
}
}
}
impl ToTokens for Mutability {
fn to_tokens(&self, tokens: &mut Tokens) {
if let Mutability::Mutable = *self {
tokens.append("mut");
}
}
}
impl ToTokens for Path {
fn to_tokens(&self, tokens: &mut Tokens) {
for (i, segment) in self.segments.iter().enumerate() {
if i > 0 || self.global {
tokens.append("::");
}
segment.to_tokens(tokens);
}
}
}
impl ToTokens for PathSegment {
fn to_tokens(&self, tokens: &mut Tokens) {
self.ident.to_tokens(tokens);
if self.ident.as_ref().is_empty() && self.parameters.is_empty() {
tokens.append("<");
tokens.append(">");
} else {
self.parameters.to_tokens(tokens);
}
}
}
impl ToTokens for PathParameters {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
PathParameters::AngleBracketed(ref parameters) => {
parameters.to_tokens(tokens);
}
PathParameters::Parenthesized(ref parameters) => {
parameters.to_tokens(tokens);
}
}
}
}
impl ToTokens for AngleBracketedParameterData {
fn to_tokens(&self, tokens: &mut Tokens) {
let has_lifetimes = !self.lifetimes.is_empty();
let has_types = !self.types.is_empty();
let has_bindings = !self.bindings.is_empty();
if !has_lifetimes && !has_types && !has_bindings {
return;
}
tokens.append("<");
let mut first = true;
for lifetime in &self.lifetimes {
if !first {
tokens.append(",");
}
lifetime.to_tokens(tokens);
first = false;
}
for ty in &self.types {
if !first {
tokens.append(",");
}
ty.to_tokens(tokens);
first = false;
}
for binding in &self.bindings {
if !first {
tokens.append(",");
}
binding.to_tokens(tokens);
first = false;
}
tokens.append(">");
}
}
impl ToTokens for TypeBinding {
fn to_tokens(&self, tokens: &mut Tokens) {
self.ident.to_tokens(tokens);
tokens.append("=");
self.ty.to_tokens(tokens);
}
}
impl ToTokens for ParenthesizedParameterData {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append("(");
tokens.append_separated(&self.inputs, ",");
tokens.append(")");
if let Some(ref output) = self.output {
tokens.append("->");
output.to_tokens(tokens);
}
}
}
impl ToTokens for PolyTraitRef {
fn to_tokens(&self, tokens: &mut Tokens) {
if !self.bound_lifetimes.is_empty() {
tokens.append("for");
tokens.append("<");
tokens.append_separated(&self.bound_lifetimes, ",");
tokens.append(">");
}
self.trait_ref.to_tokens(tokens);
}
}
impl ToTokens for BareFnTy {
fn to_tokens(&self, tokens: &mut Tokens) {
if !self.lifetimes.is_empty() {
tokens.append("for");
tokens.append("<");
tokens.append_separated(&self.lifetimes, ",");
tokens.append(">");
}
self.unsafety.to_tokens(tokens);
self.abi.to_tokens(tokens);
tokens.append("fn");
tokens.append("(");
tokens.append_separated(&self.inputs, ",");
if self.variadic {
if !self.inputs.is_empty() {
tokens.append(",");
}
tokens.append("...");
}
tokens.append(")");
if let FunctionRetTy::Ty(ref ty) = self.output {
tokens.append("->");
ty.to_tokens(tokens);
}
}
}
impl ToTokens for BareFnArg {
fn to_tokens(&self, tokens: &mut Tokens) {
if let Some(ref name) = self.name {
name.to_tokens(tokens);
tokens.append(":");
}
self.ty.to_tokens(tokens);
}
}
impl ToTokens for Unsafety {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
Unsafety::Unsafe => tokens.append("unsafe"),
Unsafety::Normal => {
// nothing
}
}
}
}
impl ToTokens for Abi {
fn to_tokens(&self, tokens: &mut Tokens) {
tokens.append("extern");
match *self {
Abi::Named(ref named) => named.to_tokens(tokens),
Abi::Rust => {}
}
}
}
}
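An illustrative sketch tying the `Path` conversions and the `QSelf` documentation above to the parser (the sample inputs are made up; default "parsing" feature assumed):

extern crate syn;

use syn::{PathParameters, Ty};

fn main() {
    // A plain path: From<T> for Path builds a single, non-global segment.
    let p: syn::Path = "String".into();
    assert!(!p.global);
    assert_eq!(p.segments.len(), 1);
    assert_eq!(p.segments[0].parameters, PathParameters::none());

    // A qualified path, matching the QSelf documentation above (position = 3).
    match syn::parse_type("<Vec<T> as a::b::Trait>::AssociatedItem").unwrap() {
        Ty::Path(Some(qself), path) => {
            assert_eq!(qself.position, 3);
            assert_eq!(path.segments.len(), 4);
        }
        other => panic!("unexpected type: {:?}", other),
    }
}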

295
third_party/rust/syn-0.10.8/src/visit.rs vendored Normal file

@ -0,0 +1,295 @@
// Adapted from libsyntax.
//! AST walker. Each overridden visit method has full control over what
//! happens with its node; it can do its own traversal of the node's children,
//! call `visit::walk_*` to apply the default traversal algorithm, or prevent
//! deeper traversal by doing nothing.
//!
//! Note: it is an important invariant that the default visitor walks the body
//! of a function in "execution order" (more concretely, reverse post-order
//! with respect to the CFG implied by the AST), meaning that if AST node A may
//! execute before AST node B, then A is visited first. The borrow checker in
//! particular relies on this property.
//!
//! Note: walking an AST before macro expansion is probably a bad idea. For
//! instance, a walker looking for item names in a module will miss all of
//! those that are created by the expansion of a macro.
use super::*;
/// Each method of the Visitor trait is a hook to be potentially
/// overridden. Each method's default implementation recursively visits
/// the substructure of the input via the corresponding `walk` method;
/// e.g. the `visit_ty` method by default calls `visit::walk_ty`.
///
/// If you want to ensure that your code handles every variant
/// explicitly, you need to override each method. (And you also need
/// to monitor future changes to `Visitor` in case a new method with a
/// new default implementation gets introduced.)
pub trait Visitor: Sized {
fn visit_ident(&mut self, _ident: &Ident) {}
fn visit_macro_input(&mut self, macro_input: &MacroInput) {
walk_macro_input(self, macro_input)
}
fn visit_ty(&mut self, ty: &Ty) {
walk_ty(self, ty)
}
fn visit_generics(&mut self, generics: &Generics) {
walk_generics(self, generics)
}
fn visit_ty_param_bound(&mut self, bound: &TyParamBound) {
walk_ty_param_bound(self, bound)
}
fn visit_poly_trait_ref(&mut self, trait_ref: &PolyTraitRef, modifier: &TraitBoundModifier) {
walk_poly_trait_ref(self, trait_ref, modifier)
}
fn visit_variant_data(&mut self, data: &VariantData, _ident: &Ident, _generics: &Generics) {
walk_variant_data(self, data)
}
fn visit_field(&mut self, field: &Field) {
walk_field(self, field)
}
fn visit_variant(&mut self, variant: &Variant, generics: &Generics) {
walk_variant(self, variant, generics)
}
fn visit_lifetime(&mut self, _lifetime: &Lifetime) {}
fn visit_lifetime_def(&mut self, lifetime: &LifetimeDef) {
walk_lifetime_def(self, lifetime)
}
fn visit_path(&mut self, path: &Path) {
walk_path(self, path)
}
fn visit_path_segment(&mut self, path_segment: &PathSegment) {
walk_path_segment(self, path_segment)
}
fn visit_path_parameters(&mut self, path_parameters: &PathParameters) {
walk_path_parameters(self, path_parameters)
}
fn visit_assoc_type_binding(&mut self, type_binding: &TypeBinding) {
walk_assoc_type_binding(self, type_binding)
}
fn visit_attribute(&mut self, _attr: &Attribute) {}
fn visit_fn_ret_ty(&mut self, ret_ty: &FunctionRetTy) {
walk_fn_ret_ty(self, ret_ty)
}
fn visit_const_expr(&mut self, expr: &ConstExpr) {
walk_const_expr(self, expr)
}
fn visit_lit(&mut self, _lit: &Lit) {}
}
#[macro_export]
macro_rules! walk_list {
($visitor: expr, $method: ident, $list: expr) => {
for elem in $list {
$visitor.$method(elem)
}
};
($visitor: expr, $method: ident, $list: expr, $($extra_args: expr),*) => {
for elem in $list {
$visitor.$method(elem, $($extra_args,)*)
}
}
}
pub fn walk_opt_ident<V: Visitor>(visitor: &mut V, opt_ident: &Option<Ident>) {
if let Some(ref ident) = *opt_ident {
visitor.visit_ident(ident);
}
}
pub fn walk_lifetime_def<V: Visitor>(visitor: &mut V, lifetime_def: &LifetimeDef) {
visitor.visit_lifetime(&lifetime_def.lifetime);
walk_list!(visitor, visit_lifetime, &lifetime_def.bounds);
}
pub fn walk_poly_trait_ref<V>(visitor: &mut V, trait_ref: &PolyTraitRef, _: &TraitBoundModifier)
where V: Visitor
{
walk_list!(visitor, visit_lifetime_def, &trait_ref.bound_lifetimes);
visitor.visit_path(&trait_ref.trait_ref);
}
pub fn walk_macro_input<V: Visitor>(visitor: &mut V, macro_input: &MacroInput) {
visitor.visit_ident(&macro_input.ident);
visitor.visit_generics(&macro_input.generics);
match macro_input.body {
Body::Enum(ref variants) => {
walk_list!(visitor, visit_variant, variants, &macro_input.generics);
}
Body::Struct(ref variant_data) => {
visitor.visit_variant_data(variant_data, &macro_input.ident, &macro_input.generics);
}
}
walk_list!(visitor, visit_attribute, &macro_input.attrs);
}
pub fn walk_variant<V>(visitor: &mut V, variant: &Variant, generics: &Generics)
where V: Visitor
{
visitor.visit_ident(&variant.ident);
visitor.visit_variant_data(&variant.data, &variant.ident, generics);
walk_list!(visitor, visit_attribute, &variant.attrs);
}
pub fn walk_ty<V: Visitor>(visitor: &mut V, ty: &Ty) {
match *ty {
Ty::Slice(ref inner) |
Ty::Paren(ref inner) => visitor.visit_ty(inner),
Ty::Ptr(ref mutable_type) => visitor.visit_ty(&mutable_type.ty),
Ty::Rptr(ref opt_lifetime, ref mutable_type) => {
walk_list!(visitor, visit_lifetime, opt_lifetime);
visitor.visit_ty(&mutable_type.ty)
}
Ty::Never | Ty::Infer => {}
Ty::Tup(ref tuple_element_types) => {
walk_list!(visitor, visit_ty, tuple_element_types);
}
Ty::BareFn(ref bare_fn) => {
walk_list!(visitor, visit_lifetime_def, &bare_fn.lifetimes);
for argument in &bare_fn.inputs {
walk_opt_ident(visitor, &argument.name);
visitor.visit_ty(&argument.ty)
}
visitor.visit_fn_ret_ty(&bare_fn.output)
}
Ty::Path(ref maybe_qself, ref path) => {
if let Some(ref qself) = *maybe_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path);
}
Ty::ObjectSum(ref inner, ref bounds) => {
visitor.visit_ty(inner);
walk_list!(visitor, visit_ty_param_bound, bounds);
}
Ty::Array(ref inner, ref len) => {
visitor.visit_ty(inner);
visitor.visit_const_expr(len);
}
Ty::PolyTraitRef(ref bounds) |
Ty::ImplTrait(ref bounds) => {
walk_list!(visitor, visit_ty_param_bound, bounds);
}
}
}
pub fn walk_path<V: Visitor>(visitor: &mut V, path: &Path) {
for segment in &path.segments {
visitor.visit_path_segment(segment);
}
}
pub fn walk_path_segment<V: Visitor>(visitor: &mut V, segment: &PathSegment) {
visitor.visit_ident(&segment.ident);
visitor.visit_path_parameters(&segment.parameters);
}
pub fn walk_path_parameters<V>(visitor: &mut V, path_parameters: &PathParameters)
where V: Visitor
{
match *path_parameters {
PathParameters::AngleBracketed(ref data) => {
walk_list!(visitor, visit_ty, &data.types);
walk_list!(visitor, visit_lifetime, &data.lifetimes);
walk_list!(visitor, visit_assoc_type_binding, &data.bindings);
}
PathParameters::Parenthesized(ref data) => {
walk_list!(visitor, visit_ty, &data.inputs);
walk_list!(visitor, visit_ty, &data.output);
}
}
}
pub fn walk_assoc_type_binding<V: Visitor>(visitor: &mut V, type_binding: &TypeBinding) {
visitor.visit_ident(&type_binding.ident);
visitor.visit_ty(&type_binding.ty);
}
pub fn walk_ty_param_bound<V: Visitor>(visitor: &mut V, bound: &TyParamBound) {
match *bound {
TyParamBound::Trait(ref ty, ref modifier) => {
visitor.visit_poly_trait_ref(ty, modifier);
}
TyParamBound::Region(ref lifetime) => {
visitor.visit_lifetime(lifetime);
}
}
}
pub fn walk_generics<V: Visitor>(visitor: &mut V, generics: &Generics) {
for param in &generics.ty_params {
visitor.visit_ident(&param.ident);
walk_list!(visitor, visit_ty_param_bound, &param.bounds);
walk_list!(visitor, visit_ty, &param.default);
}
walk_list!(visitor, visit_lifetime_def, &generics.lifetimes);
for predicate in &generics.where_clause.predicates {
match *predicate {
WherePredicate::BoundPredicate(WhereBoundPredicate { ref bounded_ty,
ref bounds,
ref bound_lifetimes,
.. }) => {
visitor.visit_ty(bounded_ty);
walk_list!(visitor, visit_ty_param_bound, bounds);
walk_list!(visitor, visit_lifetime_def, bound_lifetimes);
}
WherePredicate::RegionPredicate(WhereRegionPredicate { ref lifetime,
ref bounds,
.. }) => {
visitor.visit_lifetime(lifetime);
walk_list!(visitor, visit_lifetime, bounds);
}
}
}
}
pub fn walk_fn_ret_ty<V: Visitor>(visitor: &mut V, ret_ty: &FunctionRetTy) {
if let FunctionRetTy::Ty(ref output_ty) = *ret_ty {
visitor.visit_ty(output_ty)
}
}
pub fn walk_variant_data<V: Visitor>(visitor: &mut V, data: &VariantData) {
walk_list!(visitor, visit_field, data.fields());
}
pub fn walk_field<V: Visitor>(visitor: &mut V, field: &Field) {
walk_opt_ident(visitor, &field.ident);
visitor.visit_ty(&field.ty);
walk_list!(visitor, visit_attribute, &field.attrs);
}
pub fn walk_const_expr<V: Visitor>(visitor: &mut V, len: &ConstExpr) {
match *len {
ConstExpr::Call(ref function, ref args) => {
visitor.visit_const_expr(function);
walk_list!(visitor, visit_const_expr, args);
}
ConstExpr::Binary(_op, ref left, ref right) => {
visitor.visit_const_expr(left);
visitor.visit_const_expr(right);
}
ConstExpr::Unary(_op, ref v) => {
visitor.visit_const_expr(v);
}
ConstExpr::Lit(ref lit) => {
visitor.visit_lit(lit);
}
ConstExpr::Cast(ref expr, ref ty) => {
visitor.visit_const_expr(expr);
visitor.visit_ty(ty);
}
ConstExpr::Path(ref path) => {
visitor.visit_path(path);
}
ConstExpr::Index(ref expr, ref index) => {
visitor.visit_const_expr(expr);
visitor.visit_const_expr(index);
}
ConstExpr::Paren(ref expr) => {
visitor.visit_const_expr(expr);
}
ConstExpr::Other(_) => {}
}
}
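An illustrative `Visitor` that collects identifiers, relying on the default hooks to recurse through the `walk_*` functions above (requires the optional "visit" feature in addition to "parsing"; the collector and sample input are not part of the vendored file):

extern crate syn;

use syn::visit::{self, Visitor};

// Collects every identifier mentioned in a type, in visit order.
struct IdentCollector(Vec<String>);

impl Visitor for IdentCollector {
    fn visit_ident(&mut self, ident: &syn::Ident) {
        self.0.push(ident.as_ref().to_string());
    }
    // Every other hook keeps its default, which recurses via the walk_* functions above.
}

fn main() {
    let ty = syn::parse_type("Vec<Box<T>>").unwrap();
    let mut collector = IdentCollector(Vec::new());
    visit::walk_ty(&mut collector, &ty);
    assert_eq!(collector.0, ["Vec", "Box", "T"]);
}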

third_party/rust/syn/.cargo-checksum.json vendored

@ -1 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"f703ce140afaec1a35ce733f6bc3d0ce45a6256095572d0763c815fbf39f4f11","src/aster/generics.rs":"77eb19443af0dff5debb18d064733cc8721a42ad7e993a33352cdeff2b5f9f85","src/aster/ident.rs":"e9d082664f008a56bd854011310b4258ab072740ba82e57495b6e8a868a5f36b","src/aster/invoke.rs":"2b1b993973ab4f5c8fa6d6a286576b2542edce21fe9904f5133c470c072e6d3f","src/aster/lifetime.rs":"d83f4c1a48e3580caa028cfabde6ace232efc95d70af6dc9cfcca48317db9ad7","src/aster/mod.rs":"12220f73b0021e72b4c50f6a513cff174b9c7267209aa23f183043d96ccc9ab7","src/aster/path.rs":"60865b0f952077307c1a66810a4b6dafde43e76a417a433a8343960e7de474e4","src/aster/qpath.rs":"885c94b29ab8ee45c72a682221e241d1f0dd09c659809fe77279b5dd8a4bc645","src/aster/ty.rs":"90649aad98617c09ffc43a38aeb823a3298c41bf5e10f0ef3500b71c81021c2f","src/aster/ty_param.rs":"7ced1e6ca0c98ef468d507d3f07bfcb1171395cd66ff5c3e1b091fe7e8b9a562","src/aster/where_predicate.rs":"5fb8ec3fcb67bcc1d9bb7b64cf2f5beb601aac6502d6db30c0cdf8641fa248d1","src/attr.rs":"2ba436bdd439511be10baf9ad45226ade678176a7fd45a087367e1ad2b43e07a","src/constant.rs":"90535a2320e0dc8ab623a9bffa770bdf697baef2884a7d9224b31daf422ea5a0","src/data.rs":"0119c67821f846e67d792bea638ae7f7f5d7e2f5e5a0c145d8ba8766d6ddb0f9","src/escape.rs":"e035b1f6ce3255e868fddb62ee90a95a2f3caf2db73786a2b179b92e9e337539","src/expr.rs":"02e8d346bef099974d06d74945be92fe6391111b94154df4981d44f1594d5579","src/generics.rs":"a300acff4c6e61d2fe9344db23f5e176e7abb02529bc348d9180f41ad0a4caf6","src/helper.rs":"9693d5c78f2d627a90d689a5d4bee1061eddcb646ae6dff3b2e4fd7cfbb33845","src/ident.rs":"83142b0107baba3137aad3b7d5c7b468ab53bf837bd9544d117d6644080d2705","src/item.rs":"63f2cd9a01c279405196d90a7d1cc530896157352163fb44f6b2a713657058b8","src/krate.rs":"324073a42389eb1c26a9d0f325b4f1cdd37d00a9bcaf07fdee77af54909a452d","src/lib.rs":"ef584db9ac9b7308224798d3983cbf201df7f0da1735fe5ce408f20fb3df763e","src/lit.rs":"2615fc6041f11b67a7cd62012f36eb215fd1fdf6649b6b64d728625148f53c7b","src/mac.rs":"45c44bd7abcbdaea6572bb4721bdc57b02b967ea9865172fe10e029e51e51a42","src/macro_input.rs":"93b999877879076e1f47502d96aa18aad82117d072044ca9de825c8a9bfa60b8","src/nom.rs":"642149bf322b762e02183ac1fed641df7f03ac53334c869a64707de4e9c5e68c","src/op.rs":"232f84ba605ed50e70ee02169dd551548872135cf56f155637917ec3bf810ce1","src/registry.rs":"b709f2a0f372efd8dec8fd46d6d71fb3b56a0261789e6de048a41a5e70144421","src/space.rs":"de9cb71e831c1d66f0bf2f3f219c3455d1979ca89f89b198d3b324e0cd50faf8","src/ty.rs":"97cfcb904a5fd68a42ebd2e5f86466d92e0785b1491d80c2a8d396ccec1b742a","src/visit.rs":"d7dcf429cc1a05821a66a4b38e7856eec45a9b2215f625d95030c3688eda26ca"},"package":"58fd09df59565db3399efbba34ba8a2fec1307511ebd245d0061ff9d42691673"} 
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"8e2151edfcef10949bbda036a8f636ad3af9fc31b98a830e282ddbc74dbd60c3","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"b2d43372ff5db084b4f2ef5178e1fbdba83e0f05a9cfc298f188cc130e8de794","src/aster/generics.rs":"77eb19443af0dff5debb18d064733cc8721a42ad7e993a33352cdeff2b5f9f85","src/aster/ident.rs":"e9d082664f008a56bd854011310b4258ab072740ba82e57495b6e8a868a5f36b","src/aster/invoke.rs":"2b1b993973ab4f5c8fa6d6a286576b2542edce21fe9904f5133c470c072e6d3f","src/aster/lifetime.rs":"d83f4c1a48e3580caa028cfabde6ace232efc95d70af6dc9cfcca48317db9ad7","src/aster/mod.rs":"12220f73b0021e72b4c50f6a513cff174b9c7267209aa23f183043d96ccc9ab7","src/aster/path.rs":"60865b0f952077307c1a66810a4b6dafde43e76a417a433a8343960e7de474e4","src/aster/qpath.rs":"5ba33af56ccf74f5c516ed542d117d1f6ca9f7dfd1a74d08b4ac50d95666c497","src/aster/ty.rs":"03e5e631f1e04d6fafb24698b4d0529a6154a83d7ffb0a8268a1e5edf849ac55","src/aster/ty_param.rs":"7ced1e6ca0c98ef468d507d3f07bfcb1171395cd66ff5c3e1b091fe7e8b9a562","src/aster/where_predicate.rs":"5fb8ec3fcb67bcc1d9bb7b64cf2f5beb601aac6502d6db30c0cdf8641fa248d1","src/attr.rs":"efe29daa36cebee492101cbbbcb2bcdc629e30cc3122b6046c11a6ee5363cbc4","src/constant.rs":"90535a2320e0dc8ab623a9bffa770bdf697baef2884a7d9224b31daf422ea5a0","src/data.rs":"0119c67821f846e67d792bea638ae7f7f5d7e2f5e5a0c145d8ba8766d6ddb0f9","src/derive.rs":"eca81a0d38d275d7700a683022c4cf8e7ea4469f854ef99a79dc1671831941f3","src/escape.rs":"b474c52ee1b0b7577134b37ab9e57009f4ae4d445363ff54fa40df0f13a4f1d2","src/expr.rs":"ef5ca0168d763ae5043c21586d12bc086ff6fa890f6a6a014d10e138b26df780","src/fold.rs":"68e4bf844aa5bcff0325958a6d1ea235c9c697887a3ef0a2819e0e26ab59d0fc","src/generics.rs":"9d1b16001c5711d092a9fc587b7701ab025b408ce3abc0ea2c5e1ba4d45e36ed","src/ident.rs":"da8e419b0ff6ee4b9e978224795a43d5451d9aeea392ad162b5f32cd43a77df8","src/item.rs":"c91ec1b423877590acd3fa01b094f452ef6b177db6c177056f33caf61f3fe92d","src/krate.rs":"78f89e1f12f5b790d99d88a3a013178585f6715a27eb26f604e72e763a47dfdf","src/lib.rs":"4803728d6f6782c0c7414fe04b8a3522c99e9266cde886ad510f185b871d20d4","src/lit.rs":"c332899502f5d1a9e35300ef5f8173c7c54c288fc1183d56035df68b4c943c70","src/mac.rs":"b3ba8e7531980abecec4a9f86f68ae136c5982617e0e37aaa823d288ba6f5e4e","src/op.rs":"232f84ba605ed50e70ee02169dd551548872135cf56f155637917ec3bf810ce1","src/ty.rs":"88d9eb8a9a2e6cf0ddcff18d7ff76dca4e06e2a14e602bb845e438100baebef0","src/visit.rs":"c949e2414e6299ed976c3e55da58627e430e7657aed00a8a31e9480f1f7340e2"},"package":"0e28da8d02d75d1e58b89258e0741128f0b0d8a8309fb5c627be0fbd37a76c67"}

third_party/rust/syn/Cargo.toml

@ -1,32 +1,31 @@
[package] [package]
name = "syn" name = "syn"
version = "0.10.8" # don't forget to update version in readme for breaking changes version = "0.11.6" # don't forget to update version in readme for breaking changes
authors = ["David Tolnay <dtolnay@gmail.com>"] authors = ["David Tolnay <dtolnay@gmail.com>"]
license = "MIT/Apache-2.0" license = "MIT/Apache-2.0"
description = "Nom parser for Rust source code" description = "Nom parser for Rust source code"
repository = "https://github.com/dtolnay/syn" repository = "https://github.com/dtolnay/syn"
documentation = "https://dtolnay.github.io/syn/syn/" documentation = "https://dtolnay.github.io/syn/syn/"
include = ["Cargo.toml", "src/**/*.rs"] categories = ["development-tools::procedural-macro-helpers"]
include = ["Cargo.toml", "src/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
[features] [features]
default = ["parsing", "printing"] default = ["parsing", "printing"]
aster = [] aster = []
expand = ["full", "parsing", "printing"]
full = [] full = []
parsing = ["unicode-xid"] parsing = ["unicode-xid", "synom"]
pretty = ["syntex_syntax"]
printing = ["quote"] printing = ["quote"]
visit = [] visit = []
fold = []
[dependencies] [dependencies]
clippy = { version = "0.*", optional = true } quote = { version = "0.3", optional = true }
quote = { version = "0.3.0", optional = true }
syntex_syntax = { version = "0.50.0", optional = true }
unicode-xid = { version = "0.0.4", optional = true } unicode-xid = { version = "0.0.4", optional = true }
synom = { version = "0.11", path = "synom", optional = true }
[dev-dependencies] [dev-dependencies]
syntex_pos = "0.50.0" syntex_pos = "0.58"
syntex_syntax = "0.50.0" syntex_syntax = "0.58"
tempdir = "0.3.5" tempdir = "0.3.5"
time = "0.1.35" time = "0.1.35"
walkdir = "1.0.1" walkdir = "1.0.1"

201
third_party/rust/syn/LICENSE-APACHE vendored Normal file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

25
third_party/rust/syn/LICENSE-MIT vendored Normal file

@ -0,0 +1,25 @@
Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

199
third_party/rust/syn/README.md vendored Normal file

@ -0,0 +1,199 @@
Nom parser for Rust source code
===============================
[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://dtolnay.github.io/syn/syn/)
Parse Rust source code without a Syntex dependency, intended for use with
[Macros 1.1](https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md).
Designed for fast compile time.
- Compile time for `syn` (from scratch including all dependencies): **6 seconds**
- Compile time for the `syntex`/`quasi`/`aster` stack: **60+ seconds**
If you get stuck with Macros 1.1 I am happy to provide help even if the issue is
not related to syn. Please file a ticket in this repo.
## Usage with Macros 1.1
```toml
[dependencies]
syn = "0.11"
quote = "0.3"
[lib]
proc-macro = true
```
```rust
extern crate proc_macro;
use proc_macro::TokenStream;
extern crate syn;
#[macro_use]
extern crate quote;
#[proc_macro_derive(MyMacro)]
pub fn my_macro(input: TokenStream) -> TokenStream {
let source = input.to_string();
// Parse the string representation into a syntax tree
let ast = syn::parse_derive_input(&source).unwrap();
// Build the output, possibly using quasi-quotation
let expanded = quote! {
// ...
};
// Parse back to a token stream and return it
expanded.parse().unwrap()
}
```
## Complete example
Suppose we have the following simple trait which returns the number of fields in
a struct:
```rust
trait NumFields {
fn num_fields() -> usize;
}
```
A complete Macros 1.1 implementation of `#[derive(NumFields)]` based on `syn`
and [`quote`](https://github.com/dtolnay/quote) looks like this:
```rust
extern crate proc_macro;
use proc_macro::TokenStream;
extern crate syn;
#[macro_use]
extern crate quote;
#[proc_macro_derive(NumFields)]
pub fn num_fields(input: TokenStream) -> TokenStream {
let source = input.to_string();
// Parse the string representation into a syntax tree
let ast = syn::parse_derive_input(&source).unwrap();
// Build the output
let expanded = expand_num_fields(&ast);
// Return the generated impl as a TokenStream
expanded.parse().unwrap()
}
fn expand_num_fields(ast: &syn::DeriveInput) -> quote::Tokens {
let n = match ast.body {
syn::Body::Struct(ref data) => data.fields().len(),
syn::Body::Enum(_) => panic!("#[derive(NumFields)] can only be used with structs"),
};
// Used in the quasi-quotation below as `#name`
let name = &ast.ident;
// Helper is provided for handling complex generic types correctly and effortlessly
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
quote! {
// The generated impl
impl #impl_generics ::mycrate::NumFields for #name #ty_generics #where_clause {
fn num_fields() -> usize {
#n
}
}
}
}
```
## Testing
Macros 1.1 has a restriction that your proc-macro crate must export nothing but
`proc_macro_derive` functions, and also `proc_macro_derive` procedural macros
cannot be used from the same crate in which they are defined. These restrictions
may be lifted in the future but for now they make writing tests a bit trickier
than for other types of code.
In particular, you will not be able to write test functions like `#[test] fn
it_works() { ... }` in line with your code. Instead, either put tests in a
[`tests` directory](https://doc.rust-lang.org/book/testing.html#the-tests-directory)
or in a separate crate entirely.
Additionally, if your procedural macro implements a particular trait, that trait
must be defined in a separate crate from the procedural macro.
As a concrete example, suppose your procedural macro crate is called `my_derive`
and it implements a trait called `my_crate::MyTrait`. Your unit tests for the
procedural macro can go in `my_derive/tests/test.rs` or into a separate crate
`my_tests/tests/test.rs`. Either way the test would look something like this:
```rust
#[macro_use]
extern crate my_derive;
extern crate my_crate;
use my_crate::MyTrait;
#[test]
fn it_works() {
#[derive(MyTrait)]
struct S { /* ... */ }
/* test the thing */
}
```
## Debugging
When developing a procedural macro it can be helpful to look at what the
generated code looks like. Use `cargo rustc -- -Zunstable-options
--pretty=expanded` or the
[`cargo expand`](https://github.com/dtolnay/cargo-expand) subcommand.
To show the expanded code for some crate that uses your procedural macro, run
`cargo expand` from that crate. To show the expanded code for one of your own
test cases, run `cargo expand --test the_test_case` where the last argument is
the name of the test file without the `.rs` extension.
This write-up by Brandon W Maister discusses debugging in more detail:
[Debugging Rust's new Custom Derive
system](https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/).
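As a quick, low-tech alternative (a minimal sketch reusing the `NumFields` example from above, not an official recommendation), you can also print the generated code from inside the derive itself while developing; the output appears in the compiler's console during `cargo build`:
```rust
#[proc_macro_derive(NumFields)]
pub fn num_fields(input: TokenStream) -> TokenStream {
    let source = input.to_string();
    let ast = syn::parse_derive_input(&source).unwrap();
    let expanded = expand_num_fields(&ast);
    // Dump the generated impl while debugging; remove before publishing.
    println!("{}", expanded);
    expanded.parse().unwrap()
}
```
Remember to drop the `println!` once the macro behaves as expected.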
## Optional features
Syn puts a lot of functionality behind optional features in order to optimize
compile time for the most common use cases. These are the available features and
their effect on compile time. Dependencies are included in the compile times.
Features | Compile time | Functionality
--- | --- | ---
*(none)* | 3 sec | The data structures representing the AST of Rust structs, enums, and types.
parsing | 6 sec | Parsing Rust source code containing structs and enums into an AST.
printing | 4 sec | Printing an AST of structs and enums as Rust source code.
**parsing, printing** | **6 sec** | **This is the default.** Parsing and printing of Rust structs and enums. This is typically what you want for implementing Macros 1.1 custom derives.
full | 4 sec | The data structures representing the full AST of all possible Rust code.
full, parsing | 9 sec | Parsing any valid Rust source code to an AST.
full, printing | 6 sec | Turning an AST into Rust source code.
full, parsing, printing | 11 sec | Parsing and printing any Rust syntax.
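As a rough guide to wiring this up, here is a minimal sketch of a dependent crate's `Cargo.toml` that opts into `full` on top of the defaults (the feature names come from the table above; the rest of the manifest is illustrative):
```toml
[dependencies]
# "parsing" and "printing" are on by default; "full" adds the complete AST.
syn = { version = "0.11", features = ["full"] }
quote = "0.3"
```
Cargo keeps the default features enabled unless `default-features = false` is given, so this selects `full`, `parsing`, and `printing` together.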
## License
Licensed under either of
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.

third_party/rust/syn/src/aster/qpath.rs

@ -65,9 +65,6 @@ impl<F> QPathTyBuilder<F>
where F: Invoke<(QSelf, Path)> where F: Invoke<(QSelf, Path)>
{ {
/// Build a qualified path with a path builder. /// Build a qualified path with a path builder.
// Clippy false positive
// https://github.com/Manishearth/rust-clippy/issues/1285
#[cfg_attr(feature = "clippy", allow(wrong_self_convention))]
pub fn as_(self) -> PathBuilder<Self> { pub fn as_(self) -> PathBuilder<Self> {
PathBuilder::with_callback(self) PathBuilder::with_callback(self)
} }

third_party/rust/syn/src/aster/ty.rs

@ -158,10 +158,6 @@ impl<F> TyBuilder<F>
TyBuilder::with_callback(TyIteratorBuilder(self)) TyBuilder::with_callback(TyIteratorBuilder(self))
} }
pub fn object_sum(self) -> TyBuilder<TyObjectSumBuilder<F>> {
TyBuilder::with_callback(TyObjectSumBuilder { builder: self })
}
pub fn impl_trait(self) -> TyImplTraitTyBuilder<F> { pub fn impl_trait(self) -> TyImplTraitTyBuilder<F> {
TyImplTraitTyBuilder { TyImplTraitTyBuilder {
builder: self, builder: self,
@ -389,90 +385,6 @@ impl<F> Invoke<Ty> for TyIteratorBuilder<F>
// //////////////////////////////////////////////////////////////////////////// // ////////////////////////////////////////////////////////////////////////////
pub struct TyObjectSumBuilder<F> {
builder: TyBuilder<F>,
}
impl<F> Invoke<Ty> for TyObjectSumBuilder<F>
where F: Invoke<Ty>
{
type Result = TyObjectSumTyBuilder<F>;
fn invoke(self, ty: Ty) -> Self::Result {
TyObjectSumTyBuilder {
builder: self.builder,
ty: ty,
bounds: Vec::new(),
}
}
}
pub struct TyObjectSumTyBuilder<F> {
builder: TyBuilder<F>,
ty: Ty,
bounds: Vec<TyParamBound>,
}
impl<F> TyObjectSumTyBuilder<F>
where F: Invoke<Ty>
{
pub fn with_bounds<I>(mut self, iter: I) -> Self
where I: Iterator<Item = TyParamBound>
{
self.bounds.extend(iter);
self
}
pub fn with_bound(mut self, bound: TyParamBound) -> Self {
self.bounds.push(bound);
self
}
pub fn bound(self) -> TyParamBoundBuilder<Self> {
TyParamBoundBuilder::with_callback(self)
}
pub fn with_generics(self, generics: Generics) -> Self {
self.with_lifetimes(generics.lifetimes
.into_iter()
.map(|def| def.lifetime))
}
pub fn with_lifetimes<I, L>(mut self, lifetimes: I) -> Self
where I: Iterator<Item = L>,
L: IntoLifetime
{
for lifetime in lifetimes {
self = self.lifetime(lifetime);
}
self
}
pub fn lifetime<L>(self, lifetime: L) -> Self
where L: IntoLifetime
{
self.bound().lifetime(lifetime)
}
pub fn build(self) -> F::Result {
let bounds = self.bounds;
self.builder.build(Ty::ObjectSum(Box::new(self.ty), bounds))
}
}
impl<F> Invoke<TyParamBound> for TyObjectSumTyBuilder<F>
where F: Invoke<Ty>
{
type Result = Self;
fn invoke(self, bound: TyParamBound) -> Self {
self.with_bound(bound)
}
}
// ////////////////////////////////////////////////////////////////////////////
pub struct TyImplTraitTyBuilder<F> { pub struct TyImplTraitTyBuilder<F> {
builder: TyBuilder<F>, builder: TyBuilder<F>,
bounds: Vec<TyParamBound>, bounds: Vec<TyParamBound>,

third_party/rust/syn/src/attr.rs

@ -99,7 +99,7 @@ pub mod parsing {
use super::*; use super::*;
use ident::parsing::ident; use ident::parsing::ident;
use lit::parsing::lit; use lit::parsing::lit;
use space::{block_comment, whitespace}; use synom::space::{block_comment, whitespace};
#[cfg(feature = "full")] #[cfg(feature = "full")]
named!(pub inner_attr -> Attribute, alt!( named!(pub inner_attr -> Attribute, alt!(
@ -159,7 +159,7 @@ pub mod parsing {
| |
do_parse!( do_parse!(
punct!("///") >> punct!("///") >>
not!(peek!(tag!("/"))) >> not!(tag!("/")) >>
content: take_until!("\n") >> content: take_until!("\n") >>
(Attribute { (Attribute {
style: AttrStyle::Outer, style: AttrStyle::Outer,

110
third_party/rust/syn/src/derive.rs vendored Normal file

@ -0,0 +1,110 @@
use super::*;
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct DeriveInput {
pub ident: Ident,
pub vis: Visibility,
pub attrs: Vec<Attribute>,
pub generics: Generics,
pub body: Body,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Body {
Enum(Vec<Variant>),
Struct(VariantData),
}
#[cfg(feature = "parsing")]
pub mod parsing {
use super::*;
use Generics;
use attr::parsing::outer_attr;
use data::parsing::{visibility, struct_body, enum_body};
use generics::parsing::generics;
use ident::parsing::ident;
named!(pub derive_input -> DeriveInput, do_parse!(
attrs: many0!(outer_attr) >>
vis: visibility >>
which: alt!(keyword!("struct") | keyword!("enum")) >>
id: ident >>
generics: generics >>
item: switch!(value!(which),
"struct" => map!(struct_body, move |(wh, body)| DeriveInput {
ident: id,
vis: vis,
attrs: attrs,
generics: Generics {
where_clause: wh,
.. generics
},
body: Body::Struct(body),
})
|
"enum" => map!(enum_body, move |(wh, body)| DeriveInput {
ident: id,
vis: vis,
attrs: attrs,
generics: Generics {
where_clause: wh,
.. generics
},
body: Body::Enum(body),
})
) >>
(item)
));
}
#[cfg(feature = "printing")]
mod printing {
use super::*;
use attr::FilterAttrs;
use data::VariantData;
use quote::{Tokens, ToTokens};
impl ToTokens for DeriveInput {
fn to_tokens(&self, tokens: &mut Tokens) {
for attr in self.attrs.outer() {
attr.to_tokens(tokens);
}
self.vis.to_tokens(tokens);
match self.body {
Body::Enum(_) => tokens.append("enum"),
Body::Struct(_) => tokens.append("struct"),
}
self.ident.to_tokens(tokens);
self.generics.to_tokens(tokens);
match self.body {
Body::Enum(ref variants) => {
self.generics.where_clause.to_tokens(tokens);
tokens.append("{");
for variant in variants {
variant.to_tokens(tokens);
tokens.append(",");
}
tokens.append("}");
}
Body::Struct(ref variant_data) => {
match *variant_data {
VariantData::Struct(_) => {
self.generics.where_clause.to_tokens(tokens);
variant_data.to_tokens(tokens);
// no semicolon
}
VariantData::Tuple(_) => {
variant_data.to_tokens(tokens);
self.generics.where_clause.to_tokens(tokens);
tokens.append(";");
}
VariantData::Unit => {
self.generics.where_clause.to_tokens(tokens);
tokens.append(";");
}
}
}
}
}
}
}

third_party/rust/syn/src/escape.rs

@ -1,6 +1,6 @@
use std::{char, str}; use std::{char, str};
use std::num::ParseIntError; use std::num::ParseIntError;
use nom::IResult; use synom::IResult;
pub fn cooked_string(input: &str) -> IResult<&str, String> { pub fn cooked_string(input: &str) -> IResult<&str, String> {
let mut s = String::new(); let mut s = String::new();
@ -229,7 +229,7 @@ macro_rules! from_hex {
}}; }};
} }
#[cfg_attr(feature = "clippy", allow(diverging_sub_expression))] #[cfg_attr(feature = "cargo-clippy", allow(diverging_sub_expression))]
fn backslash_x_char<I>(chars: &mut I) -> Option<char> fn backslash_x_char<I>(chars: &mut I) -> Option<char>
where I: Iterator<Item = (usize, char)> where I: Iterator<Item = (usize, char)>
{ {
@ -238,7 +238,7 @@ fn backslash_x_char<I>(chars: &mut I) -> Option<char>
char::from_u32(from_hex!(a b)) char::from_u32(from_hex!(a b))
} }
#[cfg_attr(feature = "clippy", allow(diverging_sub_expression))] #[cfg_attr(feature = "cargo-clippy", allow(diverging_sub_expression))]
fn backslash_x_byte<I>(chars: &mut I) -> Option<u8> fn backslash_x_byte<I>(chars: &mut I) -> Option<u8>
where I: Iterator<Item = (usize, u8)> where I: Iterator<Item = (usize, u8)>
{ {
@ -247,7 +247,7 @@ fn backslash_x_byte<I>(chars: &mut I) -> Option<u8>
Some(from_hex!(a b)) Some(from_hex!(a b))
} }
#[cfg_attr(feature = "clippy", allow(diverging_sub_expression, many_single_char_names))] #[cfg_attr(feature = "cargo-clippy", allow(diverging_sub_expression, many_single_char_names))]
fn backslash_u<I>(chars: &mut I) -> Option<char> fn backslash_u<I>(chars: &mut I) -> Option<char>
where I: Iterator<Item = (usize, char)> where I: Iterator<Item = (usize, char)>
{ {

third_party/rust/syn/src/expr.rs

@ -22,7 +22,7 @@ pub enum ExprKind {
/// First expr is the place; second expr is the value. /// First expr is the place; second expr is the value.
InPlace(Box<Expr>, Box<Expr>), InPlace(Box<Expr>, Box<Expr>),
/// An array (`[a, b, c, d]`) /// An array (`[a, b, c, d]`)
Vec(Vec<Expr>), Array(Vec<Expr>),
/// A function call /// A function call
/// ///
/// The first field resolves to the function itself, /// The first field resolves to the function itself,
@ -85,10 +85,10 @@ pub enum ExprKind {
Loop(Block, Option<Ident>), Loop(Block, Option<Ident>),
/// A `match` block. /// A `match` block.
Match(Box<Expr>, Vec<Arm>), Match(Box<Expr>, Vec<Arm>),
/// A closure (for example, `move |a, b, c| {a + b + c}`) /// A closure (for example, `move |a, b, c| a + b + c`)
Closure(CaptureBy, Box<FnDecl>, Block), Closure(CaptureBy, Box<FnDecl>, Box<Expr>),
/// A block (`{ ... }` or `unsafe { ... }`) /// A block (`{ ... }` or `unsafe { ... }`)
Block(BlockCheckMode, Block), Block(Unsafety, Block),
/// An assignment (`a = foo()`) /// An assignment (`a = foo()`)
Assign(Box<Expr>, Box<Expr>), Assign(Box<Expr>, Box<Expr>),
@ -116,8 +116,8 @@ pub enum ExprKind {
/// A referencing operation (`&a` or `&mut a`) /// A referencing operation (`&a` or `&mut a`)
AddrOf(Mutability, Box<Expr>), AddrOf(Mutability, Box<Expr>),
/// A `break`, with an optional label to break /// A `break`, with an optional label to break, and an optional expression
Break(Option<Ident>), Break(Option<Ident>, Option<Box<Expr>>),
/// A `continue`, with an optional label /// A `continue`, with an optional label
Continue(Option<Ident>), Continue(Option<Ident>),
/// A `return`, with an optional value to be returned /// A `return`, with an optional value to be returned
@ -150,6 +150,7 @@ pub struct FieldValue {
pub ident: Ident, pub ident: Ident,
pub expr: Expr, pub expr: Expr,
pub is_shorthand: bool, pub is_shorthand: bool,
pub attrs: Vec<Attribute>,
} }
/// A Block (`{ .. }`). /// A Block (`{ .. }`).
@ -161,12 +162,6 @@ pub struct Block {
pub stmts: Vec<Stmt>, pub stmts: Vec<Stmt>,
} }
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum BlockCheckMode {
Default,
Unsafe,
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub enum Stmt { pub enum Stmt {
/// A local (let) binding. /// A local (let) binding.
@ -209,7 +204,7 @@ pub struct Local {
#[derive(Debug, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Hash)]
// Clippy false positive // Clippy false positive
// https://github.com/Manishearth/rust-clippy/issues/1241 // https://github.com/Manishearth/rust-clippy/issues/1241
#[cfg_attr(feature = "clippy", allow(enum_variant_names))] #[cfg_attr(feature = "cargo-clippy", allow(enum_variant_names))]
pub enum Pat { pub enum Pat {
/// Represents a wildcard pattern (`_`) /// Represents a wildcard pattern (`_`)
Wild, Wild,
@ -300,6 +295,7 @@ pub struct FieldPat {
/// The pattern the field is destructured to /// The pattern the field is destructured to
pub pat: Box<Pat>, pub pat: Box<Pat>,
pub is_shorthand: bool, pub is_shorthand: bool,
pub attrs: Vec<Attribute>,
} }
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
@ -312,22 +308,22 @@ pub enum BindingMode {
pub mod parsing { pub mod parsing {
use super::*; use super::*;
use {BinOp, Delimited, DelimToken, FnArg, FnDecl, FunctionRetTy, Ident, Lifetime, Mac, use {BinOp, Delimited, DelimToken, FnArg, FnDecl, FunctionRetTy, Ident, Lifetime, Mac,
TokenTree, Ty, UnOp}; TokenTree, Ty, UnOp, Unsafety};
use attr::parsing::outer_attr; use attr::parsing::outer_attr;
use generics::parsing::lifetime; use generics::parsing::lifetime;
use ident::parsing::{ident, wordlike}; use ident::parsing::{ident, wordlike};
use item::parsing::item; use item::parsing::item;
use lit::parsing::{digits, lit}; use lit::parsing::{digits, lit};
use mac::parsing::{mac, token_trees}; use mac::parsing::{mac, token_trees};
use nom::IResult::{self, Error}; use synom::IResult::{self, Error};
use op::parsing::{assign_op, binop, unop}; use op::parsing::{assign_op, binop, unop};
use ty::parsing::{mutability, path, qpath, ty}; use ty::parsing::{mutability, path, qpath, ty, unsafety};
// Struct literals are ambiguous in certain positions // Struct literals are ambiguous in certain positions
// https://github.com/rust-lang/rfcs/pull/92 // https://github.com/rust-lang/rfcs/pull/92
macro_rules! named_ambiguous_expr { macro_rules! named_ambiguous_expr {
($name:ident -> $o:ty, $allow_struct:ident, $submac:ident!( $($args:tt)* )) => { ($name:ident -> $o:ty, $allow_struct:ident, $submac:ident!( $($args:tt)* )) => {
fn $name(i: &str, $allow_struct: bool) -> $crate::nom::IResult<&str, $o> { fn $name(i: &str, $allow_struct: bool) -> $crate::synom::IResult<&str, $o> {
$submac!(i, $($args)*) $submac!(i, $($args)*)
} }
}; };
@ -343,6 +339,7 @@ pub mod parsing {
named!(expr_no_struct -> Expr, ambiguous_expr!(false)); named!(expr_no_struct -> Expr, ambiguous_expr!(false));
#[cfg_attr(feature = "cargo-clippy", allow(cyclomatic_complexity))]
fn ambiguous_expr(i: &str, allow_struct: bool, allow_block: bool) -> IResult<&str, Expr> { fn ambiguous_expr(i: &str, allow_struct: bool, allow_block: bool) -> IResult<&str, Expr> {
do_parse!( do_parse!(
i, i,
@ -355,7 +352,7 @@ pub mod parsing {
| |
expr_mac // must be before expr_path expr_mac // must be before expr_path
| |
expr_break // must be before expr_path call!(expr_break, allow_struct) // must be before expr_path
| |
expr_continue // must be before expr_path expr_continue // must be before expr_path
| |
@ -365,7 +362,7 @@ pub mod parsing {
| |
expr_in_place expr_in_place
| |
expr_vec expr_array
| |
expr_tup expr_tup
| |
@ -474,17 +471,17 @@ pub mod parsing {
punct!("}") >> punct!("}") >>
(ExprKind::InPlace( (ExprKind::InPlace(
Box::new(place), Box::new(place),
Box::new(ExprKind::Block(BlockCheckMode::Default, Block { Box::new(ExprKind::Block(Unsafety::Normal, Block {
stmts: value, stmts: value,
}).into()), }).into()),
)) ))
)); ));
named!(expr_vec -> ExprKind, do_parse!( named!(expr_array -> ExprKind, do_parse!(
punct!("[") >> punct!("[") >>
elems: terminated_list!(punct!(","), expr) >> elems: terminated_list!(punct!(","), expr) >>
punct!("]") >> punct!("]") >>
(ExprKind::Vec(elems)) (ExprKind::Array(elems))
)); ));
named!(and_call -> Vec<Expr>, do_parse!( named!(and_call -> Vec<Expr>, do_parse!(
@ -571,7 +568,7 @@ pub mod parsing {
punct!("{") >> punct!("{") >>
else_block: within_block >> else_block: within_block >>
punct!("}") >> punct!("}") >>
(ExprKind::Block(BlockCheckMode::Default, Block { (ExprKind::Block(Unsafety::Normal, Block {
stmts: else_block, stmts: else_block,
}).into()) }).into())
) )
@ -632,7 +629,7 @@ pub mod parsing {
)); ));
fn arm_requires_comma(arm: &Arm) -> bool { fn arm_requires_comma(arm: &Arm) -> bool {
if let ExprKind::Block(BlockCheckMode::Default, _) = arm.body.node { if let ExprKind::Block(Unsafety::Normal, _) = arm.body.node {
false false
} else { } else {
true true
@ -645,7 +642,7 @@ pub mod parsing {
guard: option!(preceded!(keyword!("if"), expr)) >> guard: option!(preceded!(keyword!("if"), expr)) >>
punct!("=>") >> punct!("=>") >>
body: alt!( body: alt!(
map!(block, |blk| ExprKind::Block(BlockCheckMode::Default, blk).into()) map!(block, |blk| ExprKind::Block(Unsafety::Normal, blk).into())
| |
expr expr
) >> ) >>
@ -667,15 +664,10 @@ pub mod parsing {
punct!("->") >> punct!("->") >>
ty: ty >> ty: ty >>
body: block >> body: block >>
((FunctionRetTy::Ty(ty), body)) (FunctionRetTy::Ty(ty), ExprKind::Block(Unsafety::Normal, body).into())
) )
| |
map!(ambiguous_expr!(allow_struct), |e| ( map!(ambiguous_expr!(allow_struct), |e| (FunctionRetTy::Default, e))
FunctionRetTy::Default,
Block {
stmts: vec![Stmt::Expr(Box::new(e))],
},
))
) >> ) >>
(ExprKind::Closure( (ExprKind::Closure(
capture, capture,
@ -684,7 +676,7 @@ pub mod parsing {
output: ret_and_body.0, output: ret_and_body.0,
variadic: false, variadic: false,
}), }),
ret_and_body.1, Box::new(ret_and_body.1),
)) ))
)); ));
@ -720,10 +712,11 @@ pub mod parsing {
(ExprKind::Continue(lbl)) (ExprKind::Continue(lbl))
)); ));
named!(expr_break -> ExprKind, do_parse!( named_ambiguous_expr!(expr_break -> ExprKind, allow_struct, do_parse!(
keyword!("break") >> keyword!("break") >>
lbl: option!(label) >> lbl: option!(label) >>
(ExprKind::Break(lbl)) val: option!(call!(ambiguous_expr, allow_struct, false)) >>
(ExprKind::Break(lbl, val.map(Box::new)))
)); ));
named_ambiguous_expr!(expr_ret -> ExprKind, allow_struct, do_parse!( named_ambiguous_expr!(expr_ret -> ExprKind, allow_struct, do_parse!(
@ -756,6 +749,7 @@ pub mod parsing {
ident: name, ident: name,
expr: value, expr: value,
is_shorthand: false, is_shorthand: false,
attrs: Vec::new(),
}) })
) )
| |
@ -763,6 +757,7 @@ pub mod parsing {
ident: name.clone(), ident: name.clone(),
expr: ExprKind::Path(None, name.into()).into(), expr: ExprKind::Path(None, name.into()).into(),
is_shorthand: true, is_shorthand: true,
attrs: Vec::new(),
}) })
)); ));
@ -776,7 +771,7 @@ pub mod parsing {
)); ));
named!(expr_block -> ExprKind, do_parse!( named!(expr_block -> ExprKind, do_parse!(
rules: block_check_mode >> rules: unsafety >>
b: block >> b: block >>
(ExprKind::Block(rules, Block { (ExprKind::Block(rules, Block {
stmts: b.stmts, stmts: b.stmts,
@ -834,12 +829,6 @@ pub mod parsing {
}) })
)); ));
named!(block_check_mode -> BlockCheckMode, alt!(
keyword!("unsafe") => { |_| BlockCheckMode::Unsafe }
|
epsilon!() => { |_| BlockCheckMode::Default }
));
named!(pub within_block -> Vec<Stmt>, do_parse!( named!(pub within_block -> Vec<Stmt>, do_parse!(
many0!(punct!(";")) >> many0!(punct!(";")) >>
mut standalone: many0!(terminated!(standalone_stmt, many0!(punct!(";")))) >> mut standalone: many0!(terminated!(standalone_stmt, many0!(punct!(";")))) >>
@ -865,7 +854,7 @@ pub mod parsing {
named!(stmt_mac -> Stmt, do_parse!( named!(stmt_mac -> Stmt, do_parse!(
attrs: many0!(outer_attr) >> attrs: many0!(outer_attr) >>
name: ident >> what: path >>
punct!("!") >> punct!("!") >>
// Only parse braces here; paren and bracket will get parsed as // Only parse braces here; paren and bracket will get parsed as
// expression statements // expression statements
@ -875,7 +864,7 @@ pub mod parsing {
semi: option!(punct!(";")) >> semi: option!(punct!(";")) >>
(Stmt::Mac(Box::new(( (Stmt::Mac(Box::new((
Mac { Mac {
path: name.into(), path: what,
tts: vec![TokenTree::Delimited(Delimited { tts: vec![TokenTree::Delimited(Delimited {
delim: DelimToken::Brace, delim: DelimToken::Brace,
tts: tts, tts: tts,
@ -982,8 +971,8 @@ pub mod parsing {
| |
keyword!("self") => { Into::into } keyword!("self") => { Into::into }
) >> ) >>
not!(peek!(punct!("<"))) >> not!(punct!("<")) >>
not!(peek!(punct!("::"))) >> not!(punct!("::")) >>
subpat: option!(preceded!(punct!("@"), pat)) >> subpat: option!(preceded!(punct!("@"), pat)) >>
(Pat::Ident( (Pat::Ident(
if mode.is_some() { if mode.is_some() {
@ -1024,6 +1013,7 @@ pub mod parsing {
ident: ident, ident: ident,
pat: Box::new(pat), pat: Box::new(pat),
is_shorthand: false, is_shorthand: false,
attrs: Vec::new(),
}) })
) )
| |
@ -1049,6 +1039,7 @@ pub mod parsing {
ident: ident, ident: ident,
pat: Box::new(pat), pat: Box::new(pat),
is_shorthand: true, is_shorthand: true,
attrs: Vec::new(),
} }
}) })
) )
@ -1159,7 +1150,7 @@ pub mod parsing {
#[cfg(feature = "printing")] #[cfg(feature = "printing")]
mod printing { mod printing {
use super::*; use super::*;
use {FnArg, FunctionRetTy, Mutability, Ty}; use {FnArg, FunctionRetTy, Mutability, Ty, Unsafety};
use attr::FilterAttrs; use attr::FilterAttrs;
use quote::{Tokens, ToTokens}; use quote::{Tokens, ToTokens};
@ -1176,7 +1167,7 @@ mod printing {
place.to_tokens(tokens); place.to_tokens(tokens);
value.to_tokens(tokens); value.to_tokens(tokens);
} }
ExprKind::Vec(ref tys) => { ExprKind::Array(ref tys) => {
tokens.append("["); tokens.append("[");
tokens.append_separated(tys, ","); tokens.append_separated(tys, ",");
tokens.append("]"); tokens.append("]");
@ -1297,7 +1288,7 @@ mod printing {
tokens.append_all(arms); tokens.append_all(arms);
tokens.append("}"); tokens.append("}");
} }
ExprKind::Closure(capture, ref decl, ref body) => { ExprKind::Closure(capture, ref decl, ref expr) => {
capture.to_tokens(tokens); capture.to_tokens(tokens);
tokens.append("|"); tokens.append("|");
for (i, input) in decl.inputs.iter().enumerate() { for (i, input) in decl.inputs.iter().enumerate() {
@ -1313,23 +1304,13 @@ mod printing {
} }
tokens.append("|"); tokens.append("|");
match decl.output { match decl.output {
FunctionRetTy::Default => { FunctionRetTy::Default => { /* nothing */ }
if body.stmts.len() == 1 {
if let Stmt::Expr(ref expr) = body.stmts[0] {
expr.to_tokens(tokens);
} else {
body.to_tokens(tokens);
}
} else {
body.to_tokens(tokens);
}
}
FunctionRetTy::Ty(ref ty) => { FunctionRetTy::Ty(ref ty) => {
tokens.append("->"); tokens.append("->");
ty.to_tokens(tokens); ty.to_tokens(tokens);
body.to_tokens(tokens);
} }
} }
expr.to_tokens(tokens);
} }
ExprKind::Block(rules, ref block) => { ExprKind::Block(rules, ref block) => {
rules.to_tokens(tokens); rules.to_tokens(tokens);
@ -1396,9 +1377,10 @@ mod printing {
mutability.to_tokens(tokens); mutability.to_tokens(tokens);
expr.to_tokens(tokens); expr.to_tokens(tokens);
} }
ExprKind::Break(ref opt_label) => { ExprKind::Break(ref opt_label, ref opt_val) => {
tokens.append("break"); tokens.append("break");
opt_label.to_tokens(tokens); opt_label.to_tokens(tokens);
opt_val.to_tokens(tokens);
} }
ExprKind::Continue(ref opt_label) => { ExprKind::Continue(ref opt_label) => {
tokens.append("continue"); tokens.append("continue");
@ -1465,7 +1447,7 @@ mod printing {
tokens.append("=>"); tokens.append("=>");
self.body.to_tokens(tokens); self.body.to_tokens(tokens);
match self.body.node { match self.body.node {
ExprKind::Block(BlockCheckMode::Default, _) => { ExprKind::Block(Unsafety::Normal, _) => {
// no comma // no comma
} }
_ => tokens.append(","), _ => tokens.append(","),
@ -1646,17 +1628,6 @@ mod printing {
} }
} }
impl ToTokens for BlockCheckMode {
fn to_tokens(&self, tokens: &mut Tokens) {
match *self {
BlockCheckMode::Default => {
// nothing
}
BlockCheckMode::Unsafe => tokens.append("unsafe"),
}
}
}
impl ToTokens for Stmt { impl ToTokens for Stmt {
fn to_tokens(&self, tokens: &mut Tokens) { fn to_tokens(&self, tokens: &mut Tokens) {
match *self { match *self {

937
third_party/rust/syn/src/fold.rs vendored Normal file

@ -0,0 +1,937 @@
// Adapted from libsyntax.
//! A Folder represents an AST->AST fold; it accepts an AST piece,
//! and returns a piece of the same type.
use super::*;
#[cfg(not(feature = "full"))]
use constant;
/// AST->AST fold.
///
/// Each method of the Folder trait is a hook to be potentially overridden. Each
/// method's default implementation recursively visits the substructure of the
/// input via the `noop_fold` methods, which perform an "identity fold", that
/// is, they return the same structure that they are given (for example the
/// `fold_crate` method by default calls `fold::noop_fold_crate`).
///
/// If you want to ensure that your code handles every variant explicitly, you
/// need to override each method and monitor future changes to `Folder` in case
/// a new method with a new default implementation gets introduced.
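// Editorial sketch, not part of upstream syn: a custom folder usually only
// overrides the hooks it cares about and lets everything else fall through to
// the `noop_fold_*` defaults. For example, a folder that prefixes every
// identifier might look like this:
//
//     struct Prefixer;
//
//     impl Folder for Prefixer {
//         fn fold_ident(&mut self, ident: Ident) -> Ident {
//             // `Ident` implements `From<String>` and `Display`.
//             Ident::from(format!("prefixed_{}", ident))
//         }
//     }
//
//     // Usage: rewrite every identifier in a parsed derive input.
//     let mut folder = Prefixer;
//     let renamed: DeriveInput = folder.fold_derive_input(ast);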
pub trait Folder {
// Any additions to this trait should happen in form
// of a call to a public `noop_*` function that only calls
// out to the folder again, not other `noop_*` functions.
//
// This is a necessary API workaround to the problem of not
// being able to call out to the super default method
// in an overridden default method.
fn fold_ident(&mut self, _ident: Ident) -> Ident {
noop_fold_ident(self, _ident)
}
fn fold_derive_input(&mut self, derive_input: DeriveInput) -> DeriveInput {
noop_fold_derive_input(self, derive_input)
}
fn fold_ty(&mut self, ty: Ty) -> Ty {
noop_fold_ty(self, ty)
}
fn fold_generics(&mut self, generics: Generics) -> Generics {
noop_fold_generics(self, generics)
}
fn fold_ty_param_bound(&mut self, bound: TyParamBound) -> TyParamBound {
noop_fold_ty_param_bound(self, bound)
}
fn fold_poly_trait_ref(&mut self, trait_ref: PolyTraitRef) -> PolyTraitRef {
noop_fold_poly_trait_ref(self, trait_ref)
}
fn fold_variant_data(&mut self, data: VariantData) -> VariantData {
noop_fold_variant_data(self, data)
}
fn fold_field(&mut self, field: Field) -> Field {
noop_fold_field(self, field)
}
fn fold_variant(&mut self, variant: Variant) -> Variant {
noop_fold_variant(self, variant)
}
fn fold_lifetime(&mut self, _lifetime: Lifetime) -> Lifetime {
noop_fold_lifetime(self, _lifetime)
}
fn fold_lifetime_def(&mut self, lifetime: LifetimeDef) -> LifetimeDef {
noop_fold_lifetime_def(self, lifetime)
}
fn fold_path(&mut self, path: Path) -> Path {
noop_fold_path(self, path)
}
fn fold_path_segment(&mut self, path_segment: PathSegment) -> PathSegment {
noop_fold_path_segment(self, path_segment)
}
fn fold_path_parameters(&mut self, path_parameters: PathParameters) -> PathParameters {
noop_fold_path_parameters(self, path_parameters)
}
fn fold_assoc_type_binding(&mut self, type_binding: TypeBinding) -> TypeBinding {
noop_fold_assoc_type_binding(self, type_binding)
}
fn fold_attribute(&mut self, _attr: Attribute) -> Attribute {
noop_fold_attribute(self, _attr)
}
fn fold_fn_ret_ty(&mut self, ret_ty: FunctionRetTy) -> FunctionRetTy {
noop_fold_fn_ret_ty(self, ret_ty)
}
fn fold_const_expr(&mut self, expr: ConstExpr) -> ConstExpr {
noop_fold_const_expr(self, expr)
}
fn fold_lit(&mut self, _lit: Lit) -> Lit {
noop_fold_lit(self, _lit)
}
fn fold_mac(&mut self, mac: Mac) -> Mac {
noop_fold_mac(self, mac)
}
#[cfg(feature = "full")]
fn fold_crate(&mut self, _crate: Crate) -> Crate {
noop_fold_crate(self, _crate)
}
#[cfg(feature = "full")]
fn fold_item(&mut self, item: Item) -> Item {
noop_fold_item(self, item)
}
#[cfg(feature = "full")]
fn fold_expr(&mut self, expr: Expr) -> Expr {
noop_fold_expr(self, expr)
}
#[cfg(feature = "full")]
fn fold_foreign_item(&mut self, foreign_item: ForeignItem) -> ForeignItem {
noop_fold_foreign_item(self, foreign_item)
}
#[cfg(feature = "full")]
fn fold_pat(&mut self, pat: Pat) -> Pat {
noop_fold_pat(self, pat)
}
#[cfg(feature = "full")]
fn fold_fn_decl(&mut self, fn_decl: FnDecl) -> FnDecl {
noop_fold_fn_decl(self, fn_decl)
}
#[cfg(feature = "full")]
fn fold_trait_item(&mut self, trait_item: TraitItem) -> TraitItem {
noop_fold_trait_item(self, trait_item)
}
#[cfg(feature = "full")]
fn fold_impl_item(&mut self, impl_item: ImplItem) -> ImplItem {
noop_fold_impl_item(self, impl_item)
}
#[cfg(feature = "full")]
fn fold_method_sig(&mut self, method_sig: MethodSig) -> MethodSig {
noop_fold_method_sig(self, method_sig)
}
#[cfg(feature = "full")]
fn fold_stmt(&mut self, stmt: Stmt) -> Stmt {
noop_fold_stmt(self, stmt)
}
#[cfg(feature = "full")]
fn fold_block(&mut self, block: Block) -> Block {
noop_fold_block(self, block)
}
#[cfg(feature = "full")]
fn fold_local(&mut self, local: Local) -> Local {
noop_fold_local(self, local)
}
#[cfg(feature = "full")]
fn fold_view_path(&mut self, view_path: ViewPath) -> ViewPath {
noop_fold_view_path(self, view_path)
}
}
trait LiftOnce<T, U> {
type Output;
fn lift<F>(self, f: F) -> Self::Output where F: FnOnce(T) -> U;
}
impl<T, U> LiftOnce<T, U> for Box<T> {
type Output = Box<U>;
// Clippy false positive
// https://github.com/Manishearth/rust-clippy/issues/1478
#[cfg_attr(feature = "cargo-clippy", allow(boxed_local))]
fn lift<F>(self, f: F) -> Box<U>
where F: FnOnce(T) -> U
{
Box::new(f(*self))
}
}
trait LiftMut<T, U> {
type Output;
fn lift<F>(self, f: F) -> Self::Output where F: FnMut(T) -> U;
}
impl<T, U> LiftMut<T, U> for Vec<T> {
type Output = Vec<U>;
fn lift<F>(self, f: F) -> Vec<U>
where F: FnMut(T) -> U
{
self.into_iter().map(f).collect()
}
}
pub fn noop_fold_ident<F: ?Sized + Folder>(_: &mut F, _ident: Ident) -> Ident {
_ident
}
pub fn noop_fold_derive_input<F: ?Sized + Folder>(folder: &mut F,
DeriveInput{ ident,
vis,
attrs,
generics,
body }: DeriveInput) -> DeriveInput {
use Body::*;
DeriveInput {
ident: folder.fold_ident(ident),
vis: noop_fold_vis(folder, vis),
attrs: attrs.lift(|a| folder.fold_attribute(a)),
generics: folder.fold_generics(generics),
body: match body {
Enum(variants) => Enum(variants.lift(move |v| folder.fold_variant(v))),
Struct(variant_data) => Struct(folder.fold_variant_data(variant_data)),
},
}
}
pub fn noop_fold_ty<F: ?Sized + Folder>(folder: &mut F, ty: Ty) -> Ty {
use Ty::*;
match ty {
Slice(inner) => Slice(inner.lift(|v| folder.fold_ty(v))),
Paren(inner) => Paren(inner.lift(|v| folder.fold_ty(v))),
Ptr(mutable_type) => {
let mutable_type_ = *mutable_type;
let MutTy { ty, mutability }: MutTy = mutable_type_;
Ptr(Box::new(MutTy {
ty: folder.fold_ty(ty),
mutability: mutability,
}))
}
Rptr(opt_lifetime, mutable_type) => {
let mutable_type_ = *mutable_type;
let MutTy { ty, mutability }: MutTy = mutable_type_;
Rptr(opt_lifetime.map(|l| folder.fold_lifetime(l)),
Box::new(MutTy {
ty: folder.fold_ty(ty),
mutability: mutability,
}))
}
Never => Never,
Infer => Infer,
Tup(tuple_element_types) => Tup(tuple_element_types.lift(|x| folder.fold_ty(x))),
BareFn(bare_fn) => {
let bf_ = *bare_fn;
let BareFnTy { unsafety, abi, lifetimes, inputs, output, variadic } = bf_;
BareFn(Box::new(BareFnTy {
unsafety: unsafety,
abi: abi,
lifetimes: lifetimes.lift(|l| folder.fold_lifetime_def(l)),
inputs: inputs.lift(|v| {
BareFnArg {
name: v.name.map(|n| folder.fold_ident(n)),
ty: folder.fold_ty(v.ty),
}
}),
output: folder.fold_fn_ret_ty(output),
variadic: variadic,
}))
}
Path(maybe_qself, path) => {
Path(maybe_qself.map(|v| noop_fold_qself(folder, v)),
folder.fold_path(path))
}
Array(inner, len) => {
Array({
inner.lift(|v| folder.fold_ty(v))
},
folder.fold_const_expr(len))
}
TraitObject(bounds) => TraitObject(bounds.lift(|v| folder.fold_ty_param_bound(v))),
ImplTrait(bounds) => ImplTrait(bounds.lift(|v| folder.fold_ty_param_bound(v))),
Mac(mac) => Mac(folder.fold_mac(mac)),
}
}
fn noop_fold_qself<F: ?Sized + Folder>(folder: &mut F, QSelf { ty, position }: QSelf) -> QSelf {
QSelf {
ty: Box::new(folder.fold_ty(*(ty))),
position: position,
}
}
pub fn noop_fold_generics<F: ?Sized + Folder>(folder: &mut F,
Generics { lifetimes, ty_params, where_clause }: Generics)
-> Generics {
use WherePredicate::*;
Generics {
lifetimes: lifetimes.lift(|l| folder.fold_lifetime_def(l)),
ty_params: ty_params.lift(|ty| {
TyParam {
attrs: ty.attrs.lift(|a| folder.fold_attribute(a)),
ident: folder.fold_ident(ty.ident),
bounds: ty.bounds
.lift(|ty_pb| folder.fold_ty_param_bound(ty_pb)),
default: ty.default.map(|v| folder.fold_ty(v)),
}
}),
where_clause: WhereClause {
predicates: where_clause.predicates
.lift(|p| match p {
BoundPredicate(bound_predicate) => {
BoundPredicate(WhereBoundPredicate {
bound_lifetimes: bound_predicate.bound_lifetimes
.lift(|l| folder.fold_lifetime_def(l)),
bounded_ty: folder.fold_ty(bound_predicate.bounded_ty),
bounds: bound_predicate.bounds
.lift(|ty_pb| folder.fold_ty_param_bound(ty_pb)),
})
}
RegionPredicate(region_predicate) => {
RegionPredicate(WhereRegionPredicate {
lifetime: folder.fold_lifetime(region_predicate.lifetime),
bounds: region_predicate.bounds
.lift(|b| folder.fold_lifetime(b)),
})
}
EqPredicate(eq_predicate) => {
EqPredicate(WhereEqPredicate {
lhs_ty: folder.fold_ty(eq_predicate.lhs_ty),
rhs_ty: folder.fold_ty(eq_predicate.rhs_ty),
})
}
}),
},
}
}
pub fn noop_fold_ty_param_bound<F: ?Sized + Folder>(folder: &mut F,
bound: TyParamBound)
-> TyParamBound {
use TyParamBound::*;
match bound {
Trait(ty, modifier) => Trait(folder.fold_poly_trait_ref(ty), modifier),
Region(lifetime) => Region(folder.fold_lifetime(lifetime)),
}
}
pub fn noop_fold_poly_trait_ref<F: ?Sized + Folder>(folder: &mut F,
trait_ref: PolyTraitRef)
-> PolyTraitRef {
PolyTraitRef {
bound_lifetimes: trait_ref.bound_lifetimes
.lift(|bl| folder.fold_lifetime_def(bl)),
trait_ref: folder.fold_path(trait_ref.trait_ref),
}
}
pub fn noop_fold_variant_data<F: ?Sized + Folder>(folder: &mut F,
data: VariantData)
-> VariantData {
use VariantData::*;
match data {
Struct(fields) => Struct(fields.lift(|f| folder.fold_field(f))),
Tuple(fields) => Tuple(fields.lift(|f| folder.fold_field(f))),
Unit => Unit,
}
}
pub fn noop_fold_field<F: ?Sized + Folder>(folder: &mut F, field: Field) -> Field {
Field {
ident: field.ident.map(|i| folder.fold_ident(i)),
vis: noop_fold_vis(folder, field.vis),
attrs: field.attrs.lift(|a| folder.fold_attribute(a)),
ty: folder.fold_ty(field.ty),
}
}
pub fn noop_fold_variant<F: ?Sized + Folder>(folder: &mut F,
Variant { ident, attrs, data, discriminant }: Variant)
-> Variant {
Variant {
ident: folder.fold_ident(ident),
attrs: attrs.lift(|v| folder.fold_attribute(v)),
data: folder.fold_variant_data(data),
discriminant: discriminant.map(|ce| folder.fold_const_expr(ce)),
}
}
pub fn noop_fold_lifetime<F: ?Sized + Folder>(folder: &mut F, _lifetime: Lifetime) -> Lifetime {
Lifetime { ident: folder.fold_ident(_lifetime.ident) }
}
pub fn noop_fold_lifetime_def<F: ?Sized + Folder>(folder: &mut F,
LifetimeDef { attrs, lifetime, bounds }: LifetimeDef)
-> LifetimeDef {
LifetimeDef {
attrs: attrs.lift(|x| folder.fold_attribute(x)),
lifetime: folder.fold_lifetime(lifetime),
bounds: bounds.lift(|l| folder.fold_lifetime(l)),
}
}
pub fn noop_fold_path<F: ?Sized + Folder>(folder: &mut F, Path { global, segments }: Path) -> Path {
Path {
global: global,
segments: segments.lift(|s| folder.fold_path_segment(s)),
}
}
pub fn noop_fold_path_segment<F: ?Sized + Folder>(folder: &mut F,
PathSegment { ident, parameters }: PathSegment)
-> PathSegment {
PathSegment {
ident: folder.fold_ident(ident),
parameters: folder.fold_path_parameters(parameters),
}
}
pub fn noop_fold_path_parameters<F: ?Sized + Folder>(folder: &mut F,
path_parameters: PathParameters)
-> PathParameters {
use PathParameters::*;
match path_parameters {
AngleBracketed(d) => {
let AngleBracketedParameterData { lifetimes, types, bindings } = d;
AngleBracketed(AngleBracketedParameterData {
lifetimes: lifetimes.into_iter().map(|l| folder.fold_lifetime(l)).collect(),
types: types.lift(|ty| folder.fold_ty(ty)),
bindings: bindings.lift(|tb| folder.fold_assoc_type_binding(tb)),
})
}
Parenthesized(d) => {
let ParenthesizedParameterData { inputs, output } = d;
Parenthesized(ParenthesizedParameterData {
inputs: inputs.lift(|i| folder.fold_ty(i)),
output: output.map(|v| folder.fold_ty(v)),
})
}
}
}
pub fn noop_fold_assoc_type_binding<F: ?Sized + Folder>(folder: &mut F,
TypeBinding { ident, ty }: TypeBinding)
-> TypeBinding {
TypeBinding {
ident: folder.fold_ident(ident),
ty: folder.fold_ty(ty),
}
}
pub fn noop_fold_attribute<F: ?Sized + Folder>(_: &mut F, _attr: Attribute) -> Attribute {
_attr
}
pub fn noop_fold_fn_ret_ty<F: ?Sized + Folder>(folder: &mut F,
ret_ty: FunctionRetTy)
-> FunctionRetTy {
use FunctionRetTy::*;
match ret_ty {
Default => Default,
Ty(ty) => Ty(folder.fold_ty(ty)),
}
}
pub fn noop_fold_const_expr<F: ?Sized + Folder>(folder: &mut F, expr: ConstExpr) -> ConstExpr {
use ConstExpr::*;
match expr {
Call(f, args) => {
Call(f.lift(|e| folder.fold_const_expr(e)),
args.lift(|v| folder.fold_const_expr(v)))
}
Binary(op, lhs, rhs) => {
Binary(op,
lhs.lift(|e| folder.fold_const_expr(e)),
rhs.lift(|e| folder.fold_const_expr(e)))
}
Unary(op, e) => Unary(op, e.lift(|e| folder.fold_const_expr(e))),
Lit(l) => Lit(folder.fold_lit(l)),
Cast(e, ty) => {
Cast(e.lift(|e| folder.fold_const_expr(e)),
ty.lift(|v| folder.fold_ty(v)))
}
Path(p) => Path(folder.fold_path(p)),
Index(o, i) => {
Index(o.lift(|e| folder.fold_const_expr(e)),
i.lift(|e| folder.fold_const_expr(e)))
}
Paren(no_op) => Paren(no_op.lift(|e| folder.fold_const_expr(e))),
Other(e) => Other(noop_fold_other_const_expr(folder, e)),
}
}
#[cfg(feature = "full")]
fn noop_fold_other_const_expr<F: ?Sized + Folder>(folder: &mut F, e: Expr) -> Expr {
folder.fold_expr(e)
}
#[cfg(not(feature = "full"))]
fn noop_fold_other_const_expr<F: ?Sized + Folder>(_: &mut F, e: constant::Other) -> constant::Other {
e
}
pub fn noop_fold_lit<F: ?Sized + Folder>(_: &mut F, _lit: Lit) -> Lit {
_lit
}
pub fn noop_fold_tt<F: ?Sized + Folder>(folder: &mut F, tt: TokenTree) -> TokenTree {
use TokenTree::*;
use Token::*;
match tt {
Token(token) => {
Token(match token {
Literal(lit) => Literal(folder.fold_lit(lit)),
Ident(ident) => Ident(folder.fold_ident(ident)),
Lifetime(ident) => Lifetime(folder.fold_ident(ident)),
x => x,
})
}
Delimited(super::Delimited { delim, tts }) => {
Delimited(super::Delimited {
delim: delim,
tts: tts.lift(|v| noop_fold_tt(folder, v)),
})
}
}
}
pub fn noop_fold_mac<F: ?Sized + Folder>(folder: &mut F, Mac { path, tts }: Mac) -> Mac {
Mac {
path: folder.fold_path(path),
tts: tts.lift(|tt| noop_fold_tt(folder, tt)),
}
}
#[cfg(feature = "full")]
pub fn noop_fold_crate<F: ?Sized + Folder>(folder: &mut F,
Crate { shebang, attrs, items }: Crate)
-> Crate {
Crate {
shebang: shebang,
attrs: attrs.lift(|a| folder.fold_attribute(a)),
items: items.lift(|i| folder.fold_item(i)),
}
}
#[cfg(feature = "full")]
pub fn noop_fold_block<F: ?Sized + Folder>(folder: &mut F, block: Block) -> Block {
Block { stmts: block.stmts.lift(|s| folder.fold_stmt(s)) }
}
fn noop_fold_vis<F: ?Sized + Folder>(folder: &mut F, vis: Visibility) -> Visibility {
use Visibility::*;
match vis {
Crate => Crate,
Inherited => Inherited,
Public => Public,
Restricted(path) => Restricted(path.lift(|p| folder.fold_path(p))),
}
}
#[cfg(feature = "full")]
pub fn noop_fold_item<F: ?Sized + Folder>(folder: &mut F,
Item { ident, vis, attrs, node }: Item)
-> Item {
use ItemKind::*;
Item {
ident: folder.fold_ident(ident.clone()),
vis: noop_fold_vis(folder, vis),
attrs: attrs.lift(|a| folder.fold_attribute(a)),
node: match node {
ExternCrate(name) => ExternCrate(name.map(|i| folder.fold_ident(i))),
Use(view_path) => Use(Box::new(folder.fold_view_path(*view_path))),
Static(ty, mutability, expr) => {
Static(Box::new(folder.fold_ty(*ty)),
mutability,
expr.lift(|e| folder.fold_expr(e)))
}
Const(ty, expr) => {
Const(ty.lift(|ty| folder.fold_ty(ty)),
expr.lift(|e| folder.fold_expr(e)))
}
Fn(fn_decl, unsafety, constness, abi, generics, block) => {
Fn(fn_decl.lift(|v| folder.fold_fn_decl(v)),
unsafety,
constness,
abi,
folder.fold_generics(generics),
block.lift(|v| folder.fold_block(v)))
}
Mod(items) => Mod(items.map(|items| items.lift(|i| folder.fold_item(i)))),
ForeignMod(super::ForeignMod { abi, items }) => {
ForeignMod(super::ForeignMod {
abi: abi,
items: items.lift(|foreign_item| folder.fold_foreign_item(foreign_item)),
})
}
Ty(ty, generics) => {
Ty(ty.lift(|ty| folder.fold_ty(ty)),
folder.fold_generics(generics))
}
Enum(variants, generics) => {
Enum(variants.lift(|v| folder.fold_variant(v)),
folder.fold_generics(generics))
}
Struct(variant_data, generics) => {
Struct(folder.fold_variant_data(variant_data),
folder.fold_generics(generics))
}
Union(variant_data, generics) => {
Union(folder.fold_variant_data(variant_data),
folder.fold_generics(generics))
}
Trait(unsafety, generics, typbs, trait_items) => {
Trait(unsafety,
folder.fold_generics(generics),
typbs.lift(|typb| folder.fold_ty_param_bound(typb)),
trait_items.lift(|ti| folder.fold_trait_item(ti)))
}
DefaultImpl(unsafety, path) => DefaultImpl(unsafety, folder.fold_path(path)),
Impl(unsafety, impl_polarity, generics, path, ty, impl_items) => {
Impl(unsafety,
impl_polarity,
folder.fold_generics(generics),
path.map(|p| folder.fold_path(p)),
ty.lift(|ty| folder.fold_ty(ty)),
impl_items.lift(|i| folder.fold_impl_item(i)))
}
Mac(mac) => Mac(folder.fold_mac(mac)),
},
}
}
#[cfg(feature = "full")]
pub fn noop_fold_expr<F: ?Sized + Folder>(folder: &mut F, Expr { node, attrs }: Expr) -> Expr {
use ExprKind::*;
Expr {
node: match node {
ExprKind::Box(e) => ExprKind::Box(e.lift(|e| folder.fold_expr(e))),
InPlace(place, value) => {
InPlace(place.lift(|e| folder.fold_expr(e)),
value.lift(|e| folder.fold_expr(e)))
}
Array(array) => Array(array.lift(|e| folder.fold_expr(e))),
Call(function, args) => {
Call(function.lift(|e| folder.fold_expr(e)),
args.lift(|e| folder.fold_expr(e)))
}
MethodCall(method, tys, args) => {
MethodCall(folder.fold_ident(method),
tys.lift(|t| folder.fold_ty(t)),
args.lift(|e| folder.fold_expr(e)))
}
Tup(args) => Tup(args.lift(|e| folder.fold_expr(e))),
Binary(bop, lhs, rhs) => {
Binary(bop,
lhs.lift(|e| folder.fold_expr(e)),
rhs.lift(|e| folder.fold_expr(e)))
}
Unary(uop, e) => Unary(uop, e.lift(|e| folder.fold_expr(e))),
Lit(lit) => Lit(folder.fold_lit(lit)),
Cast(e, ty) => {
Cast(e.lift(|e| folder.fold_expr(e)),
ty.lift(|t| folder.fold_ty(t)))
}
Type(e, ty) => {
Type(e.lift(|e| folder.fold_expr(e)),
ty.lift(|t| folder.fold_ty(t)))
}
If(e, if_block, else_block) => {
If(e.lift(|e| folder.fold_expr(e)),
folder.fold_block(if_block),
else_block.map(|v| v.lift(|e| folder.fold_expr(e))))
}
IfLet(pat, expr, block, else_block) => {
IfLet(pat.lift(|p| folder.fold_pat(p)),
expr.lift(|e| folder.fold_expr(e)),
folder.fold_block(block),
else_block.map(|v| v.lift(|e| folder.fold_expr(e))))
}
While(e, block, label) => {
While(e.lift(|e| folder.fold_expr(e)),
folder.fold_block(block),
label.map(|i| folder.fold_ident(i)))
}
WhileLet(pat, expr, block, label) => {
WhileLet(pat.lift(|p| folder.fold_pat(p)),
expr.lift(|e| folder.fold_expr(e)),
folder.fold_block(block),
label.map(|i| folder.fold_ident(i)))
}
ForLoop(pat, expr, block, label) => {
ForLoop(pat.lift(|p| folder.fold_pat(p)),
expr.lift(|e| folder.fold_expr(e)),
folder.fold_block(block),
label.map(|i| folder.fold_ident(i)))
}
Loop(block, label) => {
Loop(folder.fold_block(block),
label.map(|i| folder.fold_ident(i)))
}
Match(e, arms) => {
Match(e.lift(|e| folder.fold_expr(e)),
arms.lift(|Arm { attrs, pats, guard, body }: Arm| {
Arm {
attrs: attrs.lift(|a| folder.fold_attribute(a)),
pats: pats.lift(|p| folder.fold_pat(p)),
guard: guard.map(|v| v.lift(|e| folder.fold_expr(e))),
body: body.lift(|e| folder.fold_expr(e)),
}
}))
}
Closure(capture_by, fn_decl, expr) => {
Closure(capture_by,
fn_decl.lift(|v| folder.fold_fn_decl(v)),
expr.lift(|e| folder.fold_expr(e)))
}
Block(unsafety, block) => Block(unsafety, folder.fold_block(block)),
Assign(lhs, rhs) => {
Assign(lhs.lift(|e| folder.fold_expr(e)),
rhs.lift(|e| folder.fold_expr(e)))
}
AssignOp(bop, lhs, rhs) => {
AssignOp(bop,
lhs.lift(|e| folder.fold_expr(e)),
rhs.lift(|e| folder.fold_expr(e)))
}
Field(expr, name) => Field(expr.lift(|e| folder.fold_expr(e)), folder.fold_ident(name)),
TupField(expr, index) => TupField(expr.lift(|e| folder.fold_expr(e)), index),
Index(expr, index) => {
Index(expr.lift(|e| folder.fold_expr(e)),
index.lift(|e| folder.fold_expr(e)))
}
Range(lhs, rhs, limits) => {
Range(lhs.map(|v| v.lift(|e| folder.fold_expr(e))),
rhs.map(|v| v.lift(|e| folder.fold_expr(e))),
limits)
}
Path(qself, path) => {
Path(qself.map(|v| noop_fold_qself(folder, v)),
folder.fold_path(path))
}
AddrOf(mutability, expr) => AddrOf(mutability, expr.lift(|e| folder.fold_expr(e))),
Break(label, expr) => {
Break(label.map(|i| folder.fold_ident(i)),
expr.map(|v| v.lift(|e| folder.fold_expr(e))))
}
Continue(label) => Continue(label.map(|i| folder.fold_ident(i))),
Ret(expr) => Ret(expr.map(|v| v.lift(|e| folder.fold_expr(e)))),
ExprKind::Mac(mac) => ExprKind::Mac(folder.fold_mac(mac)),
Struct(path, fields, expr) => {
Struct(folder.fold_path(path),
fields.lift(|FieldValue { ident, expr, is_shorthand, attrs }: FieldValue| {
FieldValue {
ident: folder.fold_ident(ident),
expr: folder.fold_expr(expr),
is_shorthand: is_shorthand,
attrs: attrs.lift(|v| folder.fold_attribute(v)),
}
}),
expr.map(|v| v.lift(|e| folder.fold_expr(e))))
}
Repeat(element, number) => {
Repeat(element.lift(|e| folder.fold_expr(e)),
number.lift(|e| folder.fold_expr(e)))
}
Paren(expr) => Paren(expr.lift(|e| folder.fold_expr(e))),
Try(expr) => Try(expr.lift(|e| folder.fold_expr(e))),
},
attrs: attrs.into_iter().map(|a| folder.fold_attribute(a)).collect(),
}
}
#[cfg(feature = "full")]
pub fn noop_fold_foreign_item<F: ?Sized + Folder>(folder: &mut F,
ForeignItem { ident, attrs, node, vis }: ForeignItem)
-> ForeignItem {
ForeignItem {
ident: folder.fold_ident(ident),
attrs: attrs.into_iter().map(|a| folder.fold_attribute(a)).collect(),
node: match node {
ForeignItemKind::Fn(fn_dcl, generics) => {
ForeignItemKind::Fn(fn_dcl.lift(|v| folder.fold_fn_decl(v)),
folder.fold_generics(generics))
}
ForeignItemKind::Static(ty, mutability) => {
ForeignItemKind::Static(ty.lift(|v| folder.fold_ty(v)), mutability)
}
},
vis: noop_fold_vis(folder, vis),
}
}
#[cfg(feature = "full")]
pub fn noop_fold_pat<F: ?Sized + Folder>(folder: &mut F, pat: Pat) -> Pat {
use Pat::*;
match pat {
Wild => Wild,
Ident(binding_mode, ident, pat) => {
Ident(binding_mode,
folder.fold_ident(ident),
pat.map(|p| p.lift(|p| folder.fold_pat(p))))
}
Struct(path, field_patterns, dots) => {
Struct(folder.fold_path(path),
field_patterns.lift(|FieldPat { ident, pat, is_shorthand, attrs }: FieldPat| {
FieldPat {
ident: folder.fold_ident(ident),
pat: pat.lift(|p| folder.fold_pat(p)),
is_shorthand: is_shorthand,
attrs: attrs.lift(|a| folder.fold_attribute(a)),
}
}),
dots)
}
TupleStruct(path, pats, len) => {
TupleStruct(folder.fold_path(path),
pats.lift(|p| folder.fold_pat(p)),
len)
}
Path(qself, path) => {
Path(qself.map(|v| noop_fold_qself(folder, v)),
folder.fold_path(path))
}
Tuple(pats, len) => Tuple(pats.lift(|p| folder.fold_pat(p)), len),
Box(b) => Box(b.lift(|p| folder.fold_pat(p))),
Ref(b, mutability) => Ref(b.lift(|p| folder.fold_pat(p)), mutability),
Lit(expr) => Lit(expr.lift(|e| folder.fold_expr(e))),
Range(l, r) => {
Range(l.lift(|e| folder.fold_expr(e)),
r.lift(|e| folder.fold_expr(e)))
}
Slice(lefts, pat, rights) => {
Slice(lefts.lift(|p| folder.fold_pat(p)),
pat.map(|v| v.lift(|p| folder.fold_pat(p))),
rights.lift(|p| folder.fold_pat(p)))
}
Mac(mac) => Mac(folder.fold_mac(mac)),
}
}
#[cfg(feature = "full")]
pub fn noop_fold_fn_decl<F: ?Sized + Folder>(folder: &mut F,
FnDecl { inputs, output, variadic }: FnDecl)
-> FnDecl {
FnDecl {
inputs: inputs.lift(|a| {
use FnArg::*;
match a {
SelfRef(lifetime, mutability) => {
SelfRef(lifetime.map(|v| folder.fold_lifetime(v)), mutability)
}
SelfValue(mutability) => SelfValue(mutability),
Captured(pat, ty) => Captured(folder.fold_pat(pat), folder.fold_ty(ty)),
Ignored(ty) => Ignored(folder.fold_ty(ty)),
}
}),
output: folder.fold_fn_ret_ty(output),
variadic: variadic,
}
}
#[cfg(feature = "full")]
pub fn noop_fold_trait_item<F: ?Sized + Folder>(folder: &mut F,
TraitItem { ident, attrs, node }: TraitItem)
-> TraitItem {
use TraitItemKind::*;
TraitItem {
ident: folder.fold_ident(ident),
attrs: attrs.lift(|v| folder.fold_attribute(v)),
node: match node {
Const(ty, expr) => Const(folder.fold_ty(ty), expr.map(|v| folder.fold_expr(v))),
Method(sig, block) => {
Method(folder.fold_method_sig(sig),
block.map(|v| folder.fold_block(v)))
}
Type(ty_pbs, ty) => {
Type(ty_pbs.lift(|v| folder.fold_ty_param_bound(v)),
ty.map(|v| folder.fold_ty(v)))
}
Macro(mac) => Macro(folder.fold_mac(mac)),
},
}
}
#[cfg(feature = "full")]
pub fn noop_fold_impl_item<F: ?Sized + Folder>(folder: &mut F,
ImplItem { ident, vis, defaultness, attrs, node }: ImplItem)
-> ImplItem {
use ImplItemKind::*;
ImplItem {
ident: folder.fold_ident(ident),
vis: noop_fold_vis(folder, vis),
defaultness: defaultness,
attrs: attrs.lift(|v| folder.fold_attribute(v)),
node: match node {
Const(ty, expr) => Const(folder.fold_ty(ty), folder.fold_expr(expr)),
Method(sig, block) => Method(folder.fold_method_sig(sig), folder.fold_block(block)),
Type(ty) => Type(folder.fold_ty(ty)),
Macro(mac) => Macro(folder.fold_mac(mac)),
},
}
}
#[cfg(feature = "full")]
pub fn noop_fold_method_sig<F: ?Sized + Folder>(folder: &mut F, MethodSig { unsafety, constness, abi, decl, generics }: MethodSig) -> MethodSig {
MethodSig {
unsafety: unsafety,
constness: constness,
abi: abi,
decl: folder.fold_fn_decl(decl),
generics: folder.fold_generics(generics),
}
}
#[cfg(feature = "full")]
pub fn noop_fold_stmt<F: ?Sized + Folder>(folder: &mut F, stmt: Stmt) -> Stmt {
use Stmt::*;
match stmt {
Local(local) => Local(local.lift(|l| folder.fold_local(l))),
Item(item) => Item(item.lift(|v| folder.fold_item(v))),
Expr(expr) => Expr(expr.lift(|v| folder.fold_expr(v))),
Semi(expr) => Semi(expr.lift(|v| folder.fold_expr(v))),
Mac(mac_stmt) => {
Mac(mac_stmt.lift(|(mac, style, attrs)| {
(folder.fold_mac(mac), style, attrs.lift(|a| folder.fold_attribute(a)))
}))
}
}
}
#[cfg(feature = "full")]
pub fn noop_fold_local<F: ?Sized + Folder>(folder: &mut F,
Local { pat, ty, init, attrs }: Local)
-> Local {
Local {
pat: pat.lift(|v| folder.fold_pat(v)),
ty: ty.map(|v| v.lift(|t| folder.fold_ty(t))),
init: init.map(|v| v.lift(|e| folder.fold_expr(e))),
attrs: attrs.lift(|a| folder.fold_attribute(a)),
}
}
#[cfg(feature = "full")]
pub fn noop_fold_view_path<F: ?Sized + Folder>(folder: &mut F, view_path: ViewPath) -> ViewPath {
use ViewPath::*;
match view_path {
Simple(path, ident) => Simple(folder.fold_path(path), ident.map(|i| folder.fold_ident(i))),
Glob(path) => Glob(folder.fold_path(path)),
List(path, items) => {
List(folder.fold_path(path),
items.lift(|PathListItem { name, rename }: PathListItem| {
PathListItem {
name: folder.fold_ident(name),
rename: rename.map(|i| folder.fold_ident(i)),
}
}))
}
}
}
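
These noop_fold_* helpers mirror the walk_* functions of the visitor module: a consumer implements Folder, overrides only the cases it cares about, and the rest falls back to the behaviour above. A minimal sketch of that pattern, assuming that Folder's methods have default implementations delegating to these helpers and that a fold_derive_input method exists alongside parse_derive_input (neither is shown in this hunk); the PrefixIdents folder, the prefix, and Ident's Display/String conversions are assumptions for illustration only.

// Cargo features assumed: syn = { version = "0.11", features = ["fold", "parsing"] }
extern crate syn;

use syn::Ident;
use syn::fold::Folder;

struct PrefixIdents;

impl Folder for PrefixIdents {
    // Only identifiers are rewritten; every other fold_* method is left to its
    // (assumed) default, which delegates to the noop_fold_* functions above.
    fn fold_ident(&mut self, ident: Ident) -> Ident {
        Ident::new(format!("prefixed_{}", ident))
    }
}

fn main() {
    let ast = syn::parse_derive_input("struct Point { x: u32, y: u32 }").unwrap();
    let mut folder = PrefixIdents;
    // Assumed entry point, by analogy with Visitor::visit_derive_input later in this commit.
    let renamed = folder.fold_derive_input(ast);
    println!("{:?}", renamed.ident);
}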


@ -19,8 +19,13 @@ pub struct ImplGenerics<'a>(&'a Generics);
#[derive(Debug)] #[derive(Debug)]
pub struct TyGenerics<'a>(&'a Generics); pub struct TyGenerics<'a>(&'a Generics);
impl Generics {
#[cfg(feature = "printing")] #[cfg(feature = "printing")]
/// Returned by `TyGenerics::as_turbofish`.
#[derive(Debug)]
pub struct Turbofish<'a>(&'a Generics);
#[cfg(feature = "printing")]
impl Generics {
/// Split a type's generics into the pieces required for impl'ing a trait /// Split a type's generics into the pieces required for impl'ing a trait
/// for that type. /// for that type.
/// ///
@ -45,6 +50,14 @@ impl Generics {
} }
} }
#[cfg(feature = "printing")]
impl<'a> TyGenerics<'a> {
/// Turn a type's generics like `<X, Y>` into a turbofish like `::<X, Y>`.
pub fn as_turbofish(&self) -> Turbofish {
Turbofish(self.0)
}
}
#[derive(Debug, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Lifetime { pub struct Lifetime {
pub ident: Ident, pub ident: Ident,
@ -125,6 +138,8 @@ pub enum WherePredicate {
BoundPredicate(WhereBoundPredicate), BoundPredicate(WhereBoundPredicate),
/// A lifetime predicate, e.g. `'a: 'b+'c` /// A lifetime predicate, e.g. `'a: 'b+'c`
RegionPredicate(WhereRegionPredicate), RegionPredicate(WhereRegionPredicate),
/// An equality predicate (unsupported)
EqPredicate(WhereEqPredicate),
} }
/// A type bound. /// A type bound.
@ -149,6 +164,15 @@ pub struct WhereRegionPredicate {
pub bounds: Vec<Lifetime>, pub bounds: Vec<Lifetime>,
} }
/// An equality predicate (unsupported).
///
/// E.g. `T=int`
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct WhereEqPredicate {
pub lhs_ty: Ty,
pub rhs_ty: Ty,
}
#[cfg(feature = "parsing")] #[cfg(feature = "parsing")]
pub mod parsing { pub mod parsing {
use super::*; use super::*;
@ -349,6 +373,17 @@ mod printing {
} }
} }
impl<'a> ToTokens for Turbofish<'a> {
fn to_tokens(&self, tokens: &mut Tokens) {
let has_lifetimes = !self.0.lifetimes.is_empty();
let has_ty_params = !self.0.ty_params.is_empty();
if has_lifetimes || has_ty_params {
tokens.append("::");
TyGenerics(self.0).to_tokens(tokens);
}
}
}
impl ToTokens for Lifetime { impl ToTokens for Lifetime {
fn to_tokens(&self, tokens: &mut Tokens) { fn to_tokens(&self, tokens: &mut Tokens) {
self.ident.to_tokens(tokens); self.ident.to_tokens(tokens);
@ -414,6 +449,9 @@ mod printing {
WherePredicate::RegionPredicate(ref predicate) => { WherePredicate::RegionPredicate(ref predicate) => {
predicate.to_tokens(tokens); predicate.to_tokens(tokens);
} }
WherePredicate::EqPredicate(ref predicate) => {
predicate.to_tokens(tokens);
}
} }
} }
} }
@ -443,4 +481,12 @@ mod printing {
} }
} }
} }
impl ToTokens for WhereEqPredicate {
fn to_tokens(&self, tokens: &mut Tokens) {
self.lhs_ty.to_tokens(tokens);
tokens.append("=");
self.rhs_ty.to_tokens(tokens);
}
}
} }
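
The Turbofish printer added above exists because `Name<X, Y>` is not valid in expression position; generated code has to write `Name::<X, Y>` instead. A hedged sketch of how a custom derive might combine it with the ImplGenerics/TyGenerics split; the split_for_impl name, the HelloWorld trait, and the quote! body are assumptions for illustration, not part of this diff.

// Inside a custom derive, with `ast: DeriveInput` already parsed and the
// "printing" feature enabled (Turbofish is gated on it above).
let name = &ast.ident;
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
let turbofish = ty_generics.as_turbofish();
let tokens = quote! {
    impl #impl_generics HelloWorld for #name #ty_generics #where_clause {
        fn hello() {
            // Expression position needs the turbofish form, e.g. `Point::<T>`.
            println!("hello from {}", stringify!(#name #turbofish));
        }
    }
};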


@ -57,8 +57,8 @@ impl<T: ?Sized> PartialEq<T> for Ident
#[cfg(feature = "parsing")] #[cfg(feature = "parsing")]
pub mod parsing { pub mod parsing {
use super::*; use super::*;
use nom::IResult; use synom::IResult;
use space::skip_whitespace; use synom::space::skip_whitespace;
use unicode_xid::UnicodeXID; use unicode_xid::UnicodeXID;
pub fn ident(input: &str) -> IResult<&str, Ident> { pub fn ident(input: &str) -> IResult<&str, Ident> {
@ -89,7 +89,7 @@ pub mod parsing {
_ => return IResult::Error, _ => return IResult::Error,
} }
while let Some((i, ch)) = chars.next() { for (i, ch) in chars {
if !UnicodeXID::is_xid_continue(ch) { if !UnicodeXID::is_xid_continue(ch) {
return IResult::Done(&input[i..], input[..i].into()); return IResult::Done(&input[i..], input[..i].into());
} }


@ -80,8 +80,8 @@ pub enum ItemKind {
Mac(Mac), Mac(Mac),
} }
impl From<MacroInput> for Item { impl From<DeriveInput> for Item {
fn from(input: MacroInput) -> Item { fn from(input: DeriveInput) -> Item {
Item { Item {
ident: input.ident, ident: input.ident,
vis: input.vis, vis: input.vis,
@ -242,8 +242,8 @@ pub mod parsing {
use generics::parsing::{generics, lifetime, ty_param_bound, where_clause}; use generics::parsing::{generics, lifetime, ty_param_bound, where_clause};
use ident::parsing::ident; use ident::parsing::ident;
use mac::parsing::delimited; use mac::parsing::delimited;
use macro_input::{Body, MacroInput}; use derive::{Body, DeriveInput};
use macro_input::parsing::macro_input; use derive::parsing::derive_input;
use ty::parsing::{abi, mutability, path, ty, unsafety}; use ty::parsing::{abi, mutability, path, ty, unsafety};
named!(pub item -> Item, alt!( named!(pub item -> Item, alt!(
@ -280,7 +280,7 @@ pub mod parsing {
named!(item_mac -> Item, do_parse!( named!(item_mac -> Item, do_parse!(
attrs: many0!(outer_attr) >> attrs: many0!(outer_attr) >>
path: ident >> what: path >>
punct!("!") >> punct!("!") >>
name: option!(ident) >> name: option!(ident) >>
body: delimited >> body: delimited >>
@ -293,7 +293,7 @@ pub mod parsing {
vis: Visibility::Inherited, vis: Visibility::Inherited,
attrs: attrs, attrs: attrs,
node: ItemKind::Mac(Mac { node: ItemKind::Mac(Mac {
path: path.into(), path: what,
tts: vec![TokenTree::Delimited(body)], tts: vec![TokenTree::Delimited(body)],
}), }),
}) })
@ -484,14 +484,14 @@ pub mod parsing {
lt: option!(lifetime) >> lt: option!(lifetime) >>
mutability: mutability >> mutability: mutability >>
keyword!("self") >> keyword!("self") >>
not!(peek!(punct!(":"))) >> not!(punct!(":")) >>
(FnArg::SelfRef(lt, mutability)) (FnArg::SelfRef(lt, mutability))
) )
| |
do_parse!( do_parse!(
mutability: mutability >> mutability: mutability >>
keyword!("self") >> keyword!("self") >>
not!(peek!(punct!(":"))) >> not!(punct!(":")) >>
(FnArg::SelfValue(mutability)) (FnArg::SelfValue(mutability))
) )
| |
@ -639,8 +639,8 @@ pub mod parsing {
)); ));
named!(item_struct_or_enum -> Item, map!( named!(item_struct_or_enum -> Item, map!(
macro_input, derive_input,
|def: MacroInput| Item { |def: DeriveInput| Item {
ident: def.ident, ident: def.ident,
vis: def.vis, vis: def.vis,
attrs: def.attrs, attrs: def.attrs,
@ -821,7 +821,7 @@ pub mod parsing {
named!(trait_item_mac -> TraitItem, do_parse!( named!(trait_item_mac -> TraitItem, do_parse!(
attrs: many0!(outer_attr) >> attrs: many0!(outer_attr) >>
id: ident >> what: path >>
punct!("!") >> punct!("!") >>
body: delimited >> body: delimited >>
cond!(match body.delim { cond!(match body.delim {
@ -829,10 +829,10 @@ pub mod parsing {
DelimToken::Brace => false, DelimToken::Brace => false,
}, punct!(";")) >> }, punct!(";")) >>
(TraitItem { (TraitItem {
ident: id.clone(), ident: Ident::new(""),
attrs: attrs, attrs: attrs,
node: TraitItemKind::Macro(Mac { node: TraitItemKind::Macro(Mac {
path: id.into(), path: what,
tts: vec![TokenTree::Delimited(body)], tts: vec![TokenTree::Delimited(body)],
}), }),
}) })
@ -848,7 +848,7 @@ pub mod parsing {
polarity: impl_polarity >> polarity: impl_polarity >>
path: path >> path: path >>
keyword!("for") >> keyword!("for") >>
((polarity, Some(path))) (polarity, Some(path))
) )
| |
epsilon!() => { |_| (ImplPolarity::Positive, None) } epsilon!() => { |_| (ImplPolarity::Positive, None) }
@ -976,7 +976,7 @@ pub mod parsing {
named!(impl_item_macro -> ImplItem, do_parse!( named!(impl_item_macro -> ImplItem, do_parse!(
attrs: many0!(outer_attr) >> attrs: many0!(outer_attr) >>
id: ident >> what: path >>
punct!("!") >> punct!("!") >>
body: delimited >> body: delimited >>
cond!(match body.delim { cond!(match body.delim {
@ -984,12 +984,12 @@ pub mod parsing {
DelimToken::Brace => false, DelimToken::Brace => false,
}, punct!(";")) >> }, punct!(";")) >>
(ImplItem { (ImplItem {
ident: id.clone(), ident: Ident::new(""),
vis: Visibility::Inherited, vis: Visibility::Inherited,
defaultness: Defaultness::Final, defaultness: Defaultness::Final,
attrs: attrs, attrs: attrs,
node: ImplItemKind::Macro(Mac { node: ImplItemKind::Macro(Mac {
path: id.into(), path: what,
tts: vec![TokenTree::Delimited(body)], tts: vec![TokenTree::Delimited(body)],
}), }),
}) })


@ -29,7 +29,7 @@ pub mod parsing {
named!(shebang -> String, do_parse!( named!(shebang -> String, do_parse!(
tag!("#!") >> tag!("#!") >>
not!(peek!(tag!("["))) >> not!(tag!("[")) >>
content: take_until!("\n") >> content: take_until!("\n") >>
(format!("#!{}", content)) (format!("#!{}", content))
)); ));


@ -1,23 +1,14 @@
#![cfg_attr(feature = "clippy", feature(plugin))] #![cfg_attr(feature = "cargo-clippy", allow(large_enum_variant))]
#![cfg_attr(feature = "clippy", plugin(clippy))]
#[cfg(feature = "printing")] #[cfg(feature = "printing")]
#[macro_use]
extern crate quote; extern crate quote;
#[cfg(feature = "pretty")]
extern crate syntex_syntax as syntax;
#[cfg(feature = "parsing")] #[cfg(feature = "parsing")]
extern crate unicode_xid; extern crate unicode_xid;
#[cfg(feature = "parsing")] #[cfg(feature = "parsing")]
#[macro_use] #[macro_use]
mod nom; extern crate synom;
#[cfg(feature = "parsing")]
#[macro_use]
mod helper;
#[cfg(feature = "aster")] #[cfg(feature = "aster")]
pub mod aster; pub mod aster;
@ -37,14 +28,15 @@ mod escape;
#[cfg(feature = "full")] #[cfg(feature = "full")]
mod expr; mod expr;
#[cfg(feature = "full")] #[cfg(feature = "full")]
pub use expr::{Arm, BindingMode, Block, BlockCheckMode, CaptureBy, Expr, ExprKind, FieldPat, pub use expr::{Arm, BindingMode, Block, CaptureBy, Expr, ExprKind, FieldPat, FieldValue,
FieldValue, Local, MacStmtStyle, Pat, RangeLimits, Stmt}; Local, MacStmtStyle, Pat, RangeLimits, Stmt};
mod generics; mod generics;
pub use generics::{Generics, Lifetime, LifetimeDef, TraitBoundModifier, TyParam, TyParamBound, pub use generics::{Generics, Lifetime, LifetimeDef, TraitBoundModifier, TyParam, TyParamBound,
WhereBoundPredicate, WhereClause, WherePredicate, WhereRegionPredicate}; WhereBoundPredicate, WhereClause, WhereEqPredicate, WherePredicate,
WhereRegionPredicate};
#[cfg(feature = "printing")] #[cfg(feature = "printing")]
pub use generics::{ImplGenerics, TyGenerics}; pub use generics::{ImplGenerics, Turbofish, TyGenerics};
mod ident; mod ident;
pub use ident::Ident; pub use ident::Ident;
@ -63,26 +55,21 @@ pub use krate::Crate;
mod lit; mod lit;
pub use lit::{FloatTy, IntTy, Lit, StrStyle}; pub use lit::{FloatTy, IntTy, Lit, StrStyle};
#[cfg(feature = "parsing")]
pub use lit::{ByteStrLit, FloatLit, IntLit, StrLit};
#[cfg(feature = "full")]
mod mac; mod mac;
#[cfg(feature = "full")]
pub use mac::{BinOpToken, DelimToken, Delimited, Mac, Token, TokenTree}; pub use mac::{BinOpToken, DelimToken, Delimited, Mac, Token, TokenTree};
mod macro_input; mod derive;
pub use macro_input::{Body, MacroInput}; pub use derive::{Body, DeriveInput};
// Deprecated. Use `DeriveInput` instead.
#[doc(hidden)]
pub type MacroInput = DeriveInput;
mod op; mod op;
pub use op::{BinOp, UnOp}; pub use op::{BinOp, UnOp};
#[cfg(feature = "expand")]
mod registry;
#[cfg(feature = "expand")]
pub use registry::{CustomDerive, Expanded, Registry};
#[cfg(feature = "parsing")]
mod space;
mod ty; mod ty;
pub use ty::{Abi, AngleBracketedParameterData, BareFnArg, BareFnTy, FunctionRetTy, MutTy, pub use ty::{Abi, AngleBracketedParameterData, BareFnArg, BareFnTy, FunctionRetTy, MutTy,
Mutability, ParenthesizedParameterData, Path, PathParameters, PathSegment, Mutability, ParenthesizedParameterData, Path, PathParameters, PathSegment,
@ -91,20 +78,23 @@ pub use ty::{Abi, AngleBracketedParameterData, BareFnArg, BareFnTy, FunctionRetT
#[cfg(feature = "visit")] #[cfg(feature = "visit")]
pub mod visit; pub mod visit;
#[cfg(feature = "fold")]
pub mod fold;
#[cfg(feature = "parsing")] #[cfg(feature = "parsing")]
pub use parsing::*; pub use parsing::*;
#[cfg(feature = "parsing")] #[cfg(feature = "parsing")]
mod parsing { mod parsing {
use super::*; use super::*;
use {generics, ident, macro_input, space, ty}; use {derive, generics, ident, mac, ty};
use nom::IResult; use synom::{space, IResult};
#[cfg(feature = "full")] #[cfg(feature = "full")]
use {expr, item, krate, mac}; use {expr, item, krate};
pub fn parse_macro_input(input: &str) -> Result<MacroInput, String> { pub fn parse_derive_input(input: &str) -> Result<DeriveInput, String> {
unwrap("macro input", macro_input::parsing::macro_input, input) unwrap("derive input", derive::parsing::derive_input, input)
} }
#[cfg(feature = "full")] #[cfg(feature = "full")]
@ -139,7 +129,6 @@ mod parsing {
unwrap("where clause", generics::parsing::where_clause, input) unwrap("where clause", generics::parsing::where_clause, input)
} }
#[cfg(feature = "full")]
pub fn parse_token_trees(input: &str) -> Result<Vec<TokenTree>, String> { pub fn parse_token_trees(input: &str) -> Result<Vec<TokenTree>, String> {
unwrap("token trees", mac::parsing::token_trees, input) unwrap("token trees", mac::parsing::token_trees, input)
} }
@ -148,6 +137,16 @@ mod parsing {
unwrap("identifier", ident::parsing::ident, input) unwrap("identifier", ident::parsing::ident, input)
} }
pub fn parse_ty_param_bound(input: &str) -> Result<TyParamBound, String> {
unwrap("type parameter bound", generics::parsing::ty_param_bound, input)
}
// Deprecated. Use `parse_derive_input` instead.
#[doc(hidden)]
pub fn parse_macro_input(input: &str) -> Result<MacroInput, String> {
parse_derive_input(input)
}
fn unwrap<T>(name: &'static str, fn unwrap<T>(name: &'static str,
f: fn(&str) -> IResult<&str, T>, f: fn(&str) -> IResult<&str, T>,
input: &str) input: &str)
@ -161,10 +160,53 @@ mod parsing {
// parsed nothing // parsed nothing
Err(format!("failed to parse {}: {:?}", name, rest)) Err(format!("failed to parse {}: {:?}", name, rest))
} else { } else {
Err(format!("failed to parse tokens after {}: {:?}", name, rest)) Err(format!("unparsed tokens after {}: {:?}", name, rest))
} }
} }
IResult::Error => Err(format!("failed to parse {}: {:?}", name, input)), IResult::Error => Err(format!("failed to parse {}: {:?}", name, input)),
} }
} }
} }
#[cfg(feature = "parsing")]
pub mod parse {
//! This module contains a set of exported nom parsers which can be used to
//! parse custom grammars when used alongside the `synom` crate.
//!
//! Internally, `syn` uses a fork of `nom` called `synom` which resolves a
//! persistent pitfall of using `nom` to parse Rust by eliminating the
//! `IResult::Incomplete` variant. The `synom` crate should be used instead
//! of `nom` when working with the parsers in this module.
#[cfg(feature = "full")]
pub use item::parsing::item;
#[cfg(feature = "full")]
pub use expr::parsing::expr;
pub use lit::parsing::lit;
pub use lit::parsing::string;
pub use lit::parsing::byte_string;
pub use lit::parsing::byte;
pub use lit::parsing::character;
pub use lit::parsing::float;
pub use lit::parsing::int;
pub use lit::parsing::boolean;
pub use ty::parsing::ty;
pub use ty::parsing::path;
pub use mac::parsing::token_tree as tt;
pub use ident::parsing::ident;
pub use generics::parsing::lifetime;
}
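
As the module documentation notes, these re-exported parsers compose with synom's combinator macros when building a custom grammar. A small sketch, reusing the ident and ty parsers exported above; the `field` parser and its `name: Type` grammar are made up for illustration.

#[macro_use]
extern crate synom;
extern crate syn;

use syn::parse::{ident, ty};

// Parses `name: Type`, built from the re-exported parsers plus synom's punct! macro.
named!(field -> syn::Ident, do_parse!(
    name: ident >>
    punct!(":") >>
    ty >>
    (name)
));

fn main() {
    // synom's IResult has no Incomplete variant, so two arms are exhaustive.
    match field("x: Vec<String>") {
        synom::IResult::Done(_rest, name) => println!("parsed field {:?}", name),
        synom::IResult::Error => println!("parse error"),
    }
}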


@ -126,37 +126,65 @@ impl_from_for_lit! {Float, [
f64 => FloatTy::F64 f64 => FloatTy::F64
]} ]}
#[cfg(feature = "parsing")]
#[derive(Debug, Clone)]
pub struct StrLit {
pub value: String,
pub style: StrStyle,
}
#[cfg(feature = "parsing")]
#[derive(Debug, Clone)]
pub struct ByteStrLit {
pub value: Vec<u8>,
pub style: StrStyle,
}
#[cfg(feature = "parsing")]
#[derive(Debug, Clone)]
pub struct IntLit {
pub value: u64,
pub suffix: IntTy,
}
#[cfg(feature = "parsing")]
#[derive(Debug, Clone)]
pub struct FloatLit {
pub value: String,
pub suffix: FloatTy,
}
#[cfg(feature = "parsing")] #[cfg(feature = "parsing")]
pub mod parsing { pub mod parsing {
use super::*; use super::*;
use escape::{cooked_byte, cooked_byte_string, cooked_char, cooked_string, raw_string}; use escape::{cooked_byte, cooked_byte_string, cooked_char, cooked_string, raw_string};
use space::skip_whitespace; use synom::space::skip_whitespace;
use nom::IResult; use synom::IResult;
use unicode_xid::UnicodeXID; use unicode_xid::UnicodeXID;
named!(pub lit -> Lit, alt!( named!(pub lit -> Lit, alt!(
string string => { |StrLit { value, style }| Lit::Str(value, style) }
| |
byte_string byte_string => { |ByteStrLit { value, style }| Lit::ByteStr(value, style) }
| |
byte byte => { |b| Lit::Byte(b) }
| |
character character => { |ch| Lit::Char(ch) }
| |
float // must be before int float => { |FloatLit { value, suffix }| Lit::Float(value, suffix) } // must be before int
| |
int => { |(value, ty)| Lit::Int(value, ty) } int => { |IntLit { value, suffix }| Lit::Int(value, suffix) }
| |
boolean boolean => { |value| Lit::Bool(value) }
)); ));
named!(string -> Lit, alt!( named!(pub string -> StrLit, alt!(
quoted_string => { |s| Lit::Str(s, StrStyle::Cooked) } quoted_string => { |s| StrLit { value: s, style: StrStyle::Cooked } }
| |
preceded!( preceded!(
punct!("r"), punct!("r"),
raw_string raw_string
) => { |(s, n)| Lit::Str(s, StrStyle::Raw(n)) } ) => { |(s, n)| StrLit { value: s, style: StrStyle::Raw(n) }}
)); ));
named!(pub quoted_string -> String, delimited!( named!(pub quoted_string -> String, delimited!(
@ -165,35 +193,35 @@ pub mod parsing {
tag!("\"") tag!("\"")
)); ));
named!(byte_string -> Lit, alt!( named!(pub byte_string -> ByteStrLit, alt!(
delimited!( delimited!(
punct!("b\""), punct!("b\""),
cooked_byte_string, cooked_byte_string,
tag!("\"") tag!("\"")
) => { |vec| Lit::ByteStr(vec, StrStyle::Cooked) } ) => { |vec| ByteStrLit { value: vec, style: StrStyle::Cooked } }
| |
preceded!( preceded!(
punct!("br"), punct!("br"),
raw_string raw_string
) => { |(s, n): (String, _)| Lit::ByteStr(s.into_bytes(), StrStyle::Raw(n)) } ) => { |(s, n): (String, _)| ByteStrLit { value: s.into_bytes(), style: StrStyle::Raw(n) } }
)); ));
named!(byte -> Lit, do_parse!( named!(pub byte -> u8, do_parse!(
punct!("b") >> punct!("b") >>
tag!("'") >> tag!("'") >>
b: cooked_byte >> b: cooked_byte >>
tag!("'") >> tag!("'") >>
(Lit::Byte(b)) (b)
)); ));
named!(character -> Lit, do_parse!( named!(pub character -> char, do_parse!(
punct!("'") >> punct!("'") >>
ch: cooked_char >> ch: cooked_char >>
tag!("'") >> tag!("'") >>
(Lit::Char(ch)) (ch)
)); ));
named!(float -> Lit, do_parse!( named!(pub float -> FloatLit, do_parse!(
value: float_string >> value: float_string >>
suffix: alt!( suffix: alt!(
tag!("f32") => { |_| FloatTy::F32 } tag!("f32") => { |_| FloatTy::F32 }
@ -202,12 +230,12 @@ pub mod parsing {
| |
epsilon!() => { |_| FloatTy::Unsuffixed } epsilon!() => { |_| FloatTy::Unsuffixed }
) >> ) >>
(Lit::Float(value, suffix)) (FloatLit { value: value, suffix: suffix })
)); ));
named!(pub int -> (u64, IntTy), tuple!( named!(pub int -> IntLit, do_parse!(
digits, value: digits >>
alt!( suffix: alt!(
tag!("isize") => { |_| IntTy::Isize } tag!("isize") => { |_| IntTy::Isize }
| |
tag!("i8") => { |_| IntTy::I8 } tag!("i8") => { |_| IntTy::I8 }
@ -229,13 +257,14 @@ pub mod parsing {
tag!("u64") => { |_| IntTy::U64 } tag!("u64") => { |_| IntTy::U64 }
| |
epsilon!() => { |_| IntTy::Unsuffixed } epsilon!() => { |_| IntTy::Unsuffixed }
) ) >>
(IntLit { value: value, suffix: suffix })
)); ));
named!(boolean -> Lit, alt!( named!(pub boolean -> bool, alt!(
keyword!("true") => { |_| Lit::Bool(true) } keyword!("true") => { |_| true }
| |
keyword!("false") => { |_| Lit::Bool(false) } keyword!("false") => { |_| false }
)); ));
fn float_string(mut input: &str) -> IResult<&str, String> { fn float_string(mut input: &str) -> IResult<&str, String> {


@ -114,16 +114,17 @@ pub mod parsing {
use super::*; use super::*;
use Lifetime; use Lifetime;
use generics::parsing::lifetime; use generics::parsing::lifetime;
use ident::parsing::{ident, word}; use ident::parsing::word;
use lit::parsing::lit; use lit::parsing::lit;
use space::{block_comment, whitespace}; use synom::space::{block_comment, whitespace};
use ty::parsing::path;
named!(pub mac -> Mac, do_parse!( named!(pub mac -> Mac, do_parse!(
name: ident >> what: path >>
punct!("!") >> punct!("!") >>
body: delimited >> body: delimited >>
(Mac { (Mac {
path: name.into(), path: what,
tts: vec![TokenTree::Delimited(body)], tts: vec![TokenTree::Delimited(body)],
}) })
)); ));
@ -150,7 +151,7 @@ pub mod parsing {
) => { |tts| Delimited { delim: DelimToken::Brace, tts: tts } } ) => { |tts| Delimited { delim: DelimToken::Brace, tts: tts } }
)); ));
named!(token_tree -> TokenTree, alt!( named!(pub token_tree -> TokenTree, alt!(
map!(token, TokenTree::Token) map!(token, TokenTree::Token)
| |
map!(delimited, TokenTree::Delimited) map!(delimited, TokenTree::Delimited)
@ -282,7 +283,7 @@ pub mod parsing {
| |
do_parse!( do_parse!(
punct!("///") >> punct!("///") >>
not!(peek!(tag!("/"))) >> not!(tag!("/")) >>
content: take_until!("\n") >> content: take_until!("\n") >>
(format!("///{}", content)) (format!("///{}", content))
) )


@ -22,17 +22,19 @@ pub enum Ty {
/// ///
/// Type parameters are stored in the Path itself /// Type parameters are stored in the Path itself
Path(Option<QSelf>, Path), Path(Option<QSelf>, Path),
/// Something like `A+B`. Note that `B` must always be a path. /// A trait object type `Bound1 + Bound2 + Bound3`
ObjectSum(Box<Ty>, Vec<TyParamBound>), /// where `Bound` is a trait or a lifetime.
/// A type like `for<'a> Foo<&'a Bar>` TraitObject(Vec<TyParamBound>),
PolyTraitRef(Vec<TyParamBound>), /// An `impl Bound1 + Bound2 + Bound3` type
/// An `impl TraitA+TraitB` type. /// where `Bound` is a trait or a lifetime.
ImplTrait(Vec<TyParamBound>), ImplTrait(Vec<TyParamBound>),
/// No-op; kept solely so that we can pretty-print faithfully /// No-op; kept solely so that we can pretty-print faithfully
Paren(Box<Ty>), Paren(Box<Ty>),
/// TyKind::Infer means the type should be inferred instead of it having been /// TyKind::Infer means the type should be inferred instead of it having been
/// specified. This can appear anywhere in a type. /// specified. This can appear anywhere in a type.
Infer, Infer,
/// A macro in the type position.
Mac(Mac),
} }
#[derive(Debug, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Hash)]
@ -55,7 +57,10 @@ pub enum Mutability {
/// E.g. `std::cmp::PartialEq` /// E.g. `std::cmp::PartialEq`
#[derive(Debug, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct Path { pub struct Path {
/// A `::foo` path is relative to the crate root rather than the current
/// module (like paths in an import).
pub global: bool, pub global: bool,
/// The segments in the path: the things separated by `::`.
pub segments: Vec<PathSegment>, pub segments: Vec<PathSegment>,
} }
@ -75,7 +80,13 @@ impl<T> From<T> for Path
/// E.g. `std`, `String` or `Box<T>` /// E.g. `std`, `String` or `Box<T>`
#[derive(Debug, Clone, Eq, PartialEq, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PathSegment { pub struct PathSegment {
/// The identifier portion of this path segment.
pub ident: Ident, pub ident: Ident,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
/// this is more than just simple syntactic sugar; the use of
/// parens affects the region binding rules, so we preserve the
/// distinction.
pub parameters: PathParameters, pub parameters: PathParameters,
} }
@ -220,6 +231,7 @@ pub enum FunctionRetTy {
#[cfg(feature = "parsing")] #[cfg(feature = "parsing")]
pub mod parsing { pub mod parsing {
use super::*; use super::*;
use {TyParamBound, TraitBoundModifier};
#[cfg(feature = "full")] #[cfg(feature = "full")]
use ConstExpr; use ConstExpr;
use constant::parsing::const_expr; use constant::parsing::const_expr;
@ -228,11 +240,14 @@ pub mod parsing {
use generics::parsing::{lifetime, lifetime_def, ty_param_bound, bound_lifetimes}; use generics::parsing::{lifetime, lifetime_def, ty_param_bound, bound_lifetimes};
use ident::parsing::ident; use ident::parsing::ident;
use lit::parsing::quoted_string; use lit::parsing::quoted_string;
use mac::parsing::mac;
use std::str; use std::str;
named!(pub ty -> Ty, alt!( named!(pub ty -> Ty, alt!(
ty_paren // must be before ty_tup ty_paren // must be before ty_tup
| |
ty_mac // must be before ty_path
|
ty_path // must be before ty_poly_trait_ref ty_path // must be before ty_poly_trait_ref
| |
ty_vec ty_vec
@ -254,6 +269,8 @@ pub mod parsing {
ty_impl_trait ty_impl_trait
)); ));
named!(ty_mac -> Ty, map!(mac, Ty::Mac));
named!(ty_vec -> Ty, do_parse!( named!(ty_vec -> Ty, do_parse!(
punct!("[") >> punct!("[") >>
elem: ty >> elem: ty >>
@ -363,11 +380,18 @@ pub mod parsing {
if let Some(Some(parenthesized)) = parenthesized { if let Some(Some(parenthesized)) = parenthesized {
path.segments.last_mut().unwrap().parameters = parenthesized; path.segments.last_mut().unwrap().parameters = parenthesized;
} }
let path = Ty::Path(qself, path);
if bounds.is_empty() { if bounds.is_empty() {
path Ty::Path(qself, path)
} else { } else {
Ty::ObjectSum(Box::new(path), bounds) let path = TyParamBound::Trait(
PolyTraitRef {
bound_lifetimes: Vec::new(),
trait_ref: path,
},
TraitBoundModifier::None,
);
let bounds = Some(path).into_iter().chain(bounds).collect();
Ty::TraitObject(bounds)
} }
}) })
)); ));
@ -423,7 +447,7 @@ pub mod parsing {
named!(ty_poly_trait_ref -> Ty, map!( named!(ty_poly_trait_ref -> Ty, map!(
separated_nonempty_list!(punct!("+"), ty_param_bound), separated_nonempty_list!(punct!("+"), ty_param_bound),
Ty::PolyTraitRef Ty::TraitObject
)); ));
named!(ty_impl_trait -> Ty, do_parse!( named!(ty_impl_trait -> Ty, do_parse!(
@ -463,7 +487,7 @@ pub mod parsing {
cond!(!lifetimes.is_empty(), punct!(",")), cond!(!lifetimes.is_empty(), punct!(",")),
separated_nonempty_list!( separated_nonempty_list!(
punct!(","), punct!(","),
terminated!(ty, not!(peek!(punct!("=")))) terminated!(ty, not!(punct!("=")))
) )
)) >> )) >>
bindings: opt_vec!(preceded!( bindings: opt_vec!(preceded!(
@ -528,7 +552,7 @@ pub mod parsing {
name: option!(do_parse!( name: option!(do_parse!(
name: ident >> name: ident >>
punct!(":") >> punct!(":") >>
not!(peek!(tag!(":"))) >> // not :: not!(tag!(":")) >> // not ::
(name) (name)
)) >> )) >>
ty: ty >> ty: ty >>
@ -626,14 +650,7 @@ mod printing {
segment.to_tokens(tokens); segment.to_tokens(tokens);
} }
} }
Ty::ObjectSum(ref ty, ref bounds) => { Ty::TraitObject(ref bounds) => {
ty.to_tokens(tokens);
for bound in bounds {
tokens.append("+");
bound.to_tokens(tokens);
}
}
Ty::PolyTraitRef(ref bounds) => {
tokens.append_separated(bounds, "+"); tokens.append_separated(bounds, "+");
} }
Ty::ImplTrait(ref bounds) => { Ty::ImplTrait(ref bounds) => {
@ -648,6 +665,7 @@ mod printing {
Ty::Infer => { Ty::Infer => {
tokens.append("_"); tokens.append("_");
} }
Ty::Mac(ref mac) => mac.to_tokens(tokens),
} }
} }
} }


@ -28,8 +28,8 @@ use super::*;
/// new default implementation gets introduced.) /// new default implementation gets introduced.)
pub trait Visitor: Sized { pub trait Visitor: Sized {
fn visit_ident(&mut self, _ident: &Ident) {} fn visit_ident(&mut self, _ident: &Ident) {}
fn visit_macro_input(&mut self, macro_input: &MacroInput) { fn visit_derive_input(&mut self, derive_input: &DeriveInput) {
walk_macro_input(self, macro_input) walk_derive_input(self, derive_input)
} }
fn visit_ty(&mut self, ty: &Ty) { fn visit_ty(&mut self, ty: &Ty) {
walk_ty(self, ty) walk_ty(self, ty)
@ -76,20 +76,67 @@ pub trait Visitor: Sized {
walk_const_expr(self, expr) walk_const_expr(self, expr)
} }
fn visit_lit(&mut self, _lit: &Lit) {} fn visit_lit(&mut self, _lit: &Lit) {}
fn visit_mac(&mut self, mac: &Mac) {
walk_mac(self, mac);
}
#[cfg(feature = "full")]
fn visit_crate(&mut self, _crate: &Crate) {
walk_crate(self, _crate);
}
#[cfg(feature = "full")]
fn visit_item(&mut self, item: &Item) {
walk_item(self, item);
}
#[cfg(feature = "full")]
fn visit_expr(&mut self, expr: &Expr) {
walk_expr(self, expr);
}
#[cfg(feature = "full")]
fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) {
walk_foreign_item(self, foreign_item);
}
#[cfg(feature = "full")]
fn visit_pat(&mut self, pat: &Pat) {
walk_pat(self, pat);
}
#[cfg(feature = "full")]
fn visit_fn_decl(&mut self, fn_decl: &FnDecl) {
walk_fn_decl(self, fn_decl);
}
#[cfg(feature = "full")]
fn visit_trait_item(&mut self, trait_item: &TraitItem) {
walk_trait_item(self, trait_item);
}
#[cfg(feature = "full")]
fn visit_impl_item(&mut self, impl_item: &ImplItem) {
walk_impl_item(self, impl_item);
}
#[cfg(feature = "full")]
fn visit_method_sig(&mut self, method_sig: &MethodSig) {
walk_method_sig(self, method_sig);
}
#[cfg(feature = "full")]
fn visit_stmt(&mut self, stmt: &Stmt) {
walk_stmt(self, stmt);
}
#[cfg(feature = "full")]
fn visit_local(&mut self, local: &Local) {
walk_local(self, local);
}
#[cfg(feature = "full")]
fn visit_view_path(&mut self, view_path: &ViewPath) {
walk_view_path(self, view_path);
}
} }
#[macro_export]
macro_rules! walk_list { macro_rules! walk_list {
($visitor: expr, $method: ident, $list: expr) => { ($visitor:expr, $method:ident, $list:expr $(, $extra_args:expr)*) => {
for elem in $list { for elem in $list {
$visitor.$method(elem) $visitor.$method(elem $(, $extra_args)*)
} }
}; };
($visitor: expr, $method: ident, $list: expr, $($extra_args: expr),*) => {
for elem in $list {
$visitor.$method(elem, $($extra_args,)*)
}
}
} }
pub fn walk_opt_ident<V: Visitor>(visitor: &mut V, opt_ident: &Option<Ident>) { pub fn walk_opt_ident<V: Visitor>(visitor: &mut V, opt_ident: &Option<Ident>) {
@ -110,18 +157,18 @@ pub fn walk_poly_trait_ref<V>(visitor: &mut V, trait_ref: &PolyTraitRef, _: &Tra
visitor.visit_path(&trait_ref.trait_ref); visitor.visit_path(&trait_ref.trait_ref);
} }
pub fn walk_macro_input<V: Visitor>(visitor: &mut V, macro_input: &MacroInput) { pub fn walk_derive_input<V: Visitor>(visitor: &mut V, derive_input: &DeriveInput) {
visitor.visit_ident(&macro_input.ident); visitor.visit_ident(&derive_input.ident);
visitor.visit_generics(&macro_input.generics); visitor.visit_generics(&derive_input.generics);
match macro_input.body { match derive_input.body {
Body::Enum(ref variants) => { Body::Enum(ref variants) => {
walk_list!(visitor, visit_variant, variants, &macro_input.generics); walk_list!(visitor, visit_variant, variants, &derive_input.generics);
} }
Body::Struct(ref variant_data) => { Body::Struct(ref variant_data) => {
visitor.visit_variant_data(variant_data, &macro_input.ident, &macro_input.generics); visitor.visit_variant_data(variant_data, &derive_input.ident, &derive_input.generics);
} }
} }
walk_list!(visitor, visit_attribute, &macro_input.attrs); walk_list!(visitor, visit_attribute, &derive_input.attrs);
} }
pub fn walk_variant<V>(visitor: &mut V, variant: &Variant, generics: &Generics) pub fn walk_variant<V>(visitor: &mut V, variant: &Variant, generics: &Generics)
@ -159,18 +206,17 @@ pub fn walk_ty<V: Visitor>(visitor: &mut V, ty: &Ty) {
} }
visitor.visit_path(path); visitor.visit_path(path);
} }
Ty::ObjectSum(ref inner, ref bounds) => {
visitor.visit_ty(inner);
walk_list!(visitor, visit_ty_param_bound, bounds);
}
Ty::Array(ref inner, ref len) => { Ty::Array(ref inner, ref len) => {
visitor.visit_ty(inner); visitor.visit_ty(inner);
visitor.visit_const_expr(len); visitor.visit_const_expr(len);
} }
Ty::PolyTraitRef(ref bounds) | Ty::TraitObject(ref bounds) |
Ty::ImplTrait(ref bounds) => { Ty::ImplTrait(ref bounds) => {
walk_list!(visitor, visit_ty_param_bound, bounds); walk_list!(visitor, visit_ty_param_bound, bounds);
} }
Ty::Mac(ref mac) => {
visitor.visit_mac(mac);
}
} }
} }
@ -240,6 +286,12 @@ pub fn walk_generics<V: Visitor>(visitor: &mut V, generics: &Generics) {
visitor.visit_lifetime(lifetime); visitor.visit_lifetime(lifetime);
walk_list!(visitor, visit_lifetime, bounds); walk_list!(visitor, visit_lifetime, bounds);
} }
WherePredicate::EqPredicate(WhereEqPredicate { ref lhs_ty,
ref rhs_ty,
.. }) => {
visitor.visit_ty(lhs_ty);
visitor.visit_ty(rhs_ty);
}
} }
} }
} }
@ -290,6 +342,440 @@ pub fn walk_const_expr<V: Visitor>(visitor: &mut V, len: &ConstExpr) {
ConstExpr::Paren(ref expr) => { ConstExpr::Paren(ref expr) => {
visitor.visit_const_expr(expr); visitor.visit_const_expr(expr);
} }
ConstExpr::Other(_) => {} ConstExpr::Other(ref other) => {
#[cfg(feature = "full")]
fn walk_other<V: Visitor>(visitor: &mut V, other: &Expr) {
visitor.visit_expr(other);
}
#[cfg(not(feature = "full"))]
fn walk_other<V: Visitor>(_: &mut V, _: &super::constant::Other) {}
walk_other(visitor, other);
}
}
}
pub fn walk_mac<V: Visitor>(visitor: &mut V, mac: &Mac) {
visitor.visit_path(&mac.path);
}
#[cfg(feature = "full")]
pub fn walk_crate<V: Visitor>(visitor: &mut V, _crate: &Crate) {
walk_list!(visitor, visit_attribute, &_crate.attrs);
walk_list!(visitor, visit_item, &_crate.items);
}
#[cfg(feature = "full")]
pub fn walk_item<V: Visitor>(visitor: &mut V, item: &Item) {
visitor.visit_ident(&item.ident);
walk_list!(visitor, visit_attribute, &item.attrs);
match item.node {
ItemKind::ExternCrate(ref ident) => {
walk_opt_ident(visitor, ident);
}
ItemKind::Use(ref view_path) => {
visitor.visit_view_path(view_path);
}
ItemKind::Static(ref ty, _, ref expr) |
ItemKind::Const(ref ty, ref expr) => {
visitor.visit_ty(ty);
visitor.visit_expr(expr);
}
ItemKind::Fn(ref decl, _, _, _, ref generics, ref body) => {
visitor.visit_fn_decl(decl);
visitor.visit_generics(generics);
walk_list!(visitor, visit_stmt, &body.stmts);
}
ItemKind::Mod(ref maybe_items) => {
if let Some(ref items) = *maybe_items {
walk_list!(visitor, visit_item, items);
}
}
ItemKind::ForeignMod(ref foreign_mod) => {
walk_list!(visitor, visit_foreign_item, &foreign_mod.items);
}
ItemKind::Ty(ref ty, ref generics) => {
visitor.visit_ty(ty);
visitor.visit_generics(generics);
}
ItemKind::Enum(ref variant, ref generics) => {
walk_list!(visitor, visit_variant, variant, generics);
}
ItemKind::Struct(ref variant_data, ref generics) |
ItemKind::Union(ref variant_data, ref generics) => {
visitor.visit_variant_data(variant_data, &item.ident, generics);
}
ItemKind::Trait(_, ref generics, ref bounds, ref trait_items) => {
visitor.visit_generics(generics);
walk_list!(visitor, visit_ty_param_bound, bounds);
walk_list!(visitor, visit_trait_item, trait_items);
}
ItemKind::DefaultImpl(_, ref path) => {
visitor.visit_path(path);
}
ItemKind::Impl(_, _, ref generics, ref maybe_path, ref ty, ref impl_items) => {
visitor.visit_generics(generics);
if let Some(ref path) = *maybe_path {
visitor.visit_path(path);
}
visitor.visit_ty(ty);
walk_list!(visitor, visit_impl_item, impl_items);
}
ItemKind::Mac(ref mac) => {
visitor.visit_mac(mac)
}
}
}
#[cfg(feature = "full")]
#[cfg_attr(feature = "cargo-clippy", allow(cyclomatic_complexity))]
pub fn walk_expr<V: Visitor>(visitor: &mut V, expr: &Expr) {
walk_list!(visitor, visit_attribute, &expr.attrs);
match expr.node {
ExprKind::InPlace(ref place, ref value) => {
visitor.visit_expr(place);
visitor.visit_expr(value);
}
ExprKind::Call(ref callee, ref args) => {
visitor.visit_expr(callee);
walk_list!(visitor, visit_expr, args);
}
ExprKind::MethodCall(ref name, ref ty_args, ref args) => {
visitor.visit_ident(name);
walk_list!(visitor, visit_ty, ty_args);
walk_list!(visitor, visit_expr, args);
}
ExprKind::Array(ref exprs) |
ExprKind::Tup(ref exprs) => {
walk_list!(visitor, visit_expr, exprs);
}
ExprKind::Unary(_, ref operand) => {
visitor.visit_expr(operand);
}
ExprKind::Lit(ref lit) => {
visitor.visit_lit(lit);
}
ExprKind::Cast(ref expr, ref ty) |
ExprKind::Type(ref expr, ref ty) => {
visitor.visit_expr(expr);
visitor.visit_ty(ty);
}
ExprKind::If(ref cond, ref cons, ref maybe_alt) => {
visitor.visit_expr(cond);
walk_list!(visitor, visit_stmt, &cons.stmts);
if let Some(ref alt) = *maybe_alt {
visitor.visit_expr(alt);
}
}
ExprKind::IfLet(ref pat, ref cond, ref cons, ref maybe_alt) => {
visitor.visit_pat(pat);
visitor.visit_expr(cond);
walk_list!(visitor, visit_stmt, &cons.stmts);
if let Some(ref alt) = *maybe_alt {
visitor.visit_expr(alt);
}
}
ExprKind::While(ref cond, ref body, ref label) => {
visitor.visit_expr(cond);
walk_list!(visitor, visit_stmt, &body.stmts);
walk_opt_ident(visitor, label);
}
ExprKind::WhileLet(ref pat, ref cond, ref body, ref label) => {
visitor.visit_pat(pat);
visitor.visit_expr(cond);
walk_list!(visitor, visit_stmt, &body.stmts);
walk_opt_ident(visitor, label);
}
ExprKind::ForLoop(ref pat, ref expr, ref body, ref label) => {
visitor.visit_pat(pat);
visitor.visit_expr(expr);
walk_list!(visitor, visit_stmt, &body.stmts);
walk_opt_ident(visitor, label);
}
ExprKind::Loop(ref body, ref label) => {
walk_list!(visitor, visit_stmt, &body.stmts);
walk_opt_ident(visitor, label);
}
ExprKind::Match(ref expr, ref arms) => {
visitor.visit_expr(expr);
for &Arm{ref attrs, ref pats, ref guard, ref body} in arms {
walk_list!(visitor, visit_attribute, attrs);
walk_list!(visitor, visit_pat, pats);
if let Some(ref guard) = *guard {
visitor.visit_expr(guard);
}
visitor.visit_expr(body);
}
}
ExprKind::Closure(_, ref decl, ref expr) => {
visitor.visit_fn_decl(decl);
visitor.visit_expr(expr);
}
ExprKind::Block(_, ref block) => {
walk_list!(visitor, visit_stmt, &block.stmts);
}
ExprKind::Binary(_, ref lhs, ref rhs) |
ExprKind::Assign(ref lhs, ref rhs) |
ExprKind::AssignOp(_, ref lhs, ref rhs) => {
visitor.visit_expr(lhs);
visitor.visit_expr(rhs);
}
ExprKind::Field(ref obj, ref field) => {
visitor.visit_expr(obj);
visitor.visit_ident(field);
}
ExprKind::TupField(ref obj, _) => {
visitor.visit_expr(obj);
}
ExprKind::Index(ref obj, ref idx) => {
visitor.visit_expr(obj);
visitor.visit_expr(idx);
}
ExprKind::Range(ref maybe_start, ref maybe_end, _) => {
if let Some(ref start) = *maybe_start {
visitor.visit_expr(start);
}
if let Some(ref end) = *maybe_end {
visitor.visit_expr(end);
}
}
ExprKind::Path(ref maybe_qself, ref path) => {
if let Some(ref qself) = *maybe_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path);
}
ExprKind::Break(ref maybe_label, ref maybe_expr) => {
walk_opt_ident(visitor, maybe_label);
if let Some(ref expr) = *maybe_expr {
visitor.visit_expr(expr);
}
}
ExprKind::Continue(ref maybe_label) => {
walk_opt_ident(visitor, maybe_label);
}
ExprKind::Ret(ref maybe_expr) => {
if let Some(ref expr) = *maybe_expr {
visitor.visit_expr(expr);
}
}
ExprKind::Mac(ref mac) => {
visitor.visit_mac(mac);
}
ExprKind::Struct(ref path, ref fields, ref maybe_base) => {
visitor.visit_path(path);
for &FieldValue{ref ident, ref expr, ..} in fields {
visitor.visit_ident(ident);
visitor.visit_expr(expr);
}
if let Some(ref base) = *maybe_base {
visitor.visit_expr(base);
}
}
ExprKind::Repeat(ref value, ref times) => {
visitor.visit_expr(value);
visitor.visit_expr(times);
}
ExprKind::Box(ref expr) |
ExprKind::AddrOf(_, ref expr) |
ExprKind::Paren(ref expr) |
ExprKind::Try(ref expr) => {
visitor.visit_expr(expr);
}
}
}
#[cfg(feature = "full")]
pub fn walk_foreign_item<V: Visitor>(visitor: &mut V, foreign_item: &ForeignItem) {
visitor.visit_ident(&foreign_item.ident);
walk_list!(visitor, visit_attribute, &foreign_item.attrs);
match foreign_item.node {
ForeignItemKind::Fn(ref decl, ref generics) => {
visitor.visit_fn_decl(decl);
visitor.visit_generics(generics);
}
ForeignItemKind::Static(ref ty, _) => {
visitor.visit_ty(ty);
}
}
}
#[cfg(feature = "full")]
pub fn walk_pat<V: Visitor>(visitor: &mut V, pat: &Pat) {
match *pat {
Pat::Wild => {}
Pat::Ident(_, ref ident, ref maybe_pat) => {
visitor.visit_ident(ident);
if let Some(ref pat) = *maybe_pat {
visitor.visit_pat(pat);
}
}
Pat::Struct(ref path, ref field_pats, _) => {
visitor.visit_path(path);
for &FieldPat{ref ident, ref pat, ..} in field_pats {
visitor.visit_ident(ident);
visitor.visit_pat(pat);
}
}
Pat::TupleStruct(ref path, ref pats, _) => {
visitor.visit_path(path);
walk_list!(visitor, visit_pat, pats);
}
Pat::Path(ref maybe_qself, ref path) => {
if let Some(ref qself) = *maybe_qself {
visitor.visit_ty(&qself.ty);
}
visitor.visit_path(path);
}
Pat::Tuple(ref pats, _) => {
walk_list!(visitor, visit_pat, pats);
}
Pat::Box(ref pat) |
Pat::Ref(ref pat, _) => {
visitor.visit_pat(pat);
}
Pat::Lit(ref expr) => {
visitor.visit_expr(expr);
}
Pat::Range(ref start, ref end) => {
visitor.visit_expr(start);
visitor.visit_expr(end);
}
Pat::Slice(ref start, ref maybe_mid, ref end) => {
walk_list!(visitor, visit_pat, start);
if let Some(ref mid) = *maybe_mid {
visitor.visit_pat(mid);
}
walk_list!(visitor, visit_pat, end);
}
Pat::Mac(ref mac) => {
visitor.visit_mac(mac);
}
}
}
#[cfg(feature = "full")]
pub fn walk_fn_decl<V: Visitor>(visitor: &mut V, fn_decl: &FnDecl) {
for input in &fn_decl.inputs {
match *input {
FnArg::SelfRef(_, _) | FnArg::SelfValue(_) => {}
FnArg::Captured(ref pat, ref ty) => {
visitor.visit_pat(pat);
visitor.visit_ty(ty);
}
FnArg::Ignored(ref ty) => {
visitor.visit_ty(ty);
}
}
}
visitor.visit_fn_ret_ty(&fn_decl.output);
}
#[cfg(feature = "full")]
pub fn walk_trait_item<V: Visitor>(visitor: &mut V, trait_item: &TraitItem) {
visitor.visit_ident(&trait_item.ident);
walk_list!(visitor, visit_attribute, &trait_item.attrs);
match trait_item.node {
TraitItemKind::Const(ref ty, ref maybe_expr) => {
visitor.visit_ty(ty);
if let Some(ref expr) = *maybe_expr {
visitor.visit_expr(expr);
}
}
TraitItemKind::Method(ref method_sig, ref maybe_block) => {
visitor.visit_method_sig(method_sig);
if let Some(ref block) = *maybe_block {
walk_list!(visitor, visit_stmt, &block.stmts);
}
}
TraitItemKind::Type(ref bounds, ref maybe_ty) => {
walk_list!(visitor, visit_ty_param_bound, bounds);
if let Some(ref ty) = *maybe_ty {
visitor.visit_ty(ty);
}
}
TraitItemKind::Macro(ref mac) => {
visitor.visit_mac(mac);
}
}
}
#[cfg(feature = "full")]
pub fn walk_impl_item<V: Visitor>(visitor: &mut V, impl_item: &ImplItem) {
visitor.visit_ident(&impl_item.ident);
walk_list!(visitor, visit_attribute, &impl_item.attrs);
match impl_item.node {
ImplItemKind::Const(ref ty, ref expr) => {
visitor.visit_ty(ty);
visitor.visit_expr(expr);
}
ImplItemKind::Method(ref method_sig, ref block) => {
visitor.visit_method_sig(method_sig);
walk_list!(visitor, visit_stmt, &block.stmts);
}
ImplItemKind::Type(ref ty) => {
visitor.visit_ty(ty);
}
ImplItemKind::Macro(ref mac) => {
visitor.visit_mac(mac);
}
}
}
#[cfg(feature = "full")]
pub fn walk_method_sig<V: Visitor>(visitor: &mut V, method_sig: &MethodSig) {
visitor.visit_fn_decl(&method_sig.decl);
visitor.visit_generics(&method_sig.generics);
}
#[cfg(feature = "full")]
pub fn walk_stmt<V: Visitor>(visitor: &mut V, stmt: &Stmt) {
match *stmt {
Stmt::Local(ref local) => {
visitor.visit_local(local);
}
Stmt::Item(ref item) => {
visitor.visit_item(item);
}
Stmt::Expr(ref expr) |
Stmt::Semi(ref expr) => {
visitor.visit_expr(expr);
}
Stmt::Mac(ref details) => {
let (ref mac, _, ref attrs) = **details;
visitor.visit_mac(mac);
walk_list!(visitor, visit_attribute, attrs);
}
}
}
#[cfg(feature = "full")]
pub fn walk_local<V: Visitor>(visitor: &mut V, local: &Local) {
visitor.visit_pat(&local.pat);
if let Some(ref ty) = local.ty {
visitor.visit_ty(ty);
}
if let Some(ref init) = local.init {
visitor.visit_expr(init);
}
walk_list!(visitor, visit_attribute, &local.attrs);
}
#[cfg(feature = "full")]
pub fn walk_view_path<V: Visitor>(visitor: &mut V, view_path: &ViewPath) {
match *view_path {
ViewPath::Simple(ref path, ref maybe_ident) => {
visitor.visit_path(path);
walk_opt_ident(visitor, maybe_ident);
}
ViewPath::Glob(ref path) => {
visitor.visit_path(path);
}
ViewPath::List(ref path, ref items) => {
visitor.visit_path(path);
for &PathListItem{ref name, ref rename} in items {
visitor.visit_ident(name);
walk_opt_ident(visitor, rename);
}
}
} }
} }
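
Because every Visitor method above has a default that delegates to the corresponding walk_* function, an implementation only overrides the nodes it cares about and the recursion comes for free. A small sketch using the parse_derive_input entry point added in lib.rs; the IdentCollector type and the example struct are illustrative only.

// Cargo features assumed: syn = { version = "0.11", features = ["visit", "parsing"] }
extern crate syn;

use syn::Ident;
use syn::visit::Visitor;

// Collects every identifier the walker reaches in a DeriveInput.
struct IdentCollector {
    idents: Vec<Ident>,
}

impl Visitor for IdentCollector {
    fn visit_ident(&mut self, ident: &Ident) {
        self.idents.push(ident.clone());
    }
}

fn main() {
    let ast = syn::parse_derive_input("struct Point { x: u32, y: u32 }").unwrap();
    let mut collector = IdentCollector { idents: Vec::new() };
    collector.visit_derive_input(&ast);
    println!("found {} idents", collector.idents.len());
}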


@ -0,0 +1 @@
{"files":{".cargo-ok":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","Cargo.toml":"ae60b51c5de386213c3f5c431763c10be0c35a94bdd75f3cca4e883376005205","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"e1e9c460f7c3ba2a28af2c6f140a2a9fbefdb252a8908986ed9e62b5dcf668bb","src/helper.rs":"073cba74f2667cd800bc4c3f3448814c005ab7922c671d26f3abd7f23231e045","src/lib.rs":"93c66afd80f9806ddac92b9d3a6382fad34c6b52cb3a8d37ae1067ae1ed5bec9","src/space.rs":"6fe05780dd47ed9d264bb436cbccf385f86bafc4debdd39bf250099b8210f660"},"package":"8fece1853fb872b0acdc3ff88f37c474018e125ef81cd4cb8c0ca515746b62ed"}

0
third_party/rust/synom/.cargo-ok vendored Normal file

20
third_party/rust/synom/Cargo.toml vendored Normal file

@ -0,0 +1,20 @@
[package]
name = "synom"
version = "0.11.0"
authors = ["David Tolnay <dtolnay@gmail.com>"]
license = "MIT/Apache-2.0"
description = "Stripped-down Nom parser used by Syn"
repository = "https://github.com/dtolnay/syn"
documentation = "https://docs.rs/synom/"
categories = ["development-tools::procedural-macro-helpers"]
readme = "../README.md"
include = ["Cargo.toml", "src/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
[dependencies]
unicode-xid = "0.0.4"
[dev-dependencies.syn]
version = "0.11"
path = ".."
features = ["parsing", "full"]
default-features = false

201
third_party/rust/synom/LICENSE-APACHE vendored Normal file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

third_party/rust/synom/LICENSE-MIT vendored Normal file

@ -0,0 +1,25 @@
Copyright (c) 2016 The Rust Project Developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

third_party/rust/synom/README.md vendored Normal file

@ -0,0 +1,199 @@
Nom parser for Rust source code
===============================
[![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
[![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/)
Parse Rust source code without a Syntex dependency, intended for use with
[Macros 1.1](https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md).
Designed for fast compile time.
- Compile time for `syn` (from scratch including all dependencies): **6 seconds**
- Compile time for the `syntex`/`quasi`/`aster` stack: **60+ seconds**
If you get stuck with Macros 1.1 I am happy to provide help even if the issue is
not related to syn. Please file a ticket in this repo.
## Usage with Macros 1.1
```toml
[dependencies]
syn = "0.11"
quote = "0.3"
[lib]
proc-macro = true
```
```rust
extern crate proc_macro;
use proc_macro::TokenStream;
extern crate syn;
#[macro_use]
extern crate quote;
#[proc_macro_derive(MyMacro)]
pub fn my_macro(input: TokenStream) -> TokenStream {
let source = input.to_string();
// Parse the string representation into a syntax tree
let ast = syn::parse_derive_input(&source).unwrap();
// Build the output, possibly using quasi-quotation
let expanded = quote! {
// ...
};
// Parse back to a token stream and return it
expanded.parse().unwrap()
}
```
## Complete example
Suppose we have the following simple trait which returns the number of fields in
a struct:
```rust
trait NumFields {
fn num_fields() -> usize;
}
```
A complete Macros 1.1 implementation of `#[derive(NumFields)]` based on `syn`
and [`quote`](https://github.com/dtolnay/quote) looks like this:
```rust
extern crate proc_macro;
use proc_macro::TokenStream;
extern crate syn;
#[macro_use]
extern crate quote;
#[proc_macro_derive(NumFields)]
pub fn num_fields(input: TokenStream) -> TokenStream {
let source = input.to_string();
// Parse the string representation into a syntax tree
let ast = syn::parse_derive_input(&source).unwrap();
// Build the output
let expanded = expand_num_fields(&ast);
// Return the generated impl as a TokenStream
expanded.parse().unwrap()
}
fn expand_num_fields(ast: &syn::DeriveInput) -> quote::Tokens {
let n = match ast.body {
syn::Body::Struct(ref data) => data.fields().len(),
syn::Body::Enum(_) => panic!("#[derive(NumFields)] can only be used with structs"),
};
// Used in the quasi-quotation below as `#name`
let name = &ast.ident;
// Helper is provided for handling complex generic types correctly and effortlessly
let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();
quote! {
// The generated impl
impl #impl_generics ::mycrate::NumFields for #name #ty_generics #where_clause {
fn num_fields() -> usize {
#n
}
}
}
}
```
## Testing
Macros 1.1 has a restriction that your proc-macro crate must export nothing but
`proc_macro_derive` functions, and also `proc_macro_derive` procedural macros
cannot be used from the same crate in which they are defined. These restrictions
may be lifted in the future but for now they make writing tests a bit trickier
than for other types of code.
In particular, you will not be able to write test functions like `#[test] fn
it_works() { ... }` in line with your code. Instead, either put tests in a
[`tests` directory](https://doc.rust-lang.org/book/testing.html#the-tests-directory)
or in a separate crate entirely.
Additionally, if your procedural macro implements a particular trait, that trait
must be defined in a separate crate from the procedural macro.
As a concrete example, suppose your procedural macro crate is called `my_derive`
and it implements a trait called `my_crate::MyTrait`. Your unit tests for the
procedural macro can go in `my_derive/tests/test.rs` or into a separate crate
`my_tests/tests/test.rs`. Either way the test would look something like this:
```rust
#[macro_use]
extern crate my_derive;
extern crate my_crate;
use my_crate::MyTrait;
#[test]
fn it_works() {
#[derive(MyTrait)]
struct S { /* ... */ }
/* test the thing */
}
```
## Debugging
When developing a procedural macro it can be helpful to look at what the
generated code looks like. Use `cargo rustc -- -Zunstable-options
--pretty=expanded` or the
[`cargo expand`](https://github.com/dtolnay/cargo-expand) subcommand.
To show the expanded code for some crate that uses your procedural macro, run
`cargo expand` from that crate. To show the expanded code for one of your own
test cases, run `cargo expand --test the_test_case` where the last argument is
the name of the test file without the `.rs` extension.
This write-up by Brandon W Maister discusses debugging in more detail:
[Debugging Rust's new Custom Derive
system](https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/).
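If `cargo expand` is not handy, a quick alternative while developing is to print the generated tokens from inside the derive function itself. This is only a minimal sketch, reusing the `NumFields` derive from the example above and assuming the `Display` impl on `quote::Tokens`; the `println!` is debug-only and would be removed before publishing:
```rust
#[proc_macro_derive(NumFields)]
pub fn num_fields(input: TokenStream) -> TokenStream {
    let source = input.to_string();
    let ast = syn::parse_derive_input(&source).unwrap();
    let expanded = expand_num_fields(&ast);
    // Debug-only: dump the generated impl into the build output.
    // `quote::Tokens` implements `Display`, so this prints Rust source.
    println!("generated impl:\n{}", expanded);
    expanded.parse().unwrap()
}
```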
## Optional features
Syn puts a lot of functionality behind optional features in order to optimize
compile time for the most common use cases. These are the available features and
their effect on compile time. Dependencies are included in the compile times.
Features | Compile time | Functionality
--- | --- | ---
*(none)* | 3 sec | The data structures representing the AST of Rust structs, enums, and types.
parsing | 6 sec | Parsing Rust source code containing structs and enums into an AST.
printing | 4 sec | Printing an AST of structs and enums as Rust source code.
**parsing, printing** | **6 sec** | **This is the default.** Parsing and printing of Rust structs and enums. This is typically what you want for implementing Macros 1.1 custom derives.
full | 4 sec | The data structures representing the full AST of all possible Rust code.
full, parsing | 9 sec | Parsing any valid Rust source code to an AST.
full, printing | 6 sec | Turning an AST into Rust source code.
full, parsing, printing | 11 sec | Parsing and printing any Rust syntax.
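For example, here is a minimal sketch that relies on the feature gating described above; it assumes a build of `syn` with `features = ["full", "parsing"]`, since the whole-crate parser used here is only compiled in under those features:
```rust
extern crate syn;

fn main() {
    // `parse_crate` requires the "full" and "parsing" features; it is not
    // available with the default feature set.
    let source = "fn main() { println!(\"hello\"); }";
    let krate = syn::parse_crate(source).expect("valid Rust source");
    println!("parsed {} top-level item(s)", krate.items.len());
}
```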
## License
Licensed under either of
* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
be dual licensed as above, without any additional terms or conditions.

third_party/rust/synom/src/helper.rs vendored Normal file

@ -0,0 +1,396 @@
use IResult;
use space::{skip_whitespace, word_break};
/// Parse a piece of punctuation like "+" or "+=".
///
/// See also `keyword!` for parsing keywords, which are subtly different from
/// punctuation.
///
/// - **Syntax:** `punct!("...")`
/// - **Output:** `&str`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// // Parse zero or more bangs.
/// named!(many_bangs -> Vec<&str>,
/// many0!(punct!("!"))
/// );
///
/// fn main() {
/// let input = "!! !";
/// let parsed = many_bangs(input).expect("bangs");
/// assert_eq!(parsed, ["!", "!", "!"]);
/// }
/// ```
#[macro_export]
macro_rules! punct {
($i:expr, $punct:expr) => {
$crate::helper::punct($i, $punct)
};
}
// Not public API.
#[doc(hidden)]
pub fn punct<'a>(input: &'a str, token: &'static str) -> IResult<&'a str, &'a str> {
let input = skip_whitespace(input);
if input.starts_with(token) {
IResult::Done(&input[token.len()..], token)
} else {
IResult::Error
}
}
/// Parse a keyword like "fn" or "struct".
///
/// See also `punct!` for parsing punctuation, which are subtly different from
/// keywords.
///
/// - **Syntax:** `keyword!("...")`
/// - **Output:** `&str`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use synom::IResult;
///
/// // Parse zero or more "bang" keywords.
/// named!(many_bangs -> Vec<&str>,
/// terminated!(
/// many0!(keyword!("bang")),
/// punct!(";")
/// )
/// );
///
/// fn main() {
/// let input = "bang bang bang;";
/// let parsed = many_bangs(input).expect("bangs");
/// assert_eq!(parsed, ["bang", "bang", "bang"]);
///
/// let input = "bangbang;";
/// let err = many_bangs(input);
/// assert_eq!(err, IResult::Error);
/// }
/// ```
#[macro_export]
macro_rules! keyword {
($i:expr, $keyword:expr) => {
$crate::helper::keyword($i, $keyword)
};
}
// Not public API.
#[doc(hidden)]
pub fn keyword<'a>(input: &'a str, token: &'static str) -> IResult<&'a str, &'a str> {
match punct(input, token) {
IResult::Done(rest, _) => {
match word_break(rest) {
IResult::Done(_, _) => IResult::Done(rest, token),
IResult::Error => IResult::Error,
}
}
IResult::Error => IResult::Error,
}
}
/// Turn a failed parse into `None` and a successful parse into `Some`.
///
/// - **Syntax:** `option!(THING)`
/// - **Output:** `Option<THING>`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// named!(maybe_bang -> Option<&str>, option!(punct!("!")));
///
/// fn main() {
/// let input = "!";
/// let parsed = maybe_bang(input).expect("maybe bang");
/// assert_eq!(parsed, Some("!"));
///
/// let input = "";
/// let parsed = maybe_bang(input).expect("maybe bang");
/// assert_eq!(parsed, None);
/// }
/// ```
#[macro_export]
macro_rules! option {
($i:expr, $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
$crate::IResult::Done(i, o) => $crate::IResult::Done(i, Some(o)),
$crate::IResult::Error => $crate::IResult::Done($i, None),
}
};
($i:expr, $f:expr) => {
option!($i, call!($f));
};
}
/// Turn a failed parse into an empty vector. The argument parser must itself
/// return a vector.
///
/// This is often more convenient than `option!(...)` when the argument produces
/// a vector.
///
/// - **Syntax:** `opt_vec!(THING)`
/// - **Output:** `THING`, which must be `Vec<T>`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::{Lifetime, Ty};
/// use syn::parse::{lifetime, ty};
///
/// named!(bound_lifetimes -> (Vec<Lifetime>, Ty), tuple!(
/// opt_vec!(do_parse!(
/// keyword!("for") >>
/// punct!("<") >>
/// lifetimes: terminated_list!(punct!(","), lifetime) >>
/// punct!(">") >>
/// (lifetimes)
/// )),
/// ty
/// ));
///
/// fn main() {
/// let input = "for<'a, 'b> fn(&'a A) -> &'b B";
/// let parsed = bound_lifetimes(input).expect("bound lifetimes");
/// assert_eq!(parsed.0, [Lifetime::new("'a"), Lifetime::new("'b")]);
/// println!("{:?}", parsed);
///
/// let input = "From<String>";
/// let parsed = bound_lifetimes(input).expect("bound lifetimes");
/// assert!(parsed.0.is_empty());
/// println!("{:?}", parsed);
/// }
/// ```
#[macro_export]
macro_rules! opt_vec {
($i:expr, $submac:ident!( $($args:tt)* )) => {
match $submac!($i, $($args)*) {
$crate::IResult::Done(i, o) => $crate::IResult::Done(i, o),
$crate::IResult::Error => $crate::IResult::Done($i, Vec::new()),
}
};
}
/// Parses nothing and always succeeds.
///
/// This can be useful as a fallthrough case in `alt!`.
///
/// - **Syntax:** `epsilon!()`
/// - **Output:** `()`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::Mutability;
///
/// named!(mutability -> Mutability, alt!(
/// keyword!("mut") => { |_| Mutability::Mutable }
/// |
/// epsilon!() => { |_| Mutability::Immutable }
/// ));
///
/// fn main() {
/// let input = "mut";
/// let parsed = mutability(input).expect("mutability");
/// assert_eq!(parsed, Mutability::Mutable);
///
/// let input = "";
/// let parsed = mutability(input).expect("mutability");
/// assert_eq!(parsed, Mutability::Immutable);
/// }
/// ```
#[macro_export]
macro_rules! epsilon {
($i:expr,) => {
$crate::IResult::Done($i, ())
};
}
/// Run a parser, binding the result to a name, and then evaluating an
/// expression.
///
/// Discards the result of the expression and parser.
///
/// - **Syntax:** `tap!(NAME : THING => EXPR)`
/// - **Output:** `()`
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::{Expr, ExprKind};
/// use syn::parse::expr;
///
/// named!(expr_with_arrow_call -> Expr, do_parse!(
/// mut e: expr >>
/// many0!(tap!(arg: tuple!(punct!("=>"), expr) => {
/// e = Expr {
/// node: ExprKind::Call(Box::new(e), vec![arg.1]),
/// attrs: Vec::new(),
/// };
/// })) >>
/// (e)
/// ));
///
/// fn main() {
/// let input = "something => argument1 => argument2";
///
/// let parsed = expr_with_arrow_call(input).expect("expr with arrow call");
///
/// println!("{:?}", parsed);
/// }
/// ```
#[doc(hidden)]
#[macro_export]
macro_rules! tap {
($i:expr, $name:ident : $submac:ident!( $($args:tt)* ) => $e:expr) => {
match $submac!($i, $($args)*) {
$crate::IResult::Done(i, o) => {
let $name = o;
$e;
$crate::IResult::Done(i, ())
}
$crate::IResult::Error => $crate::IResult::Error,
}
};
($i:expr, $name:ident : $f:expr => $e:expr) => {
tap!($i, $name: call!($f) => $e);
};
}
/// Zero or more values separated by some separator. Does not allow a trailing
/// separator.
///
/// The implementation requires that the first parameter is a `punct!` macro,
/// and the second is a named parser.
///
/// - **Syntax:** `separated_list!(punct!("..."), THING)`
/// - **Output:** `Vec<THING>`
///
/// You may also be looking for:
///
/// - `separated_nonempty_list!` - one or more values
/// - `terminated_list!` - zero or more, allows trailing separator
/// - `many0!` - zero or more, no separator
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::Expr;
/// use syn::parse::expr;
///
/// named!(expr_list -> Vec<Expr>,
/// separated_list!(punct!(","), expr)
/// );
///
/// fn main() {
/// let input = "1 + 1, things, Construct { this: thing }";
///
/// let parsed = expr_list(input).expect("expr list");
/// assert_eq!(parsed.len(), 3);
/// }
/// ```
#[macro_export]
macro_rules! separated_list {
($i:expr, punct!($sep:expr), $f:expr) => {
$crate::helper::separated_list($i, $sep, $f, false)
};
}
/// Zero or more values separated by some separator. A trailing separator is
/// allowed.
///
/// The implementation requires that the first parameter is a `punct!` macro,
/// and the second is a named parser.
///
/// - **Syntax:** `terminated_list!(punct!("..."), THING)`
/// - **Output:** `Vec<THING>`
///
/// You may also be looking for:
///
/// - `separated_list!` - zero or more, no trailing separator
/// - `separated_nonempty_list!` - one or more values
/// - `many0!` - zero or more, no separator
///
/// ```rust
/// extern crate syn;
/// #[macro_use] extern crate synom;
///
/// use syn::Expr;
/// use syn::parse::expr;
///
/// named!(expr_list -> Vec<Expr>,
/// terminated_list!(punct!(","), expr)
/// );
///
/// fn main() {
/// let input = "1 + 1, things, Construct { this: thing },";
///
/// let parsed = expr_list(input).expect("expr list");
/// assert_eq!(parsed.len(), 3);
/// }
/// ```
#[macro_export]
macro_rules! terminated_list {
($i:expr, punct!($sep:expr), $f:expr) => {
$crate::helper::separated_list($i, $sep, $f, true)
};
}
// Not public API.
#[doc(hidden)]
pub fn separated_list<'a, T>(mut input: &'a str,
sep: &'static str,
f: fn(&'a str) -> IResult<&'a str, T>,
terminated: bool)
-> IResult<&'a str, Vec<T>> {
let mut res = Vec::new();
// get the first element
match f(input) {
IResult::Error => IResult::Done(input, Vec::new()),
IResult::Done(i, o) => {
if i.len() == input.len() {
IResult::Error
} else {
res.push(o);
input = i;
// get the separator first
while let IResult::Done(i2, _) = punct(input, sep) {
if i2.len() == input.len() {
break;
}
// get the element next
if let IResult::Done(i3, o3) = f(i2) {
if i3.len() == i2.len() {
break;
}
res.push(o3);
input = i3;
} else {
break;
}
}
if terminated {
if let IResult::Done(after, _) = punct(input, sep) {
input = after;
}
}
IResult::Done(input, res)
}
}
}
}

third_party/rust/synom/src/lib.rs vendored Normal file

File diff suppressed because it is too large

third_party/rust/synom/src/space.rs vendored Normal file

@ -0,0 +1,99 @@
use IResult;
use unicode_xid::UnicodeXID;
pub fn whitespace(input: &str) -> IResult<&str, ()> {
if input.is_empty() {
return IResult::Error;
}
let bytes = input.as_bytes();
let mut i = 0;
while i < bytes.len() {
let s = &input[i..];
if bytes[i] == b'/' {
if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////")) &&
!s.starts_with("//!") {
if let Some(len) = s.find('\n') {
i += len + 1;
continue;
}
break;
} else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***")) &&
!s.starts_with("/*!") {
match block_comment(s) {
IResult::Done(_, com) => {
i += com.len();
continue;
}
IResult::Error => {
return IResult::Error;
}
}
}
}
match bytes[i] {
b' ' | 0x09...0x0d => {
i += 1;
continue;
}
b if b <= 0x7f => {}
_ => {
let ch = s.chars().next().unwrap();
if is_whitespace(ch) {
i += ch.len_utf8();
continue;
}
}
}
return if i > 0 {
IResult::Done(s, ())
} else {
IResult::Error
};
}
IResult::Done("", ())
}
pub fn block_comment(input: &str) -> IResult<&str, &str> {
if !input.starts_with("/*") {
return IResult::Error;
}
let mut depth = 0;
let bytes = input.as_bytes();
let mut i = 0;
let upper = bytes.len() - 1;
while i < upper {
if bytes[i] == b'/' && bytes[i + 1] == b'*' {
depth += 1;
i += 1; // eat '*'
} else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
depth -= 1;
if depth == 0 {
return IResult::Done(&input[i + 2..], &input[..i + 2]);
}
i += 1; // eat '/'
}
i += 1;
}
IResult::Error
}
pub fn word_break(input: &str) -> IResult<&str, ()> {
match input.chars().next() {
Some(ch) if UnicodeXID::is_xid_continue(ch) => IResult::Error,
Some(_) | None => IResult::Done(input, ()),
}
}
pub fn skip_whitespace(input: &str) -> &str {
match whitespace(input) {
IResult::Done(rest, _) => rest,
IResult::Error => input,
}
}
fn is_whitespace(ch: char) -> bool {
// Rust treats left-to-right mark and right-to-left mark as whitespace
ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
}


@ -207,12 +207,24 @@ dependencies = [
 [[package]]
 name = "cssparser"
-version = "0.10.0"
+version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
+ "cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "cssparser-macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
@ -347,7 +359,7 @@ version = "0.0.1"
 dependencies = [
  "app_units 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -752,7 +764,8 @@ name = "selectors"
 version = "0.18.0"
 dependencies = [
  "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@ -883,7 +896,8 @@ dependencies = [
  "bindgen 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "encoding 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -900,7 +914,6 @@ dependencies = [
  "parking_lot 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "pdqsort 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
  "rayon 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "selectors 0.18.0",
@ -918,7 +931,8 @@ name = "style_traits"
 version = "0.0.1"
 dependencies = [
  "app_units 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@ -931,6 +945,24 @@ dependencies = [
  "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "syn"
+version = "0.11.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synom 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "synom"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "syntex"
 version = "0.54.0"
@ -1250,7 +1282,8 @@ dependencies = [
 "checksum core-foundation-sys 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "41115a6aa5d3e1e5ef98148373f25971d1fad53818553f216495f9e67e90a624"
 "checksum core-graphics 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ead017dcf77f503dc991f6b52de6084eeea60a94b0a652baa9bf88654a28e83f"
 "checksum core-text 4.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e9719616a10f717628e074744f8c55df7b450f7a34d29c196d14f4498aad05d"
-"checksum cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "693cc9c8d3d0779ff60ff6b8b73497bda2c7151b6489c3a9c1f95f5d4f4497e5"
+"checksum cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d8352ccd22c5ebab558d179e32f6d3dd26eed30252f8420d636bfae5052eb50e"
+"checksum cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a85e1452f40a50777c8424fa7fcaa7dd7074c7bc5419014fbffe7ea3d750dee8"
 "checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
 "checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
 "checksum encoding 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec"
@ -1319,6 +1352,8 @@ dependencies = [
 "checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"
 "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
 "checksum syn 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)" = "58fd09df59565db3399efbba34ba8a2fec1307511ebd245d0061ff9d42691673"
+"checksum syn 0.11.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0e28da8d02d75d1e58b89258e0741128f0b0d8a8309fb5c627be0fbd37a76c67"
+"checksum synom 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8fece1853fb872b0acdc3ff88f37c474018e125ef81cd4cb8c0ca515746b62ed"
 "checksum syntex 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb3f52553a966675982404dc34028291b347e0c9a9c0b0b34f2da6be8a0443f8"
 "checksum syntex_errors 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dee2f6e49c075f71332bb775219d5982bee6732d26227fa1ae1b53cdb12f5cc5"
 "checksum syntex_pos 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df3921c7945dfb9ffc53aa35adb2cf4313b5ab5f079c3619b3d4eb82a0efc2b"


@ -205,12 +205,24 @@ dependencies = [
 [[package]]
 name = "cssparser"
-version = "0.10.0"
+version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
+ "cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "cssparser-macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
@ -345,7 +357,7 @@ version = "0.0.1"
 dependencies = [
  "app_units 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "atomic_refcell 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -739,7 +751,8 @@ name = "selectors"
 version = "0.18.0"
 dependencies = [
  "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@ -870,7 +883,8 @@ dependencies = [
  "bindgen 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "encoding 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -887,7 +901,6 @@ dependencies = [
  "parking_lot 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "pdqsort 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
- "phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
  "rayon 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "selectors 0.18.0",
@ -905,7 +918,8 @@ name = "style_traits"
 version = "0.0.1"
 dependencies = [
  "app_units 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@ -918,6 +932,24 @@ dependencies = [
  "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "syn"
+version = "0.11.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synom 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "synom"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "syntex"
 version = "0.54.0"
@ -1237,7 +1269,8 @@ dependencies = [
 "checksum core-foundation-sys 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "41115a6aa5d3e1e5ef98148373f25971d1fad53818553f216495f9e67e90a624"
 "checksum core-graphics 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ead017dcf77f503dc991f6b52de6084eeea60a94b0a652baa9bf88654a28e83f"
 "checksum core-text 4.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e9719616a10f717628e074744f8c55df7b450f7a34d29c196d14f4498aad05d"
-"checksum cssparser 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "693cc9c8d3d0779ff60ff6b8b73497bda2c7151b6489c3a9c1f95f5d4f4497e5"
+"checksum cssparser 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d8352ccd22c5ebab558d179e32f6d3dd26eed30252f8420d636bfae5052eb50e"
+"checksum cssparser-macros 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a85e1452f40a50777c8424fa7fcaa7dd7074c7bc5419014fbffe7ea3d750dee8"
 "checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
 "checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
 "checksum encoding 0.2.33 (registry+https://github.com/rust-lang/crates.io-index)" = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec"
@ -1306,6 +1339,8 @@ dependencies = [
 "checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"
 "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
 "checksum syn 0.10.8 (registry+https://github.com/rust-lang/crates.io-index)" = "58fd09df59565db3399efbba34ba8a2fec1307511ebd245d0061ff9d42691673"
+"checksum syn 0.11.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0e28da8d02d75d1e58b89258e0741128f0b0d8a8309fb5c627be0fbd37a76c67"
+"checksum synom 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8fece1853fb872b0acdc3ff88f37c474018e125ef81cd4cb8c0ca515746b62ed"
 "checksum syntex 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb3f52553a966675982404dc34028291b347e0c9a9c0b0b34f2da6be8a0443f8"
 "checksum syntex_errors 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dee2f6e49c075f71332bb775219d5982bee6732d26227fa1ae1b53cdb12f5cc5"
 "checksum syntex_pos 0.54.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df3921c7945dfb9ffc53aa35adb2cf4313b5ab5f079c3619b3d4eb82a0efc2b"