diff --git a/.cargo-ok b/.cargo-ok deleted file mode 100644 index b5754e2..0000000 --- a/.cargo-ok +++ /dev/null @@ -1 +0,0 @@ -ok \ No newline at end of file diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json deleted file mode 100644 index 0448b81..0000000 --- a/.cargo_vcs_info.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "git": { - "sha1": "ea778eb80729a01094b033c45633122052d96fd4" - }, - "path_in_vcs": "" -} \ No newline at end of file diff --git a/.clippy.toml b/.clippy.toml new file mode 100644 index 0000000..3d30690 --- /dev/null +++ b/.clippy.toml @@ -0,0 +1 @@ +msrv = "1.31.0" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c51ca9c..3e1bbba 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,14 +24,13 @@ jobs: strategy: fail-fast: false matrix: - rust: [1.56.0, stable, beta] + rust: [1.31.0, stable, beta] timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@master with: toolchain: ${{matrix.rust}} - components: rust-src - run: cargo test - run: cargo test --no-default-features - run: cargo test --features span-locations @@ -51,15 +50,10 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@nightly - with: - components: rust-src - name: Enable type layout randomization run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV - - run: cargo check - env: - RUSTFLAGS: --cfg procmacro2_nightly_testing ${{env.RUSTFLAGS}} - run: cargo test - run: cargo test --no-default-features - run: cargo test --no-default-features --test features -- --ignored make_sure_no_proc_macro # run the ignored test to make sure the `proc-macro` feature is disabled @@ -76,19 +70,8 @@ jobs: - name: RUSTFLAGS='-Z allow-features=' cargo test run: cargo test env: - RUSTFLAGS: -Z allow-features= --cfg procmacro2_backtrace ${{env.RUSTFLAGS}} - - minimal: - name: Minimal versions - needs: pre_ci - if: needs.pre_ci.outputs.continue - runs-on: ubuntu-latest - timeout-minutes: 45 - steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@nightly - - run: cargo generate-lockfile -Z minimal-versions - - run: cargo check --locked + RUSTFLAGS: -Z allow-features= ${{env.RUSTFLAGS}} + - run: cargo update -Z minimal-versions && cargo build webassembly: name: WebAssembly @@ -97,11 +80,10 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@nightly with: target: wasm32-unknown-unknown - components: rust-src - run: cargo test --target wasm32-unknown-unknown --no-run fuzz: @@ -111,35 +93,10 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@nightly - with: - components: rust-src - uses: dtolnay/install@cargo-fuzz - run: cargo fuzz check - - run: cargo check --no-default-features --features afl - working-directory: fuzz - - uses: dtolnay/install@honggfuzz - - run: sudo apt-get update # https://github.com/actions/runner-images/issues/8953 - - run: sudo apt-get install binutils-dev libunwind-dev - - run: cargo hfuzz build --no-default-features --features honggfuzz - working-directory: fuzz - - doc: - name: Documentation - needs: pre_ci - if: needs.pre_ci.outputs.continue - runs-on: ubuntu-latest - timeout-minutes: 45 - env: - RUSTDOCFLAGS: -Dwarnings - steps: - - uses: actions/checkout@v4 - - uses: 
dtolnay/rust-toolchain@nightly - with: - components: rust-src - - uses: dtolnay/install@cargo-docs-rs - - run: cargo docs-rs clippy: name: Clippy @@ -147,10 +104,8 @@ jobs: if: github.event_name != 'pull_request' timeout-minutes: 45 steps: - - uses: actions/checkout@v4 - - uses: dtolnay/rust-toolchain@nightly - with: - components: clippy, rust-src + - uses: actions/checkout@v3 + - uses: dtolnay/rust-toolchain@clippy - run: cargo clippy --tests -- -Dclippy::all -Dclippy::pedantic - run: cargo clippy --tests --all-features -- -Dclippy::all -Dclippy::pedantic @@ -161,9 +116,8 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@miri - - run: cargo miri setup - run: cargo miri test env: MIRIFLAGS: -Zmiri-strict-provenance @@ -174,7 +128,7 @@ jobs: if: github.event_name != 'pull_request' timeout-minutes: 45 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v3 - uses: dtolnay/install@cargo-outdated - run: cargo outdated --workspace --exit-code 1 - run: cargo outdated --manifest-path fuzz/Cargo.toml --exit-code 1 diff --git a/BUILD.gn b/BUILD.gn index fa6a97a..65be253 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -19,8 +19,8 @@ ohos_cargo_crate("lib") { crate_root = "src/lib.rs" sources = [ "src/lib.rs" ] - edition = "2021" - cargo_pkg_version = "1.0.76" + edition = "2018" + cargo_pkg_version = "1.0.51" cargo_pkg_authors = "David Tolnay , Alex Crichton " cargo_pkg_name = "proc-macro2" diff --git a/Cargo.toml b/Cargo.toml index 280bbdf..ec3bbdb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,67 +1,59 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. - [package] -edition = "2021" -rust-version = "1.56" name = "proc-macro2" -version = "1.0.76" -authors = [ - "David Tolnay ", - "Alex Crichton ", -] +version = "1.0.53" # remember to update html_root_url +authors = ["David Tolnay ", "Alex Crichton "] autobenches = false +categories = ["development-tools::procedural-macro-helpers"] description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case." 
documentation = "https://docs.rs/proc-macro2" -readme = "README.md" -keywords = [ - "macros", - "syn", -] -categories = ["development-tools::procedural-macro-helpers"] +edition = "2018" +keywords = ["macros", "syn"] license = "MIT OR Apache-2.0" repository = "https://github.com/dtolnay/proc-macro2" +rust-version = "1.31" [package.metadata.docs.rs] -rustc-args = [ - "--cfg", - "procmacro2_semver_exempt", -] -rustdoc-args = [ - "--cfg", - "procmacro2_semver_exempt", - "--cfg", - "doc_cfg", - "--generate-link-to-definition", -] +rustc-args = ["--cfg", "procmacro2_semver_exempt"] +rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg"] targets = ["x86_64-unknown-linux-gnu"] [package.metadata.playground] features = ["span-locations"] +[dependencies] +unicode-ident = "1.0" + +[dev-dependencies] +quote = { version = "1.0", default_features = false } +rustversion = "1" + +[features] +proc-macro = [] +default = ["proc-macro"] + +# Expose methods Span::start and Span::end which give the line/column location +# of a token. +span-locations = [] + +# This feature no longer means anything. +nightly = [] + [lib] doc-scrape-examples = false -[dependencies.unicode-ident] -version = "1.0" +[workspace] +members = ["benches/bench-libproc-macro", "tests/ui"] -[dev-dependencies.quote] -version = "1.0" -default_features = false - -[dev-dependencies.rustversion] -version = "1" - -[features] -default = ["proc-macro"] -nightly = [] -proc-macro = [] -span-locations = [] +[patch.crates-io] +# Our doc tests depend on quote which depends on proc-macro2. Without this line, +# the proc-macro2 dependency of quote would be the released version of +# proc-macro2. Quote would implement its traits for types from that proc-macro2, +# meaning impls would be missing when tested against types from the local +# proc-macro2. +# +# GitHub Actions builds that are in progress at the time that you publish may +# spuriously fail. This is because they'll be building a local proc-macro2 which +# carries the second-most-recent version number, pulling in quote which resolves +# to a dependency on the just-published most recent version number. Thus the +# patch will fail to apply because the version numbers are different. +proc-macro2 = { path = "." } diff --git a/Cargo.toml.orig b/Cargo.toml.orig deleted file mode 100644 index 6f7c786..0000000 --- a/Cargo.toml.orig +++ /dev/null @@ -1,59 +0,0 @@ -[package] -name = "proc-macro2" -version = "1.0.76" -authors = ["David Tolnay ", "Alex Crichton "] -autobenches = false -categories = ["development-tools::procedural-macro-helpers"] -description = "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case." -documentation = "https://docs.rs/proc-macro2" -edition = "2021" -keywords = ["macros", "syn"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/dtolnay/proc-macro2" -rust-version = "1.56" - -[package.metadata.docs.rs] -rustc-args = ["--cfg", "procmacro2_semver_exempt"] -rustdoc-args = ["--cfg", "procmacro2_semver_exempt", "--cfg", "doc_cfg", "--generate-link-to-definition"] -targets = ["x86_64-unknown-linux-gnu"] - -[package.metadata.playground] -features = ["span-locations"] - -[dependencies] -unicode-ident = "1.0" - -[dev-dependencies] -quote = { version = "1.0", default_features = false } -rustversion = "1" - -[features] -proc-macro = [] -default = ["proc-macro"] - -# Expose methods Span::start and Span::end which give the line/column location -# of a token. 
-span-locations = [] - -# This feature no longer means anything. -nightly = [] - -[lib] -doc-scrape-examples = false - -[workspace] -members = ["benches/bench-libproc-macro", "tests/ui"] - -[patch.crates-io] -# Our doc tests depend on quote which depends on proc-macro2. Without this line, -# the proc-macro2 dependency of quote would be the released version of -# proc-macro2. Quote would implement its traits for types from that proc-macro2, -# meaning impls would be missing when tested against types from the local -# proc-macro2. -# -# GitHub Actions builds that are in progress at the time that you publish may -# spuriously fail. This is because they'll be building a local proc-macro2 which -# carries the second-most-recent version number, pulling in quote which resolves -# to a dependency on the just-published most recent version number. Thus the -# patch will fail to apply because the version numbers are different. -proc-macro2 = { path = "." } diff --git a/README.OpenSource b/README.OpenSource index 4100b6f..db7f62b 100644 --- a/README.OpenSource +++ b/README.OpenSource @@ -3,7 +3,7 @@ "Name": "proc-macro2", "License": "Apache License V2.0, MIT", "License File": "LICENSE-APACHE, LICENSE-MIT", - "Version Number": "1.0.76", + "Version Number": "1.0.53", "Owner": "fangting12@huawei.com", "Upstream URL": "https://github.com/dtolnay/proc-macro2", "Description": "A Rust library that provides support for error handling in procedural macros." diff --git a/README.md b/README.md index 3a29ce8..131ba51 100644 --- a/README.md +++ b/README.md @@ -52,7 +52,7 @@ pub fn my_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { If parsing with [Syn], you'll use [`parse_macro_input!`] instead to propagate parse errors correctly back to the compiler when parsing fails. -[`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html +[`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html ## Unstable features @@ -62,7 +62,7 @@ proc-macro2 by default. To opt into the additional APIs available in the most recent nightly compiler, the `procmacro2_semver_exempt` config flag must be passed to rustc. We will -polyfill those nightly-only APIs back to Rust 1.56.0. As these are unstable APIs +polyfill those nightly-only APIs back to Rust 1.31.0. As these are unstable APIs that track the nightly compiler, minor versions of proc-macro2 may make breaking changes to them at any time. 
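
The hunks above describe the `span-locations` feature (which exposes `Span::start` and `Span::end`) and the `procmacro2_semver_exempt` opt-in. A minimal usage sketch, not part of the patch, assuming a consumer crate that depends on proc-macro2 with the `span-locations` feature enabled:

```rust
use proc_macro2::TokenStream;

fn main() {
    // Outside a macro invocation the fallback implementation is used, and with
    // the "span-locations" feature it tracks line/column data, so
    // Span::start()/Span::end() are available.
    let tokens: TokenStream = "fn answer() -> u32 { 42 }".parse().unwrap();
    for tt in tokens {
        let start = tt.span().start();
        let end = tt.span().end();
        println!(
            "{:<12} {}:{}..{}:{}",
            tt.to_string(),
            start.line, start.column, end.line, end.column
        );
    }
}
```
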
diff --git a/benches/bench-libproc-macro/Cargo.toml b/benches/bench-libproc-macro/Cargo.toml
new file mode 100644
index 0000000..5f8e56b
--- /dev/null
+++ b/benches/bench-libproc-macro/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "bench-libproc-macro"
+version = "0.0.0"
+authors = ["David Tolnay <dtolnay@gmail.com>"]
+edition = "2018"
+publish = false
+
+[lib]
+path = "lib.rs"
+proc-macro = true
+
+[[bin]]
+name = "bench-libproc-macro"
+path = "main.rs"
diff --git a/benches/bench-libproc-macro/README.md b/benches/bench-libproc-macro/README.md
new file mode 100644
index 0000000..e286ddd
--- /dev/null
+++ b/benches/bench-libproc-macro/README.md
@@ -0,0 +1,10 @@
+Example output:
+
+```console
+$ cargo check --release
+
+   Compiling bench-libproc-macro v0.0.0
+STRING: 37 millis
+TOKENSTREAM: 276 millis
+    Finished release [optimized] target(s) in 1.16s
+```
diff --git a/benches/bench-libproc-macro/lib.rs b/benches/bench-libproc-macro/lib.rs
new file mode 100644
index 0000000..44c2e68
--- /dev/null
+++ b/benches/bench-libproc-macro/lib.rs
@@ -0,0 +1,49 @@
+extern crate proc_macro;
+
+use proc_macro::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::iter::once;
+use std::time::Instant;
+
+const N: u32 = 20000;
+
+#[proc_macro]
+pub fn bench(_input: TokenStream) -> TokenStream {
+    let start = Instant::now();
+    let mut string = String::new();
+    for _ in 0..N {
+        string += "core";
+        string += ":";
+        string += ":";
+        string += "option";
+        string += ":";
+        string += ":";
+        string += "Option";
+        string += ":";
+        string += ":";
+        string += "None";
+        string += ",";
+    }
+    string.parse::<TokenStream>().unwrap();
+    eprintln!("STRING: {} millis", start.elapsed().as_millis());
+
+    let start = Instant::now();
+    let span = Span::call_site();
+    let mut tokens = TokenStream::new();
+    for _ in 0..N {
+        // Similar to what is emitted by quote.
+        tokens.extend(once(TokenTree::Ident(Ident::new("core", span))));
+        tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Joint))));
+        tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Alone))));
+        tokens.extend(once(TokenTree::Ident(Ident::new("option", span))));
+        tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Joint))));
+        tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Alone))));
+        tokens.extend(once(TokenTree::Ident(Ident::new("Option", span))));
+        tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Joint))));
+        tokens.extend(once(TokenTree::Punct(Punct::new(':', Spacing::Alone))));
+        tokens.extend(once(TokenTree::Ident(Ident::new("None", span))));
+        tokens.extend(once(TokenTree::Punct(Punct::new(',', Spacing::Joint))));
+    }
+    eprintln!("TOKENSTREAM: {} millis", start.elapsed().as_millis());
+
+    TokenStream::new()
+}
diff --git a/benches/bench-libproc-macro/main.rs b/benches/bench-libproc-macro/main.rs
new file mode 100644
index 0000000..34eedf6
--- /dev/null
+++ b/benches/bench-libproc-macro/main.rs
@@ -0,0 +1,3 @@
+bench_libproc_macro::bench!();
+
+fn main() {}
diff --git a/build.rs b/build.rs
index 3347f87..59505a5 100644
--- a/build.rs
+++ b/build.rs
@@ -1,5 +1,11 @@
 // rustc-cfg emitted by the build script:
 //
+// "use_proc_macro"
+//     Link to extern crate proc_macro. Available on any compiler and any target
+//     except wasm32. Requires "proc-macro" Cargo cfg to be enabled (default is
+//     enabled). On wasm32 we never link to proc_macro even if "proc-macro" cfg
+//     is enabled.
+//
 // "wrap_proc_macro"
 //     Wrap types from libproc_macro rather than polyfilling the whole API.
// Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set, @@ -35,14 +41,21 @@ // 1.57+. use std::env; -use std::ffi::OsString; -use std::path::Path; -use std::process::{self, Command, Stdio}; +use std::process::{self, Command}; use std::str; -use std::u32; fn main() { - let rustc = rustc_minor_version().unwrap_or(u32::MAX); + println!("cargo:rerun-if-changed=build.rs"); + + let version = match rustc_version() { + Some(version) => version, + None => return, + }; + + if version.minor < 31 { + eprintln!("Minimum supported rustc version is 1.31"); + process::exit(1); + } let docs_rs = env::var_os("DOCS_RS").is_some(); let semver_exempt = cfg!(procmacro2_semver_exempt) || docs_rs; @@ -55,148 +68,120 @@ fn main() { println!("cargo:rustc-cfg=span_locations"); } - if rustc < 57 { + if version.minor < 32 { + println!("cargo:rustc-cfg=no_libprocmacro_unwind_safe"); + } + + if version.minor < 39 { + println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard"); + } + + if version.minor < 44 { + println!("cargo:rustc-cfg=no_lexerror_display"); + } + + if version.minor < 45 { + println!("cargo:rustc-cfg=no_hygiene"); + } + + if version.minor < 47 { + println!("cargo:rustc-cfg=no_ident_new_raw"); + } + + if version.minor < 54 { + println!("cargo:rustc-cfg=no_literal_from_str"); + } + + if version.minor < 55 { + println!("cargo:rustc-cfg=no_group_open_close"); + } + + if version.minor < 57 { println!("cargo:rustc-cfg=no_is_available"); } - if rustc < 66 { + if version.minor < 66 { println!("cargo:rustc-cfg=no_source_text"); } - if !cfg!(feature = "proc-macro") { - println!("cargo:rerun-if-changed=build.rs"); + let target = env::var("TARGET").unwrap(); + if !enable_use_proc_macro(&target) { return; } - println!("cargo:rerun-if-changed=build/probe.rs"); + println!("cargo:rustc-cfg=use_proc_macro"); - let proc_macro_span; - let consider_rustc_bootstrap; - if compile_probe(false) { - // This is a nightly or dev compiler, so it supports unstable features - // regardless of RUSTC_BOOTSTRAP. No need to rerun build script if - // RUSTC_BOOTSTRAP is changed. - proc_macro_span = true; - consider_rustc_bootstrap = false; - } else if let Some(rustc_bootstrap) = env::var_os("RUSTC_BOOTSTRAP") { - if compile_probe(true) { - // This is a stable or beta compiler for which the user has set - // RUSTC_BOOTSTRAP to turn on unstable features. Rerun build script - // if they change it. - proc_macro_span = true; - consider_rustc_bootstrap = true; - } else if rustc_bootstrap == "1" { - // This compiler does not support the proc macro Span API in the - // form that proc-macro2 expects. No need to pay attention to - // RUSTC_BOOTSTRAP. - proc_macro_span = false; - consider_rustc_bootstrap = false; - } else { - // This is a stable or beta compiler for which RUSTC_BOOTSTRAP is - // set to restrict the use of unstable features by this crate. - proc_macro_span = false; - consider_rustc_bootstrap = true; - } - } else { - // Without RUSTC_BOOTSTRAP, this compiler does not support the proc - // macro Span API in the form that proc-macro2 expects, but try again if - // the user turns on unstable features. 
- proc_macro_span = false; - consider_rustc_bootstrap = true; - } - - if proc_macro_span || !semver_exempt { + if version.nightly || !semver_exempt { println!("cargo:rustc-cfg=wrap_proc_macro"); } - if proc_macro_span { + if version.nightly + && feature_allowed("proc_macro_span") + && feature_allowed("proc_macro_span_shrink") + { println!("cargo:rustc-cfg=proc_macro_span"); } - if semver_exempt && proc_macro_span { + if semver_exempt && version.nightly { println!("cargo:rustc-cfg=super_unstable"); } - - if consider_rustc_bootstrap { - println!("cargo:rerun-if-env-changed=RUSTC_BOOTSTRAP"); - } } -fn compile_probe(rustc_bootstrap: bool) -> bool { - if env::var_os("RUSTC_STAGE").is_some() { - // We are running inside rustc bootstrap. This is a highly non-standard - // environment with issues such as: - // - // https://github.com/rust-lang/cargo/issues/11138 - // https://github.com/rust-lang/rust/issues/114839 - // - // Let's just not use nightly features here. +fn enable_use_proc_macro(target: &str) -> bool { + // wasm targets don't have the `proc_macro` crate, disable this feature. + if target.contains("wasm32") { return false; } - let rustc = cargo_env_var("RUSTC"); - let out_dir = cargo_env_var("OUT_DIR"); - let probefile = Path::new("build").join("probe.rs"); - - // Make sure to pick up Cargo rustc configuration. - let mut cmd = if let Some(wrapper) = env::var_os("RUSTC_WRAPPER") { - let mut cmd = Command::new(wrapper); - // The wrapper's first argument is supposed to be the path to rustc. - cmd.arg(rustc); - cmd - } else { - Command::new(rustc) - }; - - if !rustc_bootstrap { - cmd.env_remove("RUSTC_BOOTSTRAP"); - } - - cmd.stderr(Stdio::null()) - .arg("--edition=2021") - .arg("--crate-name=proc_macro2") - .arg("--crate-type=lib") - .arg("--emit=dep-info,metadata") - .arg("--out-dir") - .arg(out_dir) - .arg(probefile); - - if let Some(target) = env::var_os("TARGET") { - cmd.arg("--target").arg(target); - } - - // If Cargo wants to set RUSTFLAGS, use that. - if let Ok(rustflags) = env::var("CARGO_ENCODED_RUSTFLAGS") { - if !rustflags.is_empty() { - for arg in rustflags.split('\x1f') { - cmd.arg(arg); - } - } - } - - match cmd.status() { - Ok(status) => status.success(), - Err(_) => false, - } + // Otherwise, only enable it if our feature is actually enabled. 
+ cfg!(feature = "proc-macro") } -fn rustc_minor_version() -> Option { - let rustc = cargo_env_var("RUSTC"); +struct RustcVersion { + minor: u32, + nightly: bool, +} + +fn rustc_version() -> Option { + let rustc = env::var_os("RUSTC")?; let output = Command::new(rustc).arg("--version").output().ok()?; let version = str::from_utf8(&output.stdout).ok()?; + let nightly = version.contains("nightly") || version.contains("dev"); let mut pieces = version.split('.'); if pieces.next() != Some("rustc 1") { return None; } - pieces.next()?.parse().ok() + let minor = pieces.next()?.parse().ok()?; + Some(RustcVersion { minor, nightly }) } -fn cargo_env_var(key: &str) -> OsString { - env::var_os(key).unwrap_or_else(|| { - eprintln!( - "Environment variable ${} is not set during execution of build script", - key, - ); - process::exit(1); - }) +fn feature_allowed(feature: &str) -> bool { + // Recognized formats: + // + // -Z allow-features=feature1,feature2 + // + // -Zallow-features=feature1,feature2 + + let flags_var; + let flags_var_string; + let flags = if let Some(encoded_rustflags) = env::var_os("CARGO_ENCODED_RUSTFLAGS") { + flags_var = encoded_rustflags; + flags_var_string = flags_var.to_string_lossy(); + flags_var_string.split('\x1f') + } else { + return true; + }; + + for mut flag in flags { + if flag.starts_with("-Z") { + flag = &flag["-Z".len()..]; + } + if flag.starts_with("allow-features=") { + flag = &flag["allow-features=".len()..]; + return flag.split(',').any(|allowed| allowed == feature); + } + } + + // No allow-features= flag, allowed by default. + true } diff --git a/build/probe.rs b/build/probe.rs deleted file mode 100644 index 5afa13a..0000000 --- a/build/probe.rs +++ /dev/null @@ -1,21 +0,0 @@ -// This code exercises the surface area that we expect of Span's unstable API. -// If the current toolchain is able to compile it, then proc-macro2 is able to -// offer these APIs too. - -#![feature(proc_macro_span)] - -extern crate proc_macro; - -use core::ops::RangeBounds; -use proc_macro::{Literal, Span}; - -pub fn join(this: &Span, other: Span) -> Option { - this.join(other) -} - -pub fn subspan>(this: &Literal, range: R) -> Option { - this.subspan(range) -} - -// Include in sccache cache key. -const _: Option<&str> = option_env!("RUSTC_BOOTSTRAP"); diff --git a/fuzz/.gitignore b/fuzz/.gitignore new file mode 100644 index 0000000..188f196 --- /dev/null +++ b/fuzz/.gitignore @@ -0,0 +1,3 @@ +artifacts/ +corpus/ +target/ diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml new file mode 100644 index 0000000..0923993 --- /dev/null +++ b/fuzz/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "proc-macro2-fuzz" +version = "0.0.0" +authors = ["David Tolnay "] +edition = "2021" +publish = false + +[package.metadata] +cargo-fuzz = true + +[dependencies] +libfuzzer-sys = "0.4" +proc-macro2 = { path = ".." 
} + +[[bin]] +name = "parse_token_stream" +path = "fuzz_targets/parse_token_stream.rs" +test = false +doc = false + +[workspace] diff --git a/fuzz/fuzz_targets/parse_token_stream.rs b/fuzz/fuzz_targets/parse_token_stream.rs new file mode 100644 index 0000000..5b73e1b --- /dev/null +++ b/fuzz/fuzz_targets/parse_token_stream.rs @@ -0,0 +1,12 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use std::str; + +fuzz_target!(|bytes: &[u8]| { + if bytes.len() < 200 { + if let Ok(string) = str::from_utf8(bytes) { + _ = string.parse::(); + } + } +}); diff --git a/rust-toolchain.toml b/rust-toolchain.toml deleted file mode 100644 index 20fe888..0000000 --- a/rust-toolchain.toml +++ /dev/null @@ -1,2 +0,0 @@ -[toolchain] -components = ["rust-src"] diff --git a/src/extra.rs b/src/extra.rs index 4a69d46..cbce162 100644 --- a/src/extra.rs +++ b/src/extra.rs @@ -22,7 +22,9 @@ enum DelimSpanEnum { #[cfg(wrap_proc_macro)] Compiler { join: proc_macro::Span, + #[cfg(not(no_group_open_close))] open: proc_macro::Span, + #[cfg(not(no_group_open_close))] close: proc_macro::Span, }, Fallback(fallback::Span), @@ -34,7 +36,9 @@ impl DelimSpan { let inner = match group { imp::Group::Compiler(group) => DelimSpanEnum::Compiler { join: group.span(), + #[cfg(not(no_group_open_close))] open: group.span_open(), + #[cfg(not(no_group_open_close))] close: group.span_close(), }, imp::Group::Fallback(group) => DelimSpanEnum::Fallback(group.span()), @@ -62,7 +66,13 @@ impl DelimSpan { pub fn open(&self) -> Span { match &self.inner { #[cfg(wrap_proc_macro)] - DelimSpanEnum::Compiler { open, .. } => Span::_new(imp::Span::Compiler(*open)), + DelimSpanEnum::Compiler { + #[cfg(not(no_group_open_close))] + open, + #[cfg(no_group_open_close)] + join: open, + .. + } => Span::_new(imp::Span::Compiler(*open)), DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.first_byte()), } } @@ -71,7 +81,13 @@ impl DelimSpan { pub fn close(&self) -> Span { match &self.inner { #[cfg(wrap_proc_macro)] - DelimSpanEnum::Compiler { close, .. } => Span::_new(imp::Span::Compiler(*close)), + DelimSpanEnum::Compiler { + #[cfg(not(no_group_open_close))] + close, + #[cfg(no_group_open_close)] + join: close, + .. 
+ } => Span::_new(imp::Span::Compiler(*close)), DelimSpanEnum::Fallback(span) => Span::_new_fallback(span.last_byte()), } } diff --git a/src/fallback.rs b/src/fallback.rs index 7b40427..b0ed7b0 100644 --- a/src/fallback.rs +++ b/src/fallback.rs @@ -3,17 +3,18 @@ use crate::location::LineColumn; use crate::parse::{self, Cursor}; use crate::rcvec::{RcVec, RcVecBuilder, RcVecIntoIter, RcVecMut}; use crate::{Delimiter, Spacing, TokenTree}; -#[cfg(all(span_locations, not(fuzzing)))] -use alloc::collections::BTreeMap; -#[cfg(all(span_locations, not(fuzzing)))] +#[cfg(span_locations)] use core::cell::RefCell; #[cfg(span_locations)] use core::cmp; use core::fmt::{self, Debug, Display, Write}; +use core::iter::FromIterator; use core::mem::ManuallyDrop; use core::ops::RangeBounds; use core::ptr; use core::str::FromStr; +#[cfg(procmacro2_semver_exempt)] +use std::path::Path; use std::path::PathBuf; /// Force use of proc-macro2's fallback implementation of the API for now, even @@ -45,7 +46,7 @@ impl LexError { self.span } - pub(crate) fn call_site() -> Self { + fn call_site() -> Self { LexError { span: Span::call_site(), } @@ -72,6 +73,7 @@ impl TokenStream { fn push_token_from_proc_macro(mut vec: RcVecMut, token: TokenTree) { // https://github.com/dtolnay/proc-macro2/issues/235 match token { + #[cfg(not(no_bind_by_move_pattern_guard))] TokenTree::Literal(crate::Literal { #[cfg(wrap_proc_macro)] inner: crate::imp::Literal::Fallback(literal), @@ -81,6 +83,20 @@ fn push_token_from_proc_macro(mut vec: RcVecMut, token: TokenTree) { }) if literal.repr.starts_with('-') => { push_negative_literal(vec, literal); } + #[cfg(no_bind_by_move_pattern_guard)] + TokenTree::Literal(crate::Literal { + #[cfg(wrap_proc_macro)] + inner: crate::imp::Literal::Fallback(literal), + #[cfg(not(wrap_proc_macro))] + inner: literal, + .. + }) => { + if literal.repr.starts_with('-') { + push_negative_literal(vec, literal); + } else { + vec.push(TokenTree::Literal(crate::Literal::_new_fallback(literal))); + } + } _ => vec.push(token), } @@ -146,14 +162,11 @@ impl TokenStreamBuilder { #[cfg(span_locations)] fn get_cursor(src: &str) -> Cursor { - #[cfg(fuzzing)] - return Cursor { rest: src, off: 1 }; - // Create a dummy file & add it to the source map - #[cfg(not(fuzzing))] SOURCE_MAP.with(|cm| { let mut cm = cm.borrow_mut(); - let span = cm.add_file(src); + let name = format!("", cm.files.len()); + let span = cm.add_file(&name, src); Cursor { rest: src, off: span.lo, @@ -219,7 +232,7 @@ impl Debug for TokenStream { } } -#[cfg(feature = "proc-macro")] +#[cfg(use_proc_macro)] impl From for TokenStream { fn from(inner: proc_macro::TokenStream) -> Self { inner @@ -229,7 +242,7 @@ impl From for TokenStream { } } -#[cfg(feature = "proc-macro")] +#[cfg(use_proc_macro)] impl From for proc_macro::TokenStream { fn from(inner: TokenStream) -> Self { inner @@ -307,6 +320,7 @@ impl SourceFile { } pub fn is_real(&self) -> bool { + // XXX(nika): Support real files in the future? false } } @@ -320,34 +334,36 @@ impl Debug for SourceFile { } } -#[cfg(all(span_locations, not(fuzzing)))] +#[cfg(span_locations)] thread_local! { static SOURCE_MAP: RefCell = RefCell::new(SourceMap { - // Start with a single dummy file which all call_site() and def_site() - // spans reference. + // NOTE: We start with a single dummy file which all call_site() and + // def_site() spans reference. 
files: vec![FileInfo { + #[cfg(procmacro2_semver_exempt)] + name: "".to_owned(), source_text: String::new(), span: Span { lo: 0, hi: 0 }, lines: vec![0], - char_index_to_byte_offset: BTreeMap::new(), }], }); } -#[cfg(all(span_locations, not(fuzzing)))] +#[cfg(span_locations)] struct FileInfo { + #[cfg(procmacro2_semver_exempt)] + name: String, source_text: String, span: Span, lines: Vec, - char_index_to_byte_offset: BTreeMap, } -#[cfg(all(span_locations, not(fuzzing)))] +#[cfg(span_locations)] impl FileInfo { fn offset_line_column(&self, offset: usize) -> LineColumn { assert!(self.span_within(Span { lo: offset as u32, - hi: offset as u32, + hi: offset as u32 })); let offset = offset - self.span.lo as usize; match self.lines.binary_search(&offset) { @@ -366,46 +382,16 @@ impl FileInfo { span.lo >= self.span.lo && span.hi <= self.span.hi } - fn source_text(&mut self, span: Span) -> String { - let lo_char = (span.lo - self.span.lo) as usize; - - // Look up offset of the largest already-computed char index that is - // less than or equal to the current requested one. We resume counting - // chars from that point. - let (&last_char_index, &last_byte_offset) = self - .char_index_to_byte_offset - .range(..=lo_char) - .next_back() - .unwrap_or((&0, &0)); - - let lo_byte = if last_char_index == lo_char { - last_byte_offset - } else { - let total_byte_offset = match self.source_text[last_byte_offset..] - .char_indices() - .nth(lo_char - last_char_index) - { - Some((additional_offset, _ch)) => last_byte_offset + additional_offset, - None => self.source_text.len(), - }; - self.char_index_to_byte_offset - .insert(lo_char, total_byte_offset); - total_byte_offset - }; - - let trunc_lo = &self.source_text[lo_byte..]; - let char_len = (span.hi - span.lo) as usize; - let source_text = match trunc_lo.char_indices().nth(char_len) { - Some((offset, _ch)) => &trunc_lo[..offset], - None => trunc_lo, - }; - source_text.to_owned() + fn source_text(&self, span: Span) -> String { + let lo = (span.lo - self.span.lo) as usize; + let hi = (span.hi - self.span.lo) as usize; + self.source_text[lo..hi].to_owned() } } /// Computes the offsets of each line in the given source string /// and the total number of characters -#[cfg(all(span_locations, not(fuzzing)))] +#[cfg(span_locations)] fn lines_offsets(s: &str) -> (usize, Vec) { let mut lines = vec![0]; let mut total = 0; @@ -420,12 +406,12 @@ fn lines_offsets(s: &str) -> (usize, Vec) { (total, lines) } -#[cfg(all(span_locations, not(fuzzing)))] +#[cfg(span_locations)] struct SourceMap { files: Vec, } -#[cfg(all(span_locations, not(fuzzing)))] +#[cfg(span_locations)] impl SourceMap { fn next_start_pos(&self) -> u32 { // Add 1 so there's always space between files. @@ -435,37 +421,27 @@ impl SourceMap { self.files.last().unwrap().span.hi + 1 } - fn add_file(&mut self, src: &str) -> Span { + fn add_file(&mut self, name: &str, src: &str) -> Span { let (len, lines) = lines_offsets(src); let lo = self.next_start_pos(); + // XXX(nika): Should we bother doing a checked cast or checked add here? let span = Span { lo, hi: lo + (len as u32), }; self.files.push(FileInfo { + #[cfg(procmacro2_semver_exempt)] + name: name.to_owned(), source_text: src.to_owned(), span, lines, - // Populated lazily by source_text(). 
- char_index_to_byte_offset: BTreeMap::new(), }); - span - } + #[cfg(not(procmacro2_semver_exempt))] + let _ = name; - #[cfg(procmacro2_semver_exempt)] - fn filepath(&self, span: Span) -> PathBuf { - for (i, file) in self.files.iter().enumerate() { - if file.span_within(span) { - return PathBuf::from(if i == 0 { - "".to_owned() - } else { - format!("", i) - }); - } - } - unreachable!("Invalid span with no related FileInfo!"); + span } fn fileinfo(&self, span: Span) -> &FileInfo { @@ -474,16 +450,7 @@ impl SourceMap { return file; } } - unreachable!("Invalid span with no related FileInfo!"); - } - - fn fileinfo_mut(&mut self, span: Span) -> &mut FileInfo { - for file in &mut self.files { - if file.span_within(span) { - return file; - } - } - unreachable!("Invalid span with no related FileInfo!"); + panic!("Invalid span with no related FileInfo!"); } } @@ -506,6 +473,7 @@ impl Span { Span { lo: 0, hi: 0 } } + #[cfg(not(no_hygiene))] pub fn mixed_site() -> Self { Span::call_site() } @@ -528,25 +496,17 @@ impl Span { #[cfg(procmacro2_semver_exempt)] pub fn source_file(&self) -> SourceFile { - #[cfg(fuzzing)] - return SourceFile { - path: PathBuf::from(""), - }; - - #[cfg(not(fuzzing))] SOURCE_MAP.with(|cm| { let cm = cm.borrow(); - let path = cm.filepath(*self); - SourceFile { path } + let fi = cm.fileinfo(*self); + SourceFile { + path: Path::new(&fi.name).to_owned(), + } }) } #[cfg(span_locations)] pub fn start(&self) -> LineColumn { - #[cfg(fuzzing)] - return LineColumn { line: 0, column: 0 }; - - #[cfg(not(fuzzing))] SOURCE_MAP.with(|cm| { let cm = cm.borrow(); let fi = cm.fileinfo(*self); @@ -556,10 +516,6 @@ impl Span { #[cfg(span_locations)] pub fn end(&self) -> LineColumn { - #[cfg(fuzzing)] - return LineColumn { line: 0, column: 0 }; - - #[cfg(not(fuzzing))] SOURCE_MAP.with(|cm| { let cm = cm.borrow(); let fi = cm.fileinfo(*self); @@ -567,6 +523,26 @@ impl Span { }) } + #[cfg(procmacro2_semver_exempt)] + pub fn before(&self) -> Span { + Span { + #[cfg(span_locations)] + lo: self.lo, + #[cfg(span_locations)] + hi: self.lo, + } + } + + #[cfg(procmacro2_semver_exempt)] + pub fn after(&self) -> Span { + Span { + #[cfg(span_locations)] + lo: self.hi, + #[cfg(span_locations)] + hi: self.hi, + } + } + #[cfg(not(span_locations))] pub fn join(&self, _other: Span) -> Option { Some(Span {}) @@ -574,13 +550,6 @@ impl Span { #[cfg(span_locations)] pub fn join(&self, other: Span) -> Option { - #[cfg(fuzzing)] - return { - let _ = other; - None - }; - - #[cfg(not(fuzzing))] SOURCE_MAP.with(|cm| { let cm = cm.borrow(); // If `other` is not within the same FileInfo as us, return None. 
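
The fallback.rs hunks above touch `lines_offsets` and `FileInfo::offset_line_column`, which map a span offset to a line/column by binary-searching a vector of line-start offsets. A standalone, simplified sketch of that lookup strategy (illustrative only; the crate's real code counts characters and stores the vector per file):

```rust
// `starts` holds the offset at which each line begins (always starting with 0).
// A binary search finds the line containing `offset`, mirroring the
// Ok/Err handling in FileInfo::offset_line_column.
fn line_column(starts: &[usize], offset: usize) -> (usize, usize) {
    match starts.binary_search(&offset) {
        // Offset is exactly at a line start: 1-based line, column 0.
        Ok(found) => (found + 1, 0),
        // Otherwise it belongs to the line that starts just before it.
        Err(idx) => (idx, offset - starts[idx - 1]),
    }
}

fn main() {
    // For the source "ab\ncd\n" the line starts are [0, 3, 6].
    let starts = [0usize, 3, 6];
    assert_eq!(line_column(&starts, 0), (1, 0)); // 'a' -> line 1, column 0
    assert_eq!(line_column(&starts, 4), (2, 1)); // 'd' -> line 2, column 1
}
```
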
@@ -601,16 +570,10 @@ impl Span { #[cfg(span_locations)] pub fn source_text(&self) -> Option { - #[cfg(fuzzing)] - return None; - - #[cfg(not(fuzzing))] - { - if self.is_call_site() { - None - } else { - Some(SOURCE_MAP.with(|cm| cm.borrow_mut().fileinfo_mut(*self).source_text(*self))) - } + if self.is_call_site() { + None + } else { + Some(SOURCE_MAP.with(|cm| cm.borrow().fileinfo(*self).source_text(*self))) } } @@ -755,32 +718,22 @@ pub(crate) struct Ident { } impl Ident { - #[track_caller] - pub fn new_checked(string: &str, span: Span) -> Self { - validate_ident(string); - Ident::new_unchecked(string, span) - } + fn _new(string: &str, raw: bool, span: Span) -> Self { + validate_ident(string, raw); - pub fn new_unchecked(string: &str, span: Span) -> Self { Ident { sym: string.to_owned(), span, - raw: false, + raw, } } - #[track_caller] - pub fn new_raw_checked(string: &str, span: Span) -> Self { - validate_ident_raw(string); - Ident::new_raw_unchecked(string, span) + pub fn new(string: &str, span: Span) -> Self { + Ident::_new(string, false, span) } - pub fn new_raw_unchecked(string: &str, span: Span) -> Self { - Ident { - sym: string.to_owned(), - span, - raw: true, - } + pub fn new_raw(string: &str, span: Span) -> Self { + Ident::_new(string, true, span) } pub fn span(&self) -> Span { @@ -800,13 +753,12 @@ pub(crate) fn is_ident_continue(c: char) -> bool { unicode_ident::is_xid_continue(c) } -#[track_caller] -fn validate_ident(string: &str) { +fn validate_ident(string: &str, raw: bool) { if string.is_empty() { panic!("Ident is not allowed to be empty; use Option"); } - if string.bytes().all(|digit| b'0' <= digit && digit <= b'9') { + if string.bytes().all(|digit| digit >= b'0' && digit <= b'9') { panic!("Ident cannot be a number; use Literal instead"); } @@ -827,17 +779,14 @@ fn validate_ident(string: &str) { if !ident_ok(string) { panic!("{:?} is not a valid Ident", string); } -} -#[track_caller] -fn validate_ident_raw(string: &str) { - validate_ident(string); - - match string { - "_" | "super" | "self" | "Self" | "crate" => { - panic!("`r#{}` cannot be a raw identifier", string); + if raw { + match string { + "_" | "super" | "self" | "Self" | "crate" => { + panic!("`r#{}` cannot be a raw identifier", string); + } + _ => {} } - _ => {} } } @@ -870,7 +819,6 @@ impl Display for Ident { } } -#[allow(clippy::missing_fields_in_debug)] impl Debug for Ident { // Ident(proc_macro), Ident(r#union) #[cfg(not(span_locations))] @@ -979,25 +927,12 @@ impl Literal { pub fn string(t: &str) -> Literal { let mut repr = String::with_capacity(t.len() + 2); repr.push('"'); - let mut chars = t.chars(); - while let Some(ch) = chars.next() { - if ch == '\0' { - repr.push_str( - if chars - .as_str() - .starts_with(|next| '0' <= next && next <= '7') - { - // circumvent clippy::octal_escapes lint - "\\x00" - } else { - "\\0" - }, - ); - } else if ch == '\'' { + for c in t.chars() { + if c == '\'' { // escape_debug turns this into "\'" which is unnecessary. 
- repr.push(ch); + repr.push(c); } else { - repr.extend(ch.escape_debug()); + repr.extend(c.escape_debug()); } } repr.push('"'); @@ -1019,21 +954,16 @@ impl Literal { pub fn byte_string(bytes: &[u8]) -> Literal { let mut escaped = "b\"".to_string(); - let mut bytes = bytes.iter(); - while let Some(&b) = bytes.next() { + for b in bytes { #[allow(clippy::match_overlapping_arm)] - match b { - b'\0' => escaped.push_str(match bytes.as_slice().first() { - // circumvent clippy::octal_escapes lint - Some(b'0'..=b'7') => r"\x00", - _ => r"\0", - }), + match *b { + b'\0' => escaped.push_str(r"\0"), b'\t' => escaped.push_str(r"\t"), b'\n' => escaped.push_str(r"\n"), b'\r' => escaped.push_str(r"\r"), b'"' => escaped.push_str("\\\""), b'\\' => escaped.push_str("\\\\"), - b'\x20'..=b'\x7E' => escaped.push(b as char), + b'\x20'..=b'\x7E' => escaped.push(*b as char), _ => { let _ = write!(escaped, "\\x{:02X}", b); } @@ -1051,75 +981,28 @@ impl Literal { self.span = span; } - pub fn subspan>(&self, range: R) -> Option { - #[cfg(not(span_locations))] - { - let _ = range; - None - } - - #[cfg(span_locations)] - { - use core::ops::Bound; - - let lo = match range.start_bound() { - Bound::Included(start) => { - let start = u32::try_from(*start).ok()?; - self.span.lo.checked_add(start)? - } - Bound::Excluded(start) => { - let start = u32::try_from(*start).ok()?; - self.span.lo.checked_add(start)?.checked_add(1)? - } - Bound::Unbounded => self.span.lo, - }; - let hi = match range.end_bound() { - Bound::Included(end) => { - let end = u32::try_from(*end).ok()?; - self.span.lo.checked_add(end)?.checked_add(1)? - } - Bound::Excluded(end) => { - let end = u32::try_from(*end).ok()?; - self.span.lo.checked_add(end)? - } - Bound::Unbounded => self.span.hi, - }; - if lo <= hi && hi <= self.span.hi { - Some(Span { lo, hi }) - } else { - None - } - } + pub fn subspan>(&self, _range: R) -> Option { + None } } impl FromStr for Literal { type Err = LexError; - fn from_str(repr: &str) -> Result { - let mut cursor = get_cursor(repr); - #[cfg(span_locations)] - let lo = cursor.off; - - let negative = cursor.starts_with_char('-'); + fn from_str(mut repr: &str) -> Result { + let negative = repr.starts_with('-'); if negative { - cursor = cursor.advance(1); - if !cursor.starts_with_fn(|ch| ch.is_ascii_digit()) { + repr = &repr[1..]; + if !repr.starts_with(|ch: char| ch.is_ascii_digit()) { return Err(LexError::call_site()); } } - - if let Ok((rest, mut literal)) = parse::literal(cursor) { - if rest.is_empty() { + let cursor = get_cursor(repr); + if let Ok((_rest, mut literal)) = parse::literal(cursor) { + if literal.repr.len() == repr.len() { if negative { literal.repr.insert(0, '-'); } - literal.span = Span { - #[cfg(span_locations)] - lo, - #[cfg(span_locations)] - hi: rest.off, - }; return Ok(literal); } } diff --git a/src/lib.rs b/src/lib.rs index 7e8f543..138627a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -55,7 +55,7 @@ //! If parsing with [Syn], you'll use [`parse_macro_input!`] instead to //! propagate parse errors correctly back to the compiler when parsing fails. //! -//! [`parse_macro_input!`]: https://docs.rs/syn/2.0/syn/macro.parse_macro_input.html +//! [`parse_macro_input!`]: https://docs.rs/syn/1.0/syn/macro.parse_macro_input.html //! //! # Unstable features //! @@ -65,7 +65,7 @@ //! //! To opt into the additional APIs available in the most recent nightly //! compiler, the `procmacro2_semver_exempt` config flag must be passed to -//! rustc. We will polyfill those nightly-only APIs back to Rust 1.56.0. As +//! rustc. 
We will polyfill those nightly-only APIs back to Rust 1.31.0. As //! these are unstable APIs that track the nightly compiler, minor versions of //! proc-macro2 may make breaking changes to them at any time. //! @@ -86,25 +86,22 @@ //! a different thread. // Proc-macro2 types in rustdoc of other crates get linked to here. -#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.76")] -#![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))] +#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.53")] +#![cfg_attr( + any(proc_macro_span, super_unstable), + feature(proc_macro_span, proc_macro_span_shrink) +)] #![cfg_attr(super_unstable, feature(proc_macro_def_site))] #![cfg_attr(doc_cfg, feature(doc_cfg))] -#![deny(unsafe_op_in_unsafe_fn)] #![allow( clippy::cast_lossless, clippy::cast_possible_truncation, - clippy::checked_conversions, clippy::doc_markdown, clippy::items_after_statements, - clippy::iter_without_into_iter, clippy::let_underscore_untyped, clippy::manual_assert, - clippy::manual_range_contains, - clippy::missing_safety_doc, clippy::must_use_candidate, clippy::needless_doctest_main, - clippy::new_without_default, clippy::return_self_not_must_use, clippy::shadow_unrelated, clippy::trivially_copy_pass_by_ref, @@ -122,18 +119,7 @@ compile_error! {"\ build script as well. "} -#[cfg(all( - procmacro2_nightly_testing, - feature = "proc-macro", - not(proc_macro_span) -))] -compile_error! {"\ - Build script probe failed to compile. -"} - -extern crate alloc; - -#[cfg(feature = "proc-macro")] +#[cfg(use_proc_macro)] extern crate proc_macro; mod marker; @@ -164,6 +150,7 @@ use crate::marker::Marker; use core::cmp::Ordering; use core::fmt::{self, Debug, Display}; use core::hash::{Hash, Hasher}; +use core::iter::FromIterator; use core::ops::RangeBounds; use core::str::FromStr; use std::error::Error; @@ -171,7 +158,6 @@ use std::error::Error; use std::path::PathBuf; #[cfg(span_locations)] -#[cfg_attr(doc_cfg, doc(cfg(feature = "span-locations")))] pub use crate::location::LineColumn; /// An abstract stream of tokens, or more concretely a sequence of token trees. @@ -247,16 +233,14 @@ impl FromStr for TokenStream { } } -#[cfg(feature = "proc-macro")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] +#[cfg(use_proc_macro)] impl From for TokenStream { fn from(inner: proc_macro::TokenStream) -> Self { TokenStream::_new(inner.into()) } } -#[cfg(feature = "proc-macro")] -#[cfg_attr(doc_cfg, doc(cfg(feature = "proc-macro")))] +#[cfg(use_proc_macro)] impl From for proc_macro::TokenStream { fn from(inner: TokenStream) -> Self { inner.inner.into() @@ -416,6 +400,9 @@ impl Span { /// The span located at the invocation of the procedural macro, but with /// local variables, labels, and `$crate` resolved at the definition site /// of the macro. This is the same hygiene behavior as `macro_rules`. + /// + /// This function requires Rust 1.45 or later. + #[cfg(not(no_hygiene))] pub fn mixed_site() -> Self { Span::_new(imp::Span::mixed_site()) } @@ -502,6 +489,24 @@ impl Span { self.inner.end() } + /// Creates an empty span pointing to directly before this span. + /// + /// This method is semver exempt and not exposed by default. + #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] + #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] + pub fn before(&self) -> Span { + Span::_new(self.inner.before()) + } + + /// Creates an empty span pointing to directly after this span. + /// + /// This method is semver exempt and not exposed by default. 
+ #[cfg(all(procmacro2_semver_exempt, any(not(wrap_proc_macro), super_unstable)))] + #[cfg_attr(doc_cfg, doc(cfg(procmacro2_semver_exempt)))] + pub fn after(&self) -> Span { + Span::_new(self.inner.after()) + } + /// Create a new span encompassing `self` and `other`. /// /// Returns `None` if `self` and `other` are from different files. @@ -866,7 +871,7 @@ impl Debug for Punct { /// Rust keywords. Use `input.call(Ident::parse_any)` when parsing to match the /// behaviour of `Ident::new`. /// -/// [`Parse`]: https://docs.rs/syn/2.0/syn/parse/trait.Parse.html +/// [`Parse`]: https://docs.rs/syn/1.0/syn/parse/trait.Parse.html /// /// # Examples /// @@ -957,13 +962,12 @@ impl Ident { /// Panics if the input string is neither a keyword nor a legal variable /// name. If you are not sure whether the string contains an identifier and /// need to handle an error case, use - /// syn::parse_str::<Ident> /// rather than `Ident::new`. - #[track_caller] pub fn new(string: &str, span: Span) -> Self { - Ident::_new(imp::Ident::new_checked(string, span.inner)) + Ident::_new(imp::Ident::new(string, span.inner)) } /// Same as `Ident::new`, but creates a raw identifier (`r#ident`). The @@ -971,9 +975,12 @@ impl Ident { /// (including keywords, e.g. `fn`). Keywords which are usable in path /// segments (e.g. `self`, `super`) are not supported, and will cause a /// panic. - #[track_caller] pub fn new_raw(string: &str, span: Span) -> Self { - Ident::_new(imp::Ident::new_raw_checked(string, span.inner)) + Ident::_new_raw(string, span) + } + + fn _new_raw(string: &str, span: Span) -> Self { + Ident::_new(imp::Ident::new_raw(string, span.inner)) } /// Returns the span of this `Ident`. @@ -1250,7 +1257,7 @@ impl Literal { // representation. This is not public API other than for quote. #[doc(hidden)] pub unsafe fn from_str_unchecked(repr: &str) -> Self { - Literal::_new(unsafe { imp::Literal::from_str_unchecked(repr) }) + Literal::_new(imp::Literal::from_str_unchecked(repr)) } } diff --git a/src/marker.rs b/src/marker.rs index e8874bd..59fd096 100644 --- a/src/marker.rs +++ b/src/marker.rs @@ -1,6 +1,6 @@ -use alloc::rc::Rc; use core::marker::PhantomData; -use core::panic::{RefUnwindSafe, UnwindSafe}; +use std::panic::{RefUnwindSafe, UnwindSafe}; +use std::rc::Rc; // Zero sized marker with the correct set of autotrait impls we want all proc // macro types to have. 
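
The fallback `validate_ident` logic and the `Ident::new`/`Ident::new_raw` documentation in the hunks above describe which strings are rejected. A small usage sketch of that public behaviour, not taken from the patch:

```rust
use proc_macro2::{Ident, Span};

fn main() {
    // An ordinary identifier.
    let ident = Ident::new("my_var", Span::call_site());
    assert_eq!(ident.to_string(), "my_var");

    // A raw identifier wrapping a keyword.
    let raw = Ident::new_raw("fn", Span::call_site());
    assert_eq!(raw.to_string(), "r#fn");

    // These panic, per the validation shown in fallback.rs:
    //   Ident::new("", span)          -- empty identifier
    //   Ident::new("123", span)       -- all digits; use Literal instead
    //   Ident::new_raw("self", span)  -- `r#self` is not a legal raw identifier
}
```
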
@@ -12,10 +12,7 @@ mod value { pub(crate) use core::marker::PhantomData as Marker; } -pub(crate) struct ProcMacroAutoTraits( - #[allow(dead_code)] // https://github.com/rust-lang/rust/issues/119645 - Rc<()>, -); +pub(crate) struct ProcMacroAutoTraits(Rc<()>); impl UnwindSafe for ProcMacroAutoTraits {} impl RefUnwindSafe for ProcMacroAutoTraits {} diff --git a/src/parse.rs b/src/parse.rs index 07239bc..82291da 100644 --- a/src/parse.rs +++ b/src/parse.rs @@ -1,5 +1,5 @@ use crate::fallback::{ - self, is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream, + is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream, TokenStreamBuilder, }; use crate::{Delimiter, Punct, Spacing, TokenTree}; @@ -27,18 +27,7 @@ impl<'a> Cursor<'a> { self.rest.starts_with(s) } - pub fn starts_with_char(&self, ch: char) -> bool { - self.rest.starts_with(ch) - } - - pub fn starts_with_fn(&self, f: Pattern) -> bool - where - Pattern: FnMut(char) -> bool, - { - self.rest.starts_with(f) - } - - pub fn is_empty(&self) -> bool { + fn is_empty(&self) -> bool { self.rest.is_empty() } @@ -108,7 +97,7 @@ fn skip_whitespace(input: Cursor) -> Cursor { s = s.advance(1); continue; } - b if b.is_ascii() => {} + b if b <= 0x7f => {} _ => { let ch = s.chars().next().unwrap(); if is_whitespace(ch) { @@ -161,10 +150,6 @@ fn word_break(input: Cursor) -> Result { } } -// Rustc's representation of a macro expansion error in expression position or -// type position. -const ERROR: &str = "(/*ERROR*/)"; - pub(crate) fn token_stream(mut input: Cursor) -> Result { let mut trees = TokenStreamBuilder::new(); let mut stack = Vec::new(); @@ -196,7 +181,7 @@ pub(crate) fn token_stream(mut input: Cursor) -> Result { }; if let Some(open_delimiter) = match first { - b'(' if !input.starts_with(ERROR) => Some(Delimiter::Parenthesis), + b'(' => Some(Delimiter::Parenthesis), b'[' => Some(Delimiter::Bracket), b'{' => Some(Delimiter::Brace), _ => None, @@ -271,21 +256,15 @@ fn leaf_token(input: Cursor) -> PResult { Ok((input, TokenTree::Punct(p))) } else if let Ok((input, i)) = ident(input) { Ok((input, TokenTree::Ident(i))) - } else if input.starts_with(ERROR) { - let rest = input.advance(ERROR.len()); - let repr = crate::Literal::_new_fallback(Literal::_new(ERROR.to_owned())); - Ok((rest, TokenTree::Literal(repr))) } else { Err(Reject) } } fn ident(input: Cursor) -> PResult { - if [ - "r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#", "c\"", "cr\"", "cr#", - ] - .iter() - .any(|prefix| input.starts_with(prefix)) + if ["r\"", "r#\"", "r##", "b\"", "b\'", "br\"", "br#"] + .iter() + .any(|prefix| input.starts_with(prefix)) { Err(Reject) } else { @@ -300,10 +279,7 @@ fn ident_any(input: Cursor) -> PResult { let (rest, sym) = ident_not_raw(rest)?; if !raw { - let ident = crate::Ident::_new(crate::imp::Ident::new_unchecked( - sym, - fallback::Span::call_site(), - )); + let ident = crate::Ident::new(sym, crate::Span::call_site()); return Ok((rest, ident)); } @@ -312,10 +288,7 @@ fn ident_any(input: Cursor) -> PResult { _ => {} } - let ident = crate::Ident::_new(crate::imp::Ident::new_raw_unchecked( - sym, - fallback::Span::call_site(), - )); + let ident = crate::Ident::_new_raw(sym, crate::Span::call_site()); Ok((rest, ident)) } @@ -349,8 +322,6 @@ fn literal_nocapture(input: Cursor) -> Result { Ok(ok) } else if let Ok(ok) = byte_string(input) { Ok(ok) - } else if let Ok(ok) = c_string(input) { - Ok(ok) } else if let Ok(ok) = byte(input) { Ok(ok) } else if let Ok(ok) = character(input) { @@ -381,8 +352,8 @@ fn 
string(input: Cursor) -> Result { } } -fn cooked_string(mut input: Cursor) -> Result { - let mut chars = input.char_indices(); +fn cooked_string(input: Cursor) -> Result { + let mut chars = input.char_indices().peekable(); while let Some((i, ch)) = chars.next() { match ch { @@ -396,16 +367,31 @@ fn cooked_string(mut input: Cursor) -> Result { }, '\\' => match chars.next() { Some((_, 'x')) => { - backslash_x_char(&mut chars)?; + if !backslash_x_char(&mut chars) { + break; + } } - Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0')) => {} + Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\')) + | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {} Some((_, 'u')) => { - backslash_u(&mut chars)?; + if !backslash_u(&mut chars) { + break; + } } - Some((newline, ch @ ('\n' | '\r'))) => { - input = input.advance(newline + 1); - trailing_backslash(&mut input, ch as u8)?; - chars = input.char_indices(); + Some((_, ch @ '\n')) | Some((_, ch @ '\r')) => { + let mut last = ch; + loop { + if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') { + return Err(Reject); + } + match chars.peek() { + Some((_, ch)) if ch.is_whitespace() => { + last = *ch; + chars.next(); + } + _ => break, + } + } } _ => break, }, @@ -415,30 +401,11 @@ fn cooked_string(mut input: Cursor) -> Result { Err(Reject) } -fn raw_string(input: Cursor) -> Result { - let (input, delimiter) = delimiter_of_raw_string(input)?; - let mut bytes = input.bytes().enumerate(); - while let Some((i, byte)) = bytes.next() { - match byte { - b'"' if input.rest[i + 1..].starts_with(delimiter) => { - let rest = input.advance(i + 1 + delimiter.len()); - return Ok(literal_suffix(rest)); - } - b'\r' => match bytes.next() { - Some((_, b'\n')) => {} - _ => break, - }, - _ => {} - } - } - Err(Reject) -} - fn byte_string(input: Cursor) -> Result { if let Ok(input) = input.parse("b\"") { cooked_byte_string(input) } else if let Ok(input) = input.parse("br") { - raw_byte_string(input) + raw_string(input) } else { Err(Reject) } @@ -458,125 +425,68 @@ fn cooked_byte_string(mut input: Cursor) -> Result { }, b'\\' => match bytes.next() { Some((_, b'x')) => { - backslash_x_byte(&mut bytes)?; + if !backslash_x_byte(&mut bytes) { + break; + } } - Some((_, b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"')) => {} - Some((newline, b @ (b'\n' | b'\r'))) => { - input = input.advance(newline + 1); - trailing_backslash(&mut input, b)?; - bytes = input.bytes().enumerate(); + Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\')) + | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {} + Some((newline, b @ b'\n')) | Some((newline, b @ b'\r')) => { + let mut last = b as char; + let rest = input.advance(newline + 1); + let mut chars = rest.char_indices(); + loop { + if last == '\r' && chars.next().map_or(true, |(_, ch)| ch != '\n') { + return Err(Reject); + } + match chars.next() { + Some((_, ch)) if ch.is_whitespace() => last = ch, + Some((offset, _)) => { + input = rest.advance(offset); + bytes = input.bytes().enumerate(); + break; + } + None => return Err(Reject), + } + } } _ => break, }, - b if b.is_ascii() => {} + b if b < 0x80 => {} _ => break, } } Err(Reject) } -fn delimiter_of_raw_string(input: Cursor) -> PResult<&str> { - for (i, byte) in input.bytes().enumerate() { - match byte { - b'"' => { - if i > 255 { - // https://github.com/rust-lang/rust/pull/95251 - return Err(Reject); - } - return Ok((input.advance(i + 1), &input.rest[..i])); - } - b'#' => {} - _ => break, - } - } - Err(Reject) -} - -fn raw_byte_string(input: 
Cursor) -> Result { - let (input, delimiter) = delimiter_of_raw_string(input)?; - let mut bytes = input.bytes().enumerate(); - while let Some((i, byte)) = bytes.next() { - match byte { - b'"' if input.rest[i + 1..].starts_with(delimiter) => { - let rest = input.advance(i + 1 + delimiter.len()); - return Ok(literal_suffix(rest)); - } - b'\r' => match bytes.next() { - Some((_, b'\n')) => {} - _ => break, - }, - other => { - if !other.is_ascii() { - break; - } - } - } - } - Err(Reject) -} - -fn c_string(input: Cursor) -> Result { - if let Ok(input) = input.parse("c\"") { - cooked_c_string(input) - } else if let Ok(input) = input.parse("cr") { - raw_c_string(input) - } else { - Err(Reject) - } -} - -fn raw_c_string(input: Cursor) -> Result { - let (input, delimiter) = delimiter_of_raw_string(input)?; - let mut bytes = input.bytes().enumerate(); - while let Some((i, byte)) = bytes.next() { - match byte { - b'"' if input.rest[i + 1..].starts_with(delimiter) => { - let rest = input.advance(i + 1 + delimiter.len()); - return Ok(literal_suffix(rest)); - } - b'\r' => match bytes.next() { - Some((_, b'\n')) => {} - _ => break, - }, - b'\0' => break, - _ => {} - } - } - Err(Reject) -} - -fn cooked_c_string(mut input: Cursor) -> Result { +fn raw_string(input: Cursor) -> Result { let mut chars = input.char_indices(); - - while let Some((i, ch)) = chars.next() { + let mut n = 0; + for (i, ch) in &mut chars { match ch { '"' => { - let input = input.advance(i + 1); - return Ok(literal_suffix(input)); + n = i; + break; + } + '#' => {} + _ => return Err(Reject), + } + } + if n > 255 { + // https://github.com/rust-lang/rust/pull/95251 + return Err(Reject); + } + while let Some((i, ch)) = chars.next() { + match ch { + '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => { + let rest = input.advance(i + 1 + n); + return Ok(literal_suffix(rest)); } '\r' => match chars.next() { Some((_, '\n')) => {} _ => break, }, - '\\' => match chars.next() { - Some((_, 'x')) => { - backslash_x_nonzero(&mut chars)?; - } - Some((_, 'n' | 'r' | 't' | '\\' | '\'' | '"')) => {} - Some((_, 'u')) => { - if backslash_u(&mut chars)? == '\0' { - break; - } - } - Some((newline, ch @ ('\n' | '\r'))) => { - input = input.advance(newline + 1); - trailing_backslash(&mut input, ch as u8)?; - chars = input.char_indices(); - } - _ => break, - }, - '\0' => break, - _ch => {} + _ => {} } } Err(Reject) @@ -587,8 +497,9 @@ fn byte(input: Cursor) -> Result { let mut bytes = input.bytes().enumerate(); let ok = match bytes.next().map(|(_, b)| b) { Some(b'\\') => match bytes.next().map(|(_, b)| b) { - Some(b'x') => backslash_x_byte(&mut bytes).is_ok(), - Some(b'n' | b'r' | b't' | b'\\' | b'0' | b'\'' | b'"') => true, + Some(b'x') => backslash_x_byte(&mut bytes), + Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'') + | Some(b'"') => true, _ => false, }, b => b.is_some(), @@ -609,9 +520,11 @@ fn character(input: Cursor) -> Result { let mut chars = input.char_indices(); let ok = match chars.next().map(|(_, ch)| ch) { Some('\\') => match chars.next().map(|(_, ch)| ch) { - Some('x') => backslash_x_char(&mut chars).is_ok(), - Some('u') => backslash_u(&mut chars).is_ok(), - Some('n' | 'r' | 't' | '\\' | '0' | '\'' | '"') => true, + Some('x') => backslash_x_char(&mut chars), + Some('u') => backslash_u(&mut chars), + Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => { + true + } _ => false, }, ch => ch.is_some(), @@ -625,49 +538,36 @@ fn character(input: Cursor) -> Result { } macro_rules! 
next_ch { - ($chars:ident @ $pat:pat) => { + ($chars:ident @ $pat:pat $(| $rest:pat)*) => { match $chars.next() { Some((_, ch)) => match ch { - $pat => ch, - _ => return Err(Reject), + $pat $(| $rest)* => ch, + _ => return false, }, - None => return Err(Reject), + None => return false, } }; } -fn backslash_x_char(chars: &mut I) -> Result<(), Reject> +fn backslash_x_char(chars: &mut I) -> bool where I: Iterator, { next_ch!(chars @ '0'..='7'); next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); - Ok(()) + true } -fn backslash_x_byte(chars: &mut I) -> Result<(), Reject> +fn backslash_x_byte(chars: &mut I) -> bool where I: Iterator, { next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'); - Ok(()) + true } -fn backslash_x_nonzero(chars: &mut I) -> Result<(), Reject> -where - I: Iterator, -{ - let first = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); - let second = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F'); - if first == '0' && second == '0' { - Err(Reject) - } else { - Ok(()) - } -} - -fn backslash_u(chars: &mut I) -> Result +fn backslash_u(chars: &mut I) -> bool where I: Iterator, { @@ -680,36 +580,17 @@ where 'a'..='f' => 10 + ch as u8 - b'a', 'A'..='F' => 10 + ch as u8 - b'A', '_' if len > 0 => continue, - '}' if len > 0 => return char::from_u32(value).ok_or(Reject), - _ => break, + '}' if len > 0 => return char::from_u32(value).is_some(), + _ => return false, }; if len == 6 { - break; + return false; } value *= 0x10; value += u32::from(digit); len += 1; } - Err(Reject) -} - -fn trailing_backslash(input: &mut Cursor, mut last: u8) -> Result<(), Reject> { - let mut whitespace = input.bytes().enumerate(); - loop { - if last == b'\r' && whitespace.next().map_or(true, |(_, b)| b != b'\n') { - return Err(Reject); - } - match whitespace.next() { - Some((_, b @ (b' ' | b'\t' | b'\n' | b'\r'))) => { - last = b; - } - Some((offset, _)) => { - *input = input.advance(offset); - return Ok(()); - } - None => return Err(Reject), - } - } + false } fn float(input: Cursor) -> Result { @@ -725,7 +606,7 @@ fn float(input: Cursor) -> Result { fn float_digits(input: Cursor) -> Result { let mut chars = input.chars().peekable(); match chars.next() { - Some(ch) if '0' <= ch && ch <= '9' => {} + Some(ch) if ch >= '0' && ch <= '9' => {} _ => return Err(Reject), } @@ -875,7 +756,7 @@ fn digits(mut input: Cursor) -> Result { fn punct(input: Cursor) -> PResult { let (rest, ch) = punct_char(input)?; if ch == '\'' { - if ident_any(rest)?.0.starts_with_char('\'') { + if ident_any(rest)?.0.starts_with("'") { Err(Reject) } else { Ok((rest, Punct::new('\'', Spacing::Joint))) @@ -914,13 +795,12 @@ fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult #[cfg(span_locations)] let lo = input.off; let (rest, (comment, inner)) = doc_comment_contents(input)?; - let fallback_span = Span { + let span = crate::Span::_new_fallback(Span { #[cfg(span_locations)] lo, #[cfg(span_locations)] hi: rest.off, - }; - let span = crate::Span::_new_fallback(fallback_span); + }); let mut scan_for_bare_cr = comment; while let Some(cr) = scan_for_bare_cr.find('\r') { @@ -941,7 +821,7 @@ fn doc_comment<'a>(input: Cursor<'a>, trees: &mut TokenStreamBuilder) -> PResult trees.push_token_from_parser(TokenTree::Punct(bang)); } - let doc_ident = crate::Ident::_new(crate::imp::Ident::new_unchecked("doc", fallback_span)); + let doc_ident = crate::Ident::new("doc", span); let mut equal = Punct::new('=', Spacing::Alone); equal.set_span(span); let mut 
literal = crate::Literal::string(comment); @@ -968,7 +848,7 @@ fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> { Ok((input, (&s[3..s.len() - 2], true))) } else if input.starts_with("///") { let input = input.advance(3); - if input.starts_with_char('/') { + if input.starts_with("/") { return Err(Reject); } let (input, s) = take_until_newline_or_eof(input); diff --git a/src/rcvec.rs b/src/rcvec.rs index 37955af..86ca7d8 100644 --- a/src/rcvec.rs +++ b/src/rcvec.rs @@ -1,8 +1,7 @@ -use alloc::rc::Rc; -use alloc::vec; use core::mem; -use core::panic::RefUnwindSafe; use core::slice; +use std::rc::Rc; +use std::vec; pub(crate) struct RcVec { inner: Rc>, @@ -53,7 +52,7 @@ impl RcVec { T: Clone, { let vec = if let Some(owned) = Rc::get_mut(&mut self.inner) { - mem::take(owned) + mem::replace(owned, Vec::new()) } else { Vec::clone(&self.inner) }; @@ -141,5 +140,3 @@ impl Iterator for RcVecIntoIter { self.inner.size_hint() } } - -impl RefUnwindSafe for RcVec where T: RefUnwindSafe {} diff --git a/src/wrapper.rs b/src/wrapper.rs index f5eb826..00f67cd 100644 --- a/src/wrapper.rs +++ b/src/wrapper.rs @@ -3,6 +3,7 @@ use crate::detection::inside_proc_macro; use crate::location::LineColumn; use crate::{fallback, Delimiter, Punct, Spacing, TokenTree}; use core::fmt::{self, Debug, Display}; +use core::iter::FromIterator; use core::ops::RangeBounds; use core::str::FromStr; use std::panic; @@ -28,25 +29,20 @@ pub(crate) struct DeferredTokenStream { pub(crate) enum LexError { Compiler(proc_macro::LexError), Fallback(fallback::LexError), - - // Rustc was supposed to return a LexError, but it panicked instead. - // https://github.com/rust-lang/rust/issues/58736 - CompilerPanic, } -#[cold] -fn mismatch(line: u32) -> ! { - #[cfg(procmacro2_backtrace)] - { - let backtrace = std::backtrace::Backtrace::force_capture(); - panic!("compiler/fallback mismatch #{}\n\n{}", line, backtrace) - } - #[cfg(not(procmacro2_backtrace))] - { - panic!("compiler/fallback mismatch #{}", line) +impl LexError { + fn call_site() -> Self { + LexError::Fallback(fallback::LexError { + span: fallback::Span::call_site(), + }) } } +fn mismatch() -> ! { + panic!("compiler/fallback mismatch") +} + impl DeferredTokenStream { fn new(stream: proc_macro::TokenStream) -> Self { DeferredTokenStream { @@ -93,13 +89,13 @@ impl TokenStream { fn unwrap_nightly(self) -> proc_macro::TokenStream { match self { TokenStream::Compiler(s) => s.into_token_stream(), - TokenStream::Fallback(_) => mismatch(line!()), + TokenStream::Fallback(_) => mismatch(), } } fn unwrap_stable(self) -> fallback::TokenStream { match self { - TokenStream::Compiler(_) => mismatch(line!()), + TokenStream::Compiler(_) => mismatch(), TokenStream::Fallback(s) => s, } } @@ -122,7 +118,7 @@ impl FromStr for TokenStream { // Work around https://github.com/rust-lang/rust/issues/58736. 
fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> { let result = panic::catch_unwind(|| src.parse().map_err(LexError::Compiler)); - result.unwrap_or_else(|_| Err(LexError::CompilerPanic)) + result.unwrap_or_else(|_| Err(LexError::call_site())) } impl Display for TokenStream { @@ -203,14 +199,14 @@ impl FromIterator<TokenStream> for TokenStream { first.evaluate_now(); first.stream.extend(streams.map(|s| match s { TokenStream::Compiler(s) => s.into_token_stream(), - TokenStream::Fallback(_) => mismatch(line!()), + TokenStream::Fallback(_) => mismatch(), })); TokenStream::Compiler(first) } Some(TokenStream::Fallback(mut first)) => { first.extend(streams.map(|s| match s { TokenStream::Fallback(s) => s, - TokenStream::Compiler(_) => mismatch(line!()), + TokenStream::Compiler(_) => mismatch(), })); TokenStream::Fallback(first) } @@ -260,7 +256,7 @@ impl Debug for TokenStream { impl LexError { pub(crate) fn span(&self) -> Span { match self { - LexError::Compiler(_) | LexError::CompilerPanic => Span::call_site(), + LexError::Compiler(_) => Span::call_site(), LexError::Fallback(e) => Span::Fallback(e.span()), } } @@ -283,10 +279,6 @@ impl Debug for LexError { match self { LexError::Compiler(e) => Debug::fmt(e, f), LexError::Fallback(e) => Debug::fmt(e, f), - LexError::CompilerPanic => { - let fallback = fallback::LexError::call_site(); - Debug::fmt(&fallback, f) - } } } } @@ -294,12 +286,16 @@ impl Debug for LexError { impl Display for LexError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { + #[cfg(not(no_lexerror_display))] LexError::Compiler(e) => Display::fmt(e, f), + #[cfg(no_lexerror_display)] + LexError::Compiler(_e) => Display::fmt( + &fallback::LexError { + span: fallback::Span::call_site(), + }, + f, + ), LexError::Fallback(e) => Display::fmt(e, f), - LexError::CompilerPanic => { - let fallback = fallback::LexError::call_site(); - Display::fmt(&fallback, f) - } } } } @@ -410,6 +406,7 @@ impl Span { } } + #[cfg(not(no_hygiene))] pub fn mixed_site() -> Self { if inside_proc_macro() { Span::Compiler(proc_macro::Span::mixed_site()) @@ -429,19 +426,29 @@ impl Span { pub fn resolved_at(&self, other: Span) -> Span { match (self, other) { + #[cfg(not(no_hygiene))] (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)), + + // Name resolution affects semantics, but location is only cosmetic + #[cfg(no_hygiene)] + (Span::Compiler(_), Span::Compiler(_)) => other, + (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)), - (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()), + _ => mismatch(), } } pub fn located_at(&self, other: Span) -> Span { match (self, other) { + #[cfg(not(no_hygiene))] (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)), + + // Name resolution affects semantics, but location is only cosmetic + #[cfg(no_hygiene)] + (Span::Compiler(_), Span::Compiler(_)) => *self, + (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)), - (Span::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Span::Fallback(_), Span::Compiler(_)) => mismatch(line!()), + _ => mismatch(), } } @@ -463,6 +470,12 @@ impl Span { #[cfg(span_locations)] pub fn start(&self) -> LineColumn { match self { + #[cfg(proc_macro_span)] + Span::Compiler(s) => { + let proc_macro::LineColumn { line, column } = s.start(); + LineColumn { line, column } + } + #[cfg(not(proc_macro_span))] Span::Compiler(_) => LineColumn { line: 0, column: 0 }, Span::Fallback(s) => s.start(), }
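The proc_macro_parse change above keeps the std::panic::catch_unwind wrapper so that a compiler panic during token-stream parsing (rust-lang/rust#58736) is reported as an ordinary lexer error rather than aborting the macro. A minimal sketch of that pattern, with a hypothetical ParseError standing in for LexError and str::parse::<i64> standing in for the proc_macro::TokenStream parser (which is only callable inside a procedural macro):

    use std::panic;

    #[derive(Debug)]
    struct ParseError; // stand-in for LexError::call_site()

    fn parse_or_error(src: &str) -> Result<i64, ParseError> {
        // If the inner parser returns Err, keep it; if it panics instead,
        // catch_unwind yields Err(payload) and we map that to the same error.
        let result = panic::catch_unwind(|| src.parse::<i64>().map_err(|_| ParseError));
        result.unwrap_or_else(|_panic_payload| Err(ParseError))
    }

    fn main() {
        assert!(parse_or_error("42").is_ok());
        assert!(parse_or_error("not a number").is_err());
    }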
@@ -471,11 +484,33 @@ impl Span { #[cfg(span_locations)] pub fn end(&self) -> LineColumn { match self { + #[cfg(proc_macro_span)] + Span::Compiler(s) => { + let proc_macro::LineColumn { line, column } = s.end(); + LineColumn { line, column } + } + #[cfg(not(proc_macro_span))] Span::Compiler(_) => LineColumn { line: 0, column: 0 }, Span::Fallback(s) => s.end(), } } + #[cfg(super_unstable)] + pub fn before(&self) -> Span { + match self { + Span::Compiler(s) => Span::Compiler(s.before()), + Span::Fallback(s) => Span::Fallback(s.before()), + } + } + + #[cfg(super_unstable)] + pub fn after(&self) -> Span { + match self { + Span::Compiler(s) => Span::Compiler(s.after()), + Span::Fallback(s) => Span::Fallback(s.after()), + } + } + pub fn join(&self, other: Span) -> Option { let ret = match (self, other) { #[cfg(proc_macro_span)] @@ -508,7 +543,7 @@ impl Span { fn unwrap_nightly(self) -> proc_macro::Span { match self { Span::Compiler(s) => s, - Span::Fallback(_) => mismatch(line!()), + Span::Fallback(_) => mismatch(), } } } @@ -595,14 +630,20 @@ impl Group { pub fn span_open(&self) -> Span { match self { + #[cfg(not(no_group_open_close))] Group::Compiler(g) => Span::Compiler(g.span_open()), + #[cfg(no_group_open_close)] + Group::Compiler(g) => Span::Compiler(g.span()), Group::Fallback(g) => Span::Fallback(g.span_open()), } } pub fn span_close(&self) -> Span { match self { + #[cfg(not(no_group_open_close))] Group::Compiler(g) => Span::Compiler(g.span_close()), + #[cfg(no_group_open_close)] + Group::Compiler(g) => Span::Compiler(g.span()), Group::Fallback(g) => Span::Fallback(g.span_close()), } } @@ -611,15 +652,14 @@ impl Group { match (self, span) { (Group::Compiler(g), Span::Compiler(s)) => g.set_span(s), (Group::Fallback(g), Span::Fallback(s)) => g.set_span(s), - (Group::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Group::Fallback(_), Span::Compiler(_)) => mismatch(line!()), + _ => mismatch(), } } fn unwrap_nightly(self) -> proc_macro::Group { match self { Group::Compiler(g) => g, - Group::Fallback(_) => mismatch(line!()), + Group::Fallback(_) => mismatch(), } } } @@ -655,30 +695,40 @@ pub(crate) enum Ident { } impl Ident { - #[track_caller] - pub fn new_checked(string: &str, span: Span) -> Self { + pub fn new(string: &str, span: Span) -> Self { match span { Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)), - Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_checked(string, s)), + Span::Fallback(s) => Ident::Fallback(fallback::Ident::new(string, s)), } } - pub fn new_unchecked(string: &str, span: fallback::Span) -> Self { - Ident::Fallback(fallback::Ident::new_unchecked(string, span)) - } - - #[track_caller] - pub fn new_raw_checked(string: &str, span: Span) -> Self { + pub fn new_raw(string: &str, span: Span) -> Self { match span { + #[cfg(not(no_ident_new_raw))] Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new_raw(string, s)), - Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw_checked(string, s)), + #[cfg(no_ident_new_raw)] + Span::Compiler(s) => { + let _ = proc_macro::Ident::new(string, s); + // At this point the un-r#-prefixed string is known to be a + // valid identifier. Try to produce a valid raw identifier by + // running the `TokenStream` parser, and unwrapping the first + // token as an `Ident`. 
+ let raw_prefixed = format!("r#{}", string); + if let Ok(ts) = raw_prefixed.parse::<proc_macro::TokenStream>() { + let mut iter = ts.into_iter(); + if let (Some(proc_macro::TokenTree::Ident(mut id)), None) = + (iter.next(), iter.next()) + { + id.set_span(s); + return Ident::Compiler(id); + } + } + panic!("not allowed as a raw identifier: `{}`", raw_prefixed) + } + Span::Fallback(s) => Ident::Fallback(fallback::Ident::new_raw(string, s)), } } - pub fn new_raw_unchecked(string: &str, span: fallback::Span) -> Self { - Ident::Fallback(fallback::Ident::new_raw_unchecked(string, span)) - } - pub fn span(&self) -> Span { match self { Ident::Compiler(t) => Span::Compiler(t.span()), @@ -690,15 +740,14 @@ impl Ident { match (self, span) { (Ident::Compiler(t), Span::Compiler(s)) => t.set_span(s), (Ident::Fallback(t), Span::Fallback(s)) => t.set_span(s), - (Ident::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Ident::Fallback(_), Span::Compiler(_)) => mismatch(line!()), + _ => mismatch(), } } fn unwrap_nightly(self) -> proc_macro::Ident { match self { Ident::Compiler(s) => s, - Ident::Fallback(_) => mismatch(line!()), + Ident::Fallback(_) => mismatch(), } } } @@ -708,8 +757,7 @@ impl PartialEq for Ident { match (self, other) { (Ident::Compiler(t), Ident::Compiler(o)) => t.to_string() == o.to_string(), (Ident::Fallback(t), Ident::Fallback(o)) => t == o, - (Ident::Compiler(_), Ident::Fallback(_)) => mismatch(line!()), - (Ident::Fallback(_), Ident::Compiler(_)) => mismatch(line!()), + _ => mismatch(), } } } @@ -778,9 +826,9 @@ macro_rules! unsuffixed_integers { impl Literal { pub unsafe fn from_str_unchecked(repr: &str) -> Self { if inside_proc_macro() { - Literal::Compiler(proc_macro::Literal::from_str(repr).expect("invalid literal")) + Literal::Compiler(compiler_literal_from_str(repr).expect("invalid literal")) } else { - Literal::Fallback(unsafe { fallback::Literal::from_str_unchecked(repr) }) + Literal::Fallback(fallback::Literal::from_str_unchecked(repr)) } } @@ -868,8 +916,7 @@ impl Literal { match (self, span) { (Literal::Compiler(lit), Span::Compiler(s)) => lit.set_span(s), (Literal::Fallback(lit), Span::Fallback(s)) => lit.set_span(s), - (Literal::Compiler(_), Span::Fallback(_)) => mismatch(line!()), - (Literal::Fallback(_), Span::Compiler(_)) => mismatch(line!()), + _ => mismatch(), } } @@ -886,7 +933,7 @@ impl Literal { fn unwrap_nightly(self) -> proc_macro::Literal { match self { Literal::Compiler(s) => s, - Literal::Fallback(_) => mismatch(line!()), + Literal::Fallback(_) => mismatch(), } } } @@ -902,8 +949,7 @@ fn from_str(repr: &str) -> Result<Self, Self::Err> { if inside_proc_macro() { - let literal = proc_macro::Literal::from_str(repr)?; - Ok(Literal::Compiler(literal)) + compiler_literal_from_str(repr).map(Literal::Compiler) } else { let literal = fallback::Literal::from_str(repr)?; Ok(Literal::Fallback(literal)) @@ -911,6 +957,24 @@ } } +fn compiler_literal_from_str(repr: &str) -> Result<proc_macro::Literal, LexError> { + #[cfg(not(no_literal_from_str))] + { + proc_macro::Literal::from_str(repr).map_err(LexError::Compiler) + } + #[cfg(no_literal_from_str)] + { + let tokens = proc_macro_parse(repr)?; + let mut iter = tokens.into_iter(); + if let (Some(proc_macro::TokenTree::Literal(literal)), None) = (iter.next(), iter.next()) { + if literal.to_string().len() == repr.len() { + return Ok(literal); + } + } + Err(LexError::call_site()) + } +} + impl Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { diff --git a/tests/marker.rs b/tests/marker.rs index 
d08fbfc..5b45733 100644 --- a/tests/marker.rs +++ b/tests/marker.rs @@ -62,6 +62,7 @@ mod semver_exempt { assert_impl!(SourceFile is not Send or Sync); } +#[cfg(not(no_libprocmacro_unwind_safe))] mod unwind_safe { use proc_macro2::{ Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree, diff --git a/tests/test.rs b/tests/test.rs index b75cd55..e0af151 100644 --- a/tests/test.rs +++ b/tests/test.rs @@ -1,12 +1,12 @@ #![allow( clippy::assertions_on_result_states, clippy::items_after_statements, - clippy::non_ascii_literal, - clippy::octal_escapes + clippy::non_ascii_literal )] use proc_macro2::{Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; use std::iter; +use std::panic; use std::str::{self, FromStr}; #[test] @@ -89,9 +89,24 @@ fn lifetime_number() { } #[test] -#[should_panic(expected = r#""'a#" is not a valid Ident"#)] fn lifetime_invalid() { - Ident::new("'a#", Span::call_site()); + let result = panic::catch_unwind(|| Ident::new("'a#", Span::call_site())); + match result { + Err(box_any) => { + let message = box_any.downcast_ref::().unwrap(); + let expected1 = r#""\'a#" is not a valid Ident"#; // 1.31.0 .. 1.53.0 + let expected2 = r#""'a#" is not a valid Ident"#; // 1.53.0 .. + assert!( + message == expected1 || message == expected2, + "panic message does not match expected string\n\ + \x20 panic message: `{:?}`\n\ + \x20expected message: `{:?}`", + message, + expected2, + ); + } + Ok(_) => panic!("test did not panic as expected"), + } } #[test] @@ -99,13 +114,6 @@ fn literal_string() { assert_eq!(Literal::string("foo").to_string(), "\"foo\""); assert_eq!(Literal::string("\"").to_string(), "\"\\\"\""); assert_eq!(Literal::string("didn't").to_string(), "\"didn't\""); - assert_eq!( - Literal::string("a\00b\07c\08d\0e\0").to_string(), - "\"a\\x000b\\x007c\\08d\\0e\\0\"", - ); - - "\"\\\r\n x\"".parse::().unwrap(); - "\"\\\r\n \rx\"".parse::().unwrap_err(); } #[test] @@ -139,51 +147,6 @@ fn literal_byte_string() { Literal::byte_string(b"\0\t\n\r\"\\2\x10").to_string(), "b\"\\0\\t\\n\\r\\\"\\\\2\\x10\"", ); - assert_eq!( - Literal::byte_string(b"a\00b\07c\08d\0e\0").to_string(), - "b\"a\\x000b\\x007c\\08d\\0e\\0\"", - ); - - "b\"\\\r\n x\"".parse::().unwrap(); - "b\"\\\r\n \rx\"".parse::().unwrap_err(); - "b\"\\\r\n \u{a0}x\"".parse::().unwrap_err(); - "br\"\u{a0}\"".parse::().unwrap_err(); -} - -#[test] -fn literal_c_string() { - let strings = r###" - c"hello\x80我叫\u{1F980}" // from the RFC - cr"\" - cr##"Hello "world"!"## - c"\t\n\r\"\\" - "###; - - let mut tokens = strings.parse::().unwrap().into_iter(); - - for expected in &[ - r#"c"hello\x80我叫\u{1F980}""#, - r#"cr"\""#, - r###"cr##"Hello "world"!"##"###, - r#"c"\t\n\r\"\\""#, - ] { - match tokens.next().unwrap() { - TokenTree::Literal(literal) => { - assert_eq!(literal.to_string(), *expected); - } - unexpected => panic!("unexpected token: {:?}", unexpected), - } - } - - if let Some(unexpected) = tokens.next() { - panic!("unexpected token: {:?}", unexpected); - } - - for invalid in &[r#"c"\0""#, r#"c"\x00""#, r#"c"\u{0}""#, "c\"\0\""] { - if let Ok(unexpected) = invalid.parse::() { - panic!("unexpected token: {:?}", unexpected); - } - } } #[test] @@ -301,48 +264,6 @@ fn literal_parse() { assert!("-\"\"".parse::().is_err()); } -#[test] -fn literal_span() { - let positive = "0.1".parse::().unwrap(); - let negative = "-0.1".parse::().unwrap(); - let subspan = positive.subspan(1..2); - - #[cfg(not(span_locations))] - { - let _ = negative; - assert!(subspan.is_none()); - } - - 
#[cfg(span_locations)] - { - assert_eq!(positive.span().start().column, 0); - assert_eq!(positive.span().end().column, 3); - assert_eq!(negative.span().start().column, 0); - assert_eq!(negative.span().end().column, 4); - assert_eq!(subspan.unwrap().source_text().unwrap(), "."); - } - - assert!(positive.subspan(1..4).is_none()); -} - -#[cfg(span_locations)] -#[test] -fn source_text() { - let input = " 𓀕 a z "; - let mut tokens = input - .parse::() - .unwrap() - .into_iter(); - - let first = tokens.next().unwrap(); - assert_eq!("𓀕", first.span().source_text().unwrap()); - - let second = tokens.next().unwrap(); - let third = tokens.next().unwrap(); - assert_eq!("z", third.span().source_text().unwrap()); - assert_eq!("a", second.span().source_text().unwrap()); -} - #[test] fn roundtrip() { fn roundtrip(p: &str) { @@ -682,8 +603,8 @@ fn non_ascii_tokens() { check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]); check_spans(r#""abc""#, &[(1, 0, 1, 5)]); check_spans(r#""ábc""#, &[(1, 0, 1, 5)]); - check_spans(r##"r#"abc"#"##, &[(1, 0, 1, 8)]); - check_spans(r##"r#"ábc"#"##, &[(1, 0, 1, 8)]); + check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]); + check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]); check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]); check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]); check_spans("'a'", &[(1, 0, 1, 3)]); @@ -703,6 +624,7 @@ fn non_ascii_tokens() { check_spans("ábc// foo", &[(1, 0, 1, 3)]); check_spans("ábć// foo", &[(1, 0, 1, 3)]); check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]); + check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]); } #[cfg(span_locations)] @@ -733,18 +655,6 @@ fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usi } } -#[test] -fn whitespace() { - // space, horizontal tab, vertical tab, form feed, carriage return, line - // feed, non-breaking space, left-to-right mark, right-to-left mark - let various_spaces = " \t\u{b}\u{c}\r\n\u{a0}\u{200e}\u{200f}"; - let tokens = various_spaces.parse::().unwrap(); - assert_eq!(tokens.into_iter().count(), 0); - - let lone_carriage_returns = " \r \r\r\n "; - lone_carriage_returns.parse::().unwrap(); -} - #[test] fn byte_order_mark() { let string = "\u{feff}foo"; diff --git a/tests/test_fmt.rs b/tests/test_fmt.rs index 86a4c38..93dd19e 100644 --- a/tests/test_fmt.rs +++ b/tests/test_fmt.rs @@ -1,7 +1,7 @@ #![allow(clippy::from_iter_instead_of_collect)] use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree}; -use std::iter; +use std::iter::{self, FromIterator}; #[test] fn test_fmt_group() { diff --git a/tests/ui/Cargo.toml b/tests/ui/Cargo.toml new file mode 100644 index 0000000..1e65169 --- /dev/null +++ b/tests/ui/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "proc-macro2-ui-test" +version = "0.0.0" +authors = ["David Tolnay "] +edition = "2018" +publish = false + +[[test]] +name = "compiletest" +path = "compiletest.rs" + +[dev-dependencies] +proc-macro2 = { path = "../.." 
} +rustversion = "1.0" +trybuild = { version = "1.0.49", features = ["diff"] } diff --git a/tests/ui/compiletest.rs b/tests/ui/compiletest.rs new file mode 100644 index 0000000..c307aaf --- /dev/null +++ b/tests/ui/compiletest.rs @@ -0,0 +1,7 @@ +#[rustversion::attr(not(nightly), ignore)] +#[cfg_attr(miri, ignore)] +#[test] +fn ui() { + let t = trybuild::TestCases::new(); + t.compile_fail("test-*.rs"); +} diff --git a/tests/ui/test-not-send.rs b/tests/ui/test-not-send.rs new file mode 100644 index 0000000..171be97 --- /dev/null +++ b/tests/ui/test-not-send.rs @@ -0,0 +1,6 @@ +use proc_macro2::Span; + +fn main() { + fn requires_send<T: Send>() {} + requires_send::<Span>(); +} diff --git a/tests/ui/test-not-send.stderr b/tests/ui/test-not-send.stderr new file mode 100644 index 0000000..cc535a8 --- /dev/null +++ b/tests/ui/test-not-send.stderr @@ -0,0 +1,30 @@ +error[E0277]: `proc_macro::Span` cannot be sent between threads safely + --> test-not-send.rs:5:21 + | +5 | requires_send::<Span>(); + | ^^^^ `proc_macro::Span` cannot be sent between threads safely + | + = help: within `Span`, the trait `Send` is not implemented for `proc_macro::Span` + = note: required because it appears within the type `Span` + = note: required because it appears within the type `Span` +note: required by a bound in `requires_send` + --> test-not-send.rs:4:25 + | +4 | fn requires_send<T: Send>() {} + | ^^^^ required by this bound in `requires_send` + +error[E0277]: `Rc<()>` cannot be sent between threads safely + --> test-not-send.rs:5:21 + | +5 | requires_send::<Span>(); + | ^^^^ `Rc<()>` cannot be sent between threads safely + | + = help: within `Span`, the trait `Send` is not implemented for `Rc<()>` + = note: required because it appears within the type `ProcMacroAutoTraits` + = note: required because it appears within the type `PhantomData<ProcMacroAutoTraits>` + = note: required because it appears within the type `Span` +note: required by a bound in `requires_send` + --> test-not-send.rs:4:25 + | +4 | fn requires_send<T: Send>() {} + | ^^^^ required by this bound in `requires_send`
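The new tests/ui harness asks rustc to reject test-not-send.rs and compares the compiler output against the checked-in .stderr snapshot via trybuild's compile_fail. A minimal sketch of the underlying compile-time assertion, using a requires_send helper mirroring the one in the UI test (the real test instantiates it with proc_macro2::Span, which is not Send because it can hold a proc_macro::Span and an Rc<()>-based marker):

    use std::rc::Rc;

    // A generic function whose only purpose is to carry a `Send` bound.
    fn requires_send<T: Send>() {}

    fn main() {
        requires_send::<String>(); // compiles: String is Send
        // requires_send::<Rc<()>>(); // uncommenting this is rejected with
        //                            // E0277, the same error class captured
        //                            // in test-not-send.stderr
        let _ = Rc::new(()); // keep the Rc import exercised in this sketch
    }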