Backed out changeset afdd9e4681c1 (bug 1932137) by developer request. CLOSED TREE

Alexandru Marc 2024-11-22 20:12:23 +02:00
parent 622118a729
commit 6027d1d91f
29 changed files with 338 additions and 1203 deletions

Cargo.lock

@ -3110,23 +3110,14 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "1.0.3"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
dependencies = [
"idna_adapter",
"smallvec",
"utf8_iter",
]
[[package]]
name = "idna_adapter"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71"
checksum = "bd69211b9b519e98303c015e21a007e293db403b6c85b9b124e133d25e242cdd"
dependencies = [
"icu_normalizer",
"icu_properties",
"smallvec",
"utf8_iter",
]
[[package]]
@ -6748,9 +6739,9 @@ checksum = "2ace0b4755d0a2959962769239d56267f8a024fef2d9b32666b3dcd0946b0906"
[[package]]
name = "url"
version = "2.5.3"
version = "2.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada"
checksum = "f7c25da092f0a868cdf09e8674cd3b7ef3a7d92a24253e663a2fb85e2496de56"
dependencies = [
"form_urlencoded",
"idna",


@ -2778,16 +2778,6 @@ criteria = "safe-to-deploy"
delta = "0.5.0 -> 1.0.2"
notes = "In the 0.5.0 to 1.0.2 delta, I, Henri Sivonen, rewrote the non-Punycode internals of the crate and made the changes to the Punycode code."
[[audits.idna]]
who = "Valentin Gosu <valentin.gosu@gmail.com>"
criteria = "safe-to-deploy"
delta = "1.0.2 -> 1.0.3"
[[audits.idna_adapter]]
who = "Valentin Gosu <valentin.gosu@gmail.com>"
criteria = "safe-to-deploy"
version = "1.2.0"
[[audits.indexmap]]
who = "Mike Hommey <mh+mozilla@glandium.org>"
criteria = "safe-to-deploy"
@ -5176,11 +5166,6 @@ who = "Henri Sivonen <hsivonen@hsivonen.fi>"
criteria = "safe-to-deploy"
delta = "2.5.0 -> 2.5.1"
[[audits.url]]
who = "Valentin Gosu <valentin.gosu@gmail.com>"
criteria = "safe-to-deploy"
delta = "2.5.1 -> 2.5.3"
[[audits.utf16_iter]]
who = "Henri Sivonen <hsivonen@hsivonen.fi>"
criteria = "safe-to-deploy"


@ -31,6 +31,24 @@
[<area>: Setting <foo:///some/path>.pathname = '' Non-special URLs with an empty host can have their paths erased]
expected: FAIL
[<a>: Setting <non-spec:/>.pathname = '/.//p' Serialize /. in path]
expected: FAIL
[<area>: Setting <non-spec:/>.pathname = '/.//p' Serialize /. in path]
expected: FAIL
[<a>: Setting <non-spec:/>.pathname = '/..//p']
expected: FAIL
[<area>: Setting <non-spec:/>.pathname = '/..//p']
expected: FAIL
[<a>: Setting <non-spec:/>.pathname = '//p']
expected: FAIL
[<area>: Setting <non-spec:/>.pathname = '//p']
expected: FAIL
[url-setters-a-area.window.html?include=file]
[<a>: Setting <file://hi/path>.protocol = 's']


@ -429,6 +429,18 @@
[URL: Setting <foo:///some/path>.pathname = '' Non-special URLs with an empty host can have their paths erased]
expected: FAIL
[URL: Setting <non-spec:/>.pathname = '/.//p' Serialize /. in path]
expected: FAIL
[URL: Setting <non-spec:/>.pathname = '/..//p']
expected: FAIL
[URL: Setting <non-spec:/>.pathname = '//p']
expected: FAIL
[URL: Setting <non-spec:/.//>.pathname = 'p' Drop /. from path]
expected: FAIL
[url-setters.any.html?include=file]
[URL: Setting <file://hi/path>.protocol = 's']
@ -474,6 +486,18 @@
[URL: Setting <foo:///some/path>.pathname = '' Non-special URLs with an empty host can have their paths erased]
expected: FAIL
[URL: Setting <non-spec:/>.pathname = '/.//p' Serialize /. in path]
expected: FAIL
[URL: Setting <non-spec:/>.pathname = '/..//p']
expected: FAIL
[URL: Setting <non-spec:/>.pathname = '//p']
expected: FAIL
[URL: Setting <non-spec:/.//>.pathname = 'p' Drop /. from path]
expected: FAIL
[url-setters.any.worker.html?include=file]
[URL: Setting <file://hi/path>.protocol = 's']


@ -1 +1 @@
{"files":{"Cargo.toml":"4b63930f833beebdd51043e4435625638e39f804f172914ee4da834bfd18ac68","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","README.md":"b95f383f35d2677a4aace5f816837357dca4b85f9c8cbed76d106c18faaca7e5","benches/all.rs":"cb99d454de05c3a72df82d2fca230757595ad25493e790eeb93a44ad43725cb8","src/deprecated.rs":"bdba5a73432d9755c831ec01edf4d512f9390b351dba0eb8ce7b0430fa1073ad","src/lib.rs":"6d0a03cd07e652974dd73a57b7505c9840c7b8afd84d7e223926515b99216bb4","src/punycode.rs":"86b8496e0b9fa807b456b74586d2c1bbd7dad832c67c3864e726e442646b34b3","src/uts46.rs":"f96ab9d27140a546f882200febc0bea8678d7ec320d35da9ef757045cf3d7309","tests/IdnaTestV2.txt":"d668c4ea58d60fe04e6c011df98e0b317da6abaa1273d58f42b581eb0dd7adda","tests/bad_punycode_tests.json":"ff0a15479ed2cb08f7b4b39465160da66d1ac7575e5d76990c17e7b76cb5e0f5","tests/deprecated.rs":"cce256f6616a19314330a06003d6308138aae8257136431d143f062f14ab17c7","tests/punycode.rs":"75fa73b6429ccacaeb5d72fab0b927cdf9f2173a9fc5fb366697bf7002b73921","tests/punycode_tests.json":"50859b828d14d5eeba5ab930de25fb72a35310a0b46f421f65d64c7c3e54d08a","tests/tests.rs":"ecee59f0b0be27ba1e7b24bb449c681024253d0275065f0f0e258e7ec2977d12","tests/unit.rs":"7e450599b52900baa51ea26ff0cb55a830456f60642985abbc87ec671a91b8e1","tests/unitbis.rs":"545259b767cd045aed01c1515c3b092d1b3f6b3366ce88d1593a2c8e3ffcd2af","tests/uts46.rs":"0a1c339708f1ab845d726b1f55dc1be8a423a1304b0399234391d0bd419e3fe0"},"package":"686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"}
{"files":{"Cargo.toml":"d453ab4fa012a1f5d9233aa29fa03a7d5bcff06008f2197ce0ddac7e7aa28b2b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","README.md":"dd73e159f3b31a7070f4564f9e68dca14495452e3b30d6fe4ca1d84656b69ee6","benches/all.rs":"53002f41ac38bdd5b1bb0a7ec8d5a9b49ce6cd3d073ce16c1014f9d4e90b762b","src/deprecated.rs":"bdba5a73432d9755c831ec01edf4d512f9390b351dba0eb8ce7b0430fa1073ad","src/lib.rs":"4d30605daf5c18d282d460ee561c7e5218aea76cf33fc072fd79f9617256f04e","src/punycode.rs":"2d9dda9bb6504863ea6f374e9ab4192ccc475a789a43a0fb624b15459a611fbc","src/uts46.rs":"2e719c93954930de20789896b153af7dd84c20e14edba6317f9dd80e3baaccc9","tests/IdnaTestV2.txt":"d668c4ea58d60fe04e6c011df98e0b317da6abaa1273d58f42b581eb0dd7adda","tests/bad_punycode_tests.json":"ff0a15479ed2cb08f7b4b39465160da66d1ac7575e5d76990c17e7b76cb5e0f5","tests/deprecated.rs":"cce256f6616a19314330a06003d6308138aae8257136431d143f062f14ab17c7","tests/punycode.rs":"75fa73b6429ccacaeb5d72fab0b927cdf9f2173a9fc5fb366697bf7002b73921","tests/punycode_tests.json":"50859b828d14d5eeba5ab930de25fb72a35310a0b46f421f65d64c7c3e54d08a","tests/tests.rs":"ecee59f0b0be27ba1e7b24bb449c681024253d0275065f0f0e258e7ec2977d12","tests/unit.rs":"7e450599b52900baa51ea26ff0cb55a830456f60642985abbc87ec671a91b8e1","tests/unitbis.rs":"545259b767cd045aed01c1515c3b092d1b3f6b3366ce88d1593a2c8e3ffcd2af","tests/uts46.rs":"0a1c339708f1ab845d726b1f55dc1be8a423a1304b0399234391d0bd419e3fe0"},"package":"bd69211b9b519e98303c015e21a007e293db403b6c85b9b124e133d25e242cdd"}


@ -11,15 +11,11 @@
[package]
edition = "2018"
rust-version = "1.57"
rust-version = "1.67"
name = "idna"
version = "1.0.3"
version = "1.0.2"
authors = ["The rust-url developers"]
build = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "IDNA (Internationalizing Domain Names in Applications) and Punycode."
readme = "README.md"
keywords = [
@ -34,30 +30,27 @@ repository = "https://github.com/servo/rust-url/"
rustdoc-args = ["--generate-link-to-definition"]
[lib]
name = "idna"
path = "src/lib.rs"
doctest = false
[[test]]
name = "tests"
path = "tests/tests.rs"
harness = false
[[test]]
name = "unit"
path = "tests/unit.rs"
[[test]]
name = "unitbis"
path = "tests/unitbis.rs"
[[bench]]
name = "all"
path = "benches/all.rs"
harness = false
[dependencies.idna_adapter]
version = "1"
[dependencies.icu_normalizer]
version = "1.4.3"
[dependencies.icu_properties]
version = "1.4.2"
[dependencies.smallvec]
version = "1.13.1"
@ -80,7 +73,10 @@ version = "0.9"
[features]
alloc = []
compiled_data = ["idna_adapter/compiled_data"]
compiled_data = [
"icu_normalizer/compiled_data",
"icu_properties/compiled_data",
]
default = [
"std",
"compiled_data",


@ -28,10 +28,6 @@ Apps that need to display host names to the user should use `uts46::Uts46::to_us
* `std` - Adds `impl std::error::Error for Errors {}` (and implies `alloc`).
* By default, all of the above are enabled.
## Alternative Unicode back ends
By default, `idna` uses [ICU4X](https://github.com/unicode-org/icu4x/) as its Unicode back end. If you wish to opt for different tradeoffs between correctness, run-time performance, binary size, compile time, and MSRV, please see the [README of the latest version of the `idna_adapter` crate](https://docs.rs/crate/idna_adapter/latest) for how to opt into a different Unicode back end.
## Breaking changes since 0.5.0
* Stricter IDNA 2008 restrictions are no longer supported. Attempting to enable them panics immediately. UTS 46 allows all the names that IDNA 2008 allows, and when transitional processing is disabled, they resolve the same way. There are additional names that IDNA 2008 disallows but UTS 46 maps to names that IDNA 2008 allows (notably, input is mapped to fold-case output). UTS 46 also allows symbols that were allowed in IDNA 2003 as well as newer symbols that are allowed according to the same principle. (Earlier versions of this crate allowed rejecting such symbols. Rejecting characters that UTS 46 maps to IDNA 2008-permitted characters wasn't supported in earlier versions, either.)
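As a quick illustration of the mapping behaviour described above (not part of this changeset; `domain_to_ascii_cow` with `AsciiDenyList::URL` is the same entry point the crate's benches call), a minimal sketch:

```rust
use idna::AsciiDenyList;

fn main() {
    // UTS 46 maps rather than rejects: mixed-case ASCII input is folded to
    // lower case instead of producing an error.
    let ascii = idna::domain_to_ascii_cow("Example.COM".as_bytes(), AsciiDenyList::URL)
        .expect("plain ASCII domain should map cleanly");
    assert_eq!(ascii, "example.com");
}
```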


@ -49,51 +49,6 @@ fn to_ascii_merged(bench: &mut Bencher) {
bench.iter(|| config.to_ascii(black_box(encoded)));
}
fn to_ascii_cow_plain(bench: &mut Bencher) {
let encoded = "example.com".as_bytes();
bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
fn to_ascii_cow_hyphen(bench: &mut Bencher) {
let encoded = "hyphenated-example.com".as_bytes();
bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
fn to_ascii_cow_leading_digit(bench: &mut Bencher) {
let encoded = "1test.example".as_bytes();
bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
fn to_ascii_cow_unicode_mixed(bench: &mut Bencher) {
let encoded = "مثال.example".as_bytes();
bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
fn to_ascii_cow_punycode_mixed(bench: &mut Bencher) {
let encoded = "xn--mgbh0fb.example".as_bytes();
bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
fn to_ascii_cow_unicode_ltr(bench: &mut Bencher) {
let encoded = "නම.උදාහරණ".as_bytes();
bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
fn to_ascii_cow_punycode_ltr(bench: &mut Bencher) {
let encoded = "xn--r0co.xn--ozc8dl2c3bxd".as_bytes();
bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
fn to_ascii_cow_unicode_rtl(bench: &mut Bencher) {
let encoded = "الاسم.مثال".as_bytes();
bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
fn to_ascii_cow_punycode_rtl(bench: &mut Bencher) {
let encoded = "xn--mgba0b1dh.xn--mgbh0fb".as_bytes();
bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
benchmark_group!(
benches,
to_unicode_puny_label,
@ -103,14 +58,5 @@ benchmark_group!(
to_ascii_already_puny_label,
to_ascii_simple,
to_ascii_merged,
to_ascii_cow_plain,
to_ascii_cow_hyphen,
to_ascii_cow_leading_digit,
to_ascii_cow_unicode_mixed,
to_ascii_cow_punycode_mixed,
to_ascii_cow_unicode_ltr,
to_ascii_cow_punycode_ltr,
to_ascii_cow_unicode_rtl,
to_ascii_cow_punycode_rtl,
);
benchmark_main!(benches);


@ -74,9 +74,6 @@ impl From<Errors> for Result<(), Errors> {
#[cfg(feature = "std")]
impl std::error::Error for Errors {}
#[cfg(not(feature = "std"))]
impl core::error::Error for Errors {}
impl core::fmt::Display for Errors {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
core::fmt::Debug::fmt(self, f)


@ -469,7 +469,7 @@ fn value_to_digit(value: u32) -> char {
fn huge_encode() {
let mut buf = String::new();
assert!(encode_into::<_, _, ExternalCaller>(
core::iter::repeat('ß').take(u32::MAX as usize + 1),
std::iter::repeat('ß').take(u32::MAX as usize + 1),
&mut buf
)
.is_err());


@ -26,7 +26,13 @@ use crate::punycode::InternalCaller;
use alloc::borrow::Cow;
use alloc::string::String;
use core::fmt::Write;
use idna_adapter::*;
use icu_normalizer::properties::CanonicalCombiningClassMap;
use icu_normalizer::uts46::Uts46Mapper;
use icu_properties::maps::CodePointMapDataBorrowed;
use icu_properties::BidiClass;
use icu_properties::CanonicalCombiningClass;
use icu_properties::GeneralCategory;
use icu_properties::JoiningType;
use smallvec::SmallVec;
use utf8_iter::Utf8CharsEx;
@ -100,6 +106,79 @@ const fn ldh_mask() -> u128 {
accu
}
/// Turns a joining type into a mask for comparing with multiple types at once.
const fn joining_type_to_mask(jt: JoiningType) -> u32 {
1u32 << jt.0
}
/// Mask for checking for both left and dual joining.
const LEFT_OR_DUAL_JOINING_MASK: u32 =
joining_type_to_mask(JoiningType::LeftJoining) | joining_type_to_mask(JoiningType::DualJoining);
/// Mask for checking for both right and dual joining.
const RIGHT_OR_DUAL_JOINING_MASK: u32 = joining_type_to_mask(JoiningType::RightJoining)
| joining_type_to_mask(JoiningType::DualJoining);
/// Turns a bidi class into a mask for comparing with multiple classes at once.
const fn bidi_class_to_mask(bc: BidiClass) -> u32 {
1u32 << bc.0
}
/// Mask for checking if the domain is a bidi domain.
const RTL_MASK: u32 = bidi_class_to_mask(BidiClass::RightToLeft)
| bidi_class_to_mask(BidiClass::ArabicLetter)
| bidi_class_to_mask(BidiClass::ArabicNumber);
/// Mask for allowable bidi classes in the first character of a label
/// (either LTR or RTL) in a bidi domain.
const FIRST_BC_MASK: u32 = bidi_class_to_mask(BidiClass::LeftToRight)
| bidi_class_to_mask(BidiClass::RightToLeft)
| bidi_class_to_mask(BidiClass::ArabicLetter);
// Mask for allowable bidi classes of the last (non-Non-Spacing Mark)
// character in an LTR label in a bidi domain.
const LAST_LTR_MASK: u32 =
bidi_class_to_mask(BidiClass::LeftToRight) | bidi_class_to_mask(BidiClass::EuropeanNumber);
// Mask for allowable bidi classes of the last (non-Non-Spacing Mark)
// character in an RTL label in a bidi domain.
const LAST_RTL_MASK: u32 = bidi_class_to_mask(BidiClass::RightToLeft)
| bidi_class_to_mask(BidiClass::ArabicLetter)
| bidi_class_to_mask(BidiClass::EuropeanNumber)
| bidi_class_to_mask(BidiClass::ArabicNumber);
// Mask for allowable bidi classes of the middle characters in an LTR label in a bidi domain.
const MIDDLE_LTR_MASK: u32 = bidi_class_to_mask(BidiClass::LeftToRight)
| bidi_class_to_mask(BidiClass::EuropeanNumber)
| bidi_class_to_mask(BidiClass::EuropeanSeparator)
| bidi_class_to_mask(BidiClass::CommonSeparator)
| bidi_class_to_mask(BidiClass::EuropeanTerminator)
| bidi_class_to_mask(BidiClass::OtherNeutral)
| bidi_class_to_mask(BidiClass::BoundaryNeutral)
| bidi_class_to_mask(BidiClass::NonspacingMark);
// Mask for allowable bidi classes of the middle characters in an RTL label in a bidi domain.
const MIDDLE_RTL_MASK: u32 = bidi_class_to_mask(BidiClass::RightToLeft)
| bidi_class_to_mask(BidiClass::ArabicLetter)
| bidi_class_to_mask(BidiClass::ArabicNumber)
| bidi_class_to_mask(BidiClass::EuropeanNumber)
| bidi_class_to_mask(BidiClass::EuropeanSeparator)
| bidi_class_to_mask(BidiClass::CommonSeparator)
| bidi_class_to_mask(BidiClass::EuropeanTerminator)
| bidi_class_to_mask(BidiClass::OtherNeutral)
| bidi_class_to_mask(BidiClass::BoundaryNeutral)
| bidi_class_to_mask(BidiClass::NonspacingMark);
/// Turns a general category into a mask for comparing with multiple categories at once.
const fn general_category_to_mask(gc: GeneralCategory) -> u32 {
1 << (gc as u32)
}
/// Mask for the disallowed general categories of the first character in a label.
const MARK_MASK: u32 = general_category_to_mask(GeneralCategory::NonspacingMark)
| general_category_to_mask(GeneralCategory::SpacingMark)
| general_category_to_mask(GeneralCategory::EnclosingMark);
const PUNYCODE_PREFIX: u32 =
((b'-' as u32) << 24) | ((b'-' as u32) << 16) | ((b'N' as u32) << 8) | b'X' as u32;
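The mask constants above all follow the same pattern: each property value contributes one bit, so testing whether a character's class belongs to a whole set of classes is a single AND. A stand-alone sketch of the idea, using a stand-in enum rather than the real `icu_properties` types:

```rust
// Stand-in for a property enum; the real code uses icu_properties::BidiClass,
// whose numeric value is carried in `.0`. Discriminants here are illustrative.
#[derive(Clone, Copy)]
enum Class {
    LeftToRight = 0,
    RightToLeft = 1,
    EuropeanNumber = 4,
    ArabicLetter = 13,
}

/// One bit per property value.
const fn class_to_mask(c: Class) -> u32 {
    1u32 << (c as u32)
}

/// A set of allowed values packed into a single u32.
const FIRST_MASK: u32 = class_to_mask(Class::LeftToRight)
    | class_to_mask(Class::RightToLeft)
    | class_to_mask(Class::ArabicLetter);

fn main() {
    // Membership test: non-zero iff the value's bit is in the set.
    assert!((class_to_mask(Class::ArabicLetter) & FIRST_MASK) != 0);
    assert!((class_to_mask(Class::EuropeanNumber) & FIRST_MASK) == 0);
}
```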
@ -487,7 +566,11 @@ pub fn verify_dns_length(domain_name: &str, allow_trailing_dot: bool) -> bool {
/// An implementation of UTS #46.
pub struct Uts46 {
data: idna_adapter::Adapter,
mapper: Uts46Mapper,
canonical_combining_class: CanonicalCombiningClassMap,
general_category: CodePointMapDataBorrowed<'static, GeneralCategory>,
bidi_class: CodePointMapDataBorrowed<'static, BidiClass>,
joining_type: CodePointMapDataBorrowed<'static, JoiningType>,
}
#[cfg(feature = "compiled_data")]
@ -502,7 +585,11 @@ impl Uts46 {
#[cfg(feature = "compiled_data")]
pub const fn new() -> Self {
Self {
data: idna_adapter::Adapter::new(),
mapper: Uts46Mapper::new(),
canonical_combining_class: CanonicalCombiningClassMap::new(),
general_category: icu_properties::maps::general_category(),
bidi_class: icu_properties::maps::bidi_class(),
joining_type: icu_properties::maps::joining_type(),
}
}
@ -515,14 +602,14 @@ impl Uts46 {
/// # Arguments
///
/// * `domain_name` - The input domain name as UTF-8 bytes. (The UTF-8ness is checked by
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// * `ascii_deny_list` - What ASCII deny list, if any, to apply. The UTS 46
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// * `hyphens` - The UTS 46 _CheckHyphens_ flag. Most callers are probably the best
/// off by using [`Hyphens::Allow`] here.
/// off by using [`Hyphens::Allow`] here.
/// * `dns_length` - The UTS 46 _VerifyDNSLength_ flag.
pub fn to_ascii<'a>(
&self,
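A minimal usage sketch for `to_ascii` (not part of this changeset; the argument order follows the list above, while the `idna::uts46` module path and the `DnsLength::Ignore` variant name are assumptions about the 1.x API):

```rust
use idna::uts46::{AsciiDenyList, DnsLength, Hyphens, Uts46};

fn main() {
    let uts46 = Uts46::new();
    let ascii = uts46
        .to_ascii(
            "مثال.example".as_bytes(), // same sample domain as the crate's benches
            AsciiDenyList::URL,
            Hyphens::Allow,
            DnsLength::Ignore, // assumed variant name for skipping VerifyDNSLength
        )
        .expect("valid domain");
    assert_eq!(ascii, "xn--mgbh0fb.example");
}
```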
@ -581,14 +668,14 @@ impl Uts46 {
/// # Arguments
///
/// * `domain_name` - The input domain name as UTF-8 bytes. (The UTF-8ness is checked by
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// * `ascii_deny_list` - What ASCII deny list, if any, to apply. The UTS 46
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// * `hyphens` - The UTS 46 _CheckHyphens_ flag. Most callers are probably the best
/// off by using [`Hyphens::Allow`] here.
/// off by using [`Hyphens::Allow`] here.
pub fn to_unicode<'a>(
&self,
domain_name: &'a [u8],
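A matching sketch for `to_unicode`, which per the docs above always produces displayable output and reports validity separately; the `(Cow<str>, Result<(), Errors>)` return shape is an assumption about the 1.x API:

```rust
use idna::uts46::{AsciiDenyList, Hyphens, Uts46};

fn main() {
    let uts46 = Uts46::new();
    // Decode a Punycode label back to Unicode for display.
    let (unicode, validity) = uts46.to_unicode(
        "xn--mgbh0fb.example".as_bytes(),
        AsciiDenyList::URL,
        Hyphens::Allow,
    );
    println!("{unicode} (valid: {})", validity.is_ok());
}
```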
@ -627,23 +714,23 @@ impl Uts46 {
/// # Arguments
///
/// * `domain_name` - The input domain name as UTF-8 bytes. (The UTF-8ness is checked by
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// * `ascii_deny_list` - What ASCII deny list, if any, to apply. The UTS 46
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// * `hyphens` - The UTS 46 _CheckHyphens_ flag. Most callers are probably the best
/// off by using [`Hyphens::Allow`] here.
/// off by using [`Hyphens::Allow`] here.
/// * `output_as_unicode` - A closure for deciding if a label should be output as Unicode
/// (as opposed to Punycode). The first argument is the label for which a decision is
/// needed (always non-empty slice). The second argument is the TLD (potentially empty).
/// The third argument is `true` iff the domain name as a whole is a bidi domain name.
/// Only non-erroneous labels that contain at least one non-ASCII character are passed
/// to the closure as the first argument. The second and third argument values are
/// guaranteed to remain the same during a single call to `process`, and the closure
/// may cache computations derived from the second and third argument (hence the
/// `FnMut` type).
/// (as opposed to Punycode). The first argument is the label for which a decision is
/// needed (always non-empty slice). The second argument is the TLD (potentially empty).
/// The third argument is `true` iff the domain name as a whole is a bidi domain name.
/// Only non-erroneous labels that contain at least one non-ASCII character are passed
/// to the closure as the first argument. The second and third argument values are
/// guaranteed to remain the same during a single call to `process`, and the closure
/// may cache computations derived from the second and third argument (hence the
/// `FnMut` type).
pub fn to_user_interface<'a, OutputUnicode: FnMut(&[char], &[char], bool) -> bool>(
&self,
domain_name: &'a [u8],
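A sketch of the closure-driven variant, using the `FnMut(&[char], &[char], bool) -> bool` shape from the signature above; the tuple return mirroring `to_unicode` is an assumption:

```rust
use idna::uts46::{AsciiDenyList, Hyphens, Uts46};

fn main() {
    let uts46 = Uts46::new();
    // Policy sketch: show a label as Unicode unless the domain as a whole is
    // a bidi domain (third closure argument), in which case keep Punycode.
    let (display, validity) = uts46.to_user_interface(
        "xn--mgbh0fb.example".as_bytes(),
        AsciiDenyList::URL,
        Hyphens::Allow,
        |_label, _tld, is_bidi| !is_bidi,
    );
    println!("{display} (valid: {})", validity.is_ok());
}
```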
@ -679,59 +766,59 @@ impl Uts46 {
/// # Arguments
///
/// * `domain_name` - The input domain name as UTF-8 bytes. (The UTF-8ness is checked by
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// * `ascii_deny_list` - What ASCII deny list, if any, to apply. The UTS 46
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// * `hyphens` - The UTS 46 _CheckHyphens_ flag. Most callers are probably the best
/// off by using [`Hyphens::Allow`] here.
/// off by using [`Hyphens::Allow`] here.
/// * `error_policy` - Whether to fail fast or to produce output that may be rendered
/// for the user to examine in case of errors.
/// for the user to examine in case of errors.
/// * `output_as_unicode` - A closure for deciding if a label should be output as Unicode
/// (as opposed to Punycode). The first argument is the label for which a decision is
/// needed (always non-empty slice). The second argument is the TLD (potentially empty).
/// The third argument is `true` iff the domain name as a whole is a bidi domain name.
/// Only non-erroneous labels that contain at least one non-ASCII character are passed
/// to the closure as the first argument. The second and third argument values are
/// guaranteed to remain the same during a single call to `process`, and the closure
/// may cache computations derived from the second and third argument (hence the
/// `FnMut` type). To perform the _ToASCII_ operation, `|_, _, _| false` must be
/// passed as the closure. To perform the _ToUnicode_ operation, `|_, _, _| true` must
/// be passed as the closure. A more complex closure may be used to prepare a domain
/// name for display in a user interface so that labels are converted to the Unicode
/// form in general but potentially misleading labels are converted to the Punycode
/// form.
/// * `sink` - The object that receives the output (in the non-passthrough case).
/// * `ascii_sink` - A second sink that receives the _ToASCII_ form only if there
/// were no errors and `sink` received at least one character of non-ASCII output.
/// The purpose of this argument is to enable a user interface display form of the
/// domain and the _ToASCII_ form of the domain to be computed efficiently together.
/// This argument is useless when `output_as_unicode` always returns `false`, in
/// which case the _ToASCII_ form ends up in `sink` already. If `ascii_sink` receives
/// no output and the return value is `Ok(ProcessingSuccess::WroteToSink)`, use the
/// output received by `sink` also as the _ToASCII_ result.
/// (as opposed to Punycode). The first argument is the label for which a decision is
/// needed (always non-empty slice). The second argument is the TLD (potentially empty).
/// The third argument is `true` iff the domain name as a whole is a bidi domain name.
/// Only non-erroneous labels that contain at least one non-ASCII character are passed
/// to the closure as the first argument. The second and third argument values are
/// guaranteed to remain the same during a single call to `process`, and the closure
/// may cache computations derived from the second and third argument (hence the
/// `FnMut` type). To perform the _ToASCII_ operation, `|_, _, _| false` must be
/// passed as the closure. To perform the _ToUnicode_ operation, `|_, _, _| true` must
/// be passed as the closure. A more complex closure may be used to prepare a domain
/// name for display in a user interface so that labels are converted to the Unicode
/// form in general but potentially misleading labels are converted to the Punycode
/// form.
/// `sink` - The object that receives the output (in the non-passthrough case).
/// `ascii_sink` - A second sink that receives the _ToASCII_ form only if there
/// were no errors and `sink` received at least one character of non-ASCII output.
/// The purpose of this argument is to enable a user interface display form of the
/// domain and the _ToASCII_ form of the domain to be computed efficiently together.
/// This argument is useless when `output_as_unicode` always returns `false`, in
/// which case the _ToASCII_ form ends up in `sink` already. If `ascii_sink` receives
/// no output and the return value is `Ok(ProcessingSuccess::WroteToSink)`, use the
/// output received by `sink` also as the _ToASCII_ result.
///
/// # Return value
///
/// * `Ok(ProcessingSuccess::Passthrough)` - The caller must treat
/// `unsafe { core::str::from_utf8_unchecked(domain_name) }` as the output. (This
/// return value asserts that calling `core::str::from_utf8_unchecked(domain_name)`
/// is safe.)
/// `unsafe { core::str::from_utf8_unchecked(domain_name) }` as the output. (This
/// return value asserts that calling `core::str::from_utf8_unchecked(domain_name)`
/// is safe.)
/// * `Ok(ProcessingSuccess::WroteToSink)` - The caller must treat what was written
/// to `sink` as the output. If another sink was passed as `ascii_sink` but it did
/// not receive output, the caller must treat what was written to `sink` also as
/// the _ToASCII_ output. Otherwise, if `ascii_sink` received output, the caller
/// must treat what was written to `ascii_sink` as the _ToASCII_ output.
/// to `sink` as the output. If another sink was passed as `ascii_sink` but it did
/// not receive output, the caller must treat what was written to `sink` also as
/// the _ToASCII_ output. Otherwise, if `ascii_sink` received output, the caller
/// must treat what was written to `ascii_sink` as the _ToASCII_ output.
/// * `Err(ProcessingError::ValidityError)` - The input was in error and must
/// not be used for DNS lookup or otherwise in a network protocol. If `error_policy`
/// was `ErrorPolicy::MarkErrors`, the output written to `sink` may be displayed
/// to the user as an illustration of where the error was or the errors were.
/// not be used for DNS lookup or otherwise in a network protocol. If `error_policy`
/// was `ErrorPolicy::MarkErrors`, the output written to `sink` may be displayed
/// to the user as an illustration of where the error was or the errors were.
/// * `Err(ProcessingError::SinkError)` - Either `sink` or `ascii_sink` returned
/// [`core::fmt::Error`]. The partial output written to `sink` `ascii_sink` must not
/// be used. If `W` never returns [`core::fmt::Error`], this method never returns
/// `Err(ProcessingError::SinkError)`.
/// [`core::fmt::Error`]. The partial output written to `sink` `ascii_sink` must not
/// be used. If `W` never returns [`core::fmt::Error`], this method never returns
/// `Err(ProcessingError::SinkError)`.
///
/// # Safety-usable invariant
///
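Pulling the argument and return-value lists above together, a hedged sketch of driving `process` directly for the ToASCII operation (`|_, _, _| false` per the docs); the argument order follows the list above, while the re-export paths and passing `None` for `ascii_sink` are assumptions:

```rust
use idna::uts46::{AsciiDenyList, ErrorPolicy, Hyphens, ProcessingSuccess, Uts46};

fn main() {
    let uts46 = Uts46::new();
    let input = "مثال.example";
    let mut sink = String::new(); // String implements core::fmt::Write
    match uts46.process(
        input.as_bytes(),
        AsciiDenyList::URL,
        Hyphens::Allow,
        ErrorPolicy::MarkErrors,
        |_, _, _| false, // ToASCII: never output a label as Unicode
        &mut sink,
        None, // no separate ASCII sink needed when the closure always returns false
    ) {
        Ok(ProcessingSuccess::Passthrough) => println!("{input}"),
        Ok(ProcessingSuccess::WroteToSink) => println!("{sink}"),
        Err(_) => eprintln!("validity or sink error"),
    }
}
```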
@ -1026,8 +1113,9 @@ impl Uts46 {
Ok(ProcessingSuccess::WroteToSink)
}
/// The part of `process` that doesn't need to be generic over the sink.
#[inline(always)]
/// The part of `process` that doesn't need to be generic over the sink and
/// can avoid monomorphizing in the interest of code size.
#[inline(never)]
fn process_inner<'a>(
&self,
domain_name: &'a [u8],
@ -1041,7 +1129,7 @@ impl Uts46 {
// performance.
let mut iter = domain_name.iter();
let mut most_recent_label_start = iter.clone();
loop {
let tail = loop {
if let Some(&b) = iter.next() {
if in_inclusive_range8(b, b'a', b'z') {
continue;
@ -1050,38 +1138,13 @@ impl Uts46 {
most_recent_label_start = iter.clone();
continue;
}
return self.process_innermost(
domain_name,
ascii_deny_list,
hyphens,
fail_fast,
domain_buffer,
already_punycode,
most_recent_label_start.as_slice(),
);
break most_recent_label_start.as_slice();
} else {
// Success! The whole input passes through on the fastest path!
return (domain_name.len(), false, false);
}
}
}
};
/// The part of `process` that doesn't need to be generic over the sink and
/// can avoid monomorphizing in the interest of code size.
/// Separating this into a different stack frame compared to `process_inner`
/// improves performance in the ICU4X case.
#[allow(clippy::too_many_arguments)]
#[inline(never)]
fn process_innermost<'a>(
&self,
domain_name: &'a [u8],
ascii_deny_list: AsciiDenyList,
hyphens: Hyphens,
fail_fast: bool,
domain_buffer: &mut SmallVec<[char; 253]>,
already_punycode: &mut SmallVec<[AlreadyAsciiLabel<'a>; 8]>,
tail: &'a [u8],
) -> (usize, bool, bool) {
let deny_list = ascii_deny_list.bits;
let deny_list_deny_dot = deny_list | DOT_MASK;
@ -1232,7 +1295,7 @@ impl Uts46 {
let mut first_needs_combining_mark_check = ascii.is_empty();
let mut needs_contextj_check = !non_ascii.is_empty();
let mut mapping = self
.data
.mapper
.map_normalize(non_ascii.chars())
.map(|c| apply_ascii_deny_list_to_lower_cased_unicode(c, deny_list));
loop {
@ -1368,8 +1431,8 @@ impl Uts46 {
if is_bidi {
for label in domain_buffer.split_mut(|c| *c == '.') {
if let Some((first, tail)) = label.split_first_mut() {
let first_bc = self.data.bidi_class(*first);
if !FIRST_BC_MASK.intersects(first_bc.to_mask()) {
let first_bc = self.bidi_class.get(*first);
if (FIRST_BC_MASK & bidi_class_to_mask(first_bc)) == 0 {
// Neither RTL label nor LTR label
if fail_fast {
return (0, false, true);
@ -1378,19 +1441,19 @@ impl Uts46 {
*first = '\u{FFFD}';
continue;
}
let is_ltr = first_bc.is_ltr();
let is_ltr = first_bc == BidiClass::LeftToRight;
// Trim NSM
let mut middle = tail;
#[allow(clippy::while_let_loop)]
loop {
if let Some((last, prior)) = middle.split_last_mut() {
let last_bc = self.data.bidi_class(*last);
if last_bc.is_nonspacing_mark() {
let last_bc = self.bidi_class.get(*last);
if last_bc == BidiClass::NonspacingMark {
middle = prior;
continue;
}
let last_mask = if is_ltr { LAST_LTR_MASK } else { LAST_RTL_MASK };
if !last_mask.intersects(last_bc.to_mask()) {
if (bidi_class_to_mask(last_bc) & last_mask) == 0 {
if fail_fast {
return (0, false, true);
}
@ -1399,8 +1462,8 @@ impl Uts46 {
}
if is_ltr {
for c in prior.iter_mut() {
let bc = self.data.bidi_class(*c);
if !MIDDLE_LTR_MASK.intersects(bc.to_mask()) {
let bc = self.bidi_class.get(*c);
if (bidi_class_to_mask(bc) & MIDDLE_LTR_MASK) == 0 {
if fail_fast {
return (0, false, true);
}
@ -1411,8 +1474,8 @@ impl Uts46 {
} else {
let mut numeral_state = RtlNumeralState::Undecided;
for c in prior.iter_mut() {
let bc = self.data.bidi_class(*c);
if !MIDDLE_RTL_MASK.intersects(bc.to_mask()) {
let bc = self.bidi_class.get(*c);
if (bidi_class_to_mask(bc) & MIDDLE_RTL_MASK) == 0 {
if fail_fast {
return (0, false, true);
}
@ -1421,14 +1484,14 @@ impl Uts46 {
} else {
match numeral_state {
RtlNumeralState::Undecided => {
if bc.is_european_number() {
if bc == BidiClass::EuropeanNumber {
numeral_state = RtlNumeralState::European;
} else if bc.is_arabic_number() {
} else if bc == BidiClass::ArabicNumber {
numeral_state = RtlNumeralState::Arabic;
}
}
RtlNumeralState::European => {
if bc.is_arabic_number() {
if bc == BidiClass::ArabicNumber {
if fail_fast {
return (0, false, true);
}
@ -1437,7 +1500,7 @@ impl Uts46 {
}
}
RtlNumeralState::Arabic => {
if bc.is_european_number() {
if bc == BidiClass::EuropeanNumber {
if fail_fast {
return (0, false, true);
}
@ -1449,9 +1512,9 @@ impl Uts46 {
}
}
if (numeral_state == RtlNumeralState::European
&& last_bc.is_arabic_number())
&& last_bc == BidiClass::ArabicNumber)
|| (numeral_state == RtlNumeralState::Arabic
&& last_bc.is_european_number())
&& last_bc == BidiClass::EuropeanNumber)
{
if fail_fast {
return (0, false, true);
@ -1486,7 +1549,7 @@ impl Uts46 {
had_errors: &mut bool,
) -> bool {
for c in self
.data
.mapper
.normalize_validate(label_buffer.iter().copied())
.map(|c| apply_ascii_deny_list_to_lower_cased_unicode(c, deny_list_deny_dot))
{
@ -1543,7 +1606,7 @@ impl Uts46 {
}
if first_needs_combining_mark_check {
if let Some(first) = mut_label.first_mut() {
if self.data.is_mark(*first) {
if (general_category_to_mask(self.general_category.get(*first)) & MARK_MASK) != 0 {
if fail_fast {
return true;
}
@ -1563,7 +1626,9 @@ impl Uts46 {
if let Some((joiner, tail)) = joiner_and_tail.split_first_mut() {
if let Some(previous) = head.last() {
if self.data.is_virama(*previous) {
if self.canonical_combining_class.get(*previous)
== CanonicalCombiningClass::Virama
{
continue;
}
} else {
@ -1621,14 +1686,14 @@ impl Uts46 {
fn has_appropriately_joining_char<I: Iterator<Item = char>>(
&self,
iter: I,
required_mask: JoiningTypeMask,
required_mask: u32,
) -> bool {
for c in iter {
let jt = self.data.joining_type(c);
if jt.to_mask().intersects(required_mask) {
let jt = self.joining_type.get(c);
if (joining_type_to_mask(jt) & required_mask) != 0 {
return true;
}
if jt.is_transparent() {
if jt == JoiningType::Transparent {
continue;
}
return false;
@ -1656,7 +1721,7 @@ impl Uts46 {
if in_inclusive_range_char(c, '\u{11000}', '\u{1E7FF}') {
continue;
}
if RTL_MASK.intersects(self.data.bidi_class(c).to_mask()) {
if (RTL_MASK & bidi_class_to_mask(self.bidi_class.get(c))) != 0 {
return true;
}
}


@ -1 +0,0 @@
{"files":{"Cargo.toml":"6f8d2495f093253add30070ab41e48abb5078fd6cfe5586cf5dae64dc08f8086","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"8b43ce8accd61e9d370b5ca9e9c4f953279b5c239926c62315b40e24df51b726","README.md":"15500bedc72ff1698c58c7428b15d465ed9f5c1c6f059b9cf4fe366af9dfd811","src/lib.rs":"71feaadef8e68b2c52fcf6196eb6428a349dd61c92fc72dad9273c4229ca79c2"},"package":"daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71"}


@ -1,53 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
rust-version = "1.67.0"
name = "idna_adapter"
version = "1.2.0"
authors = ["The rust-url developers"]
build = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Back end adapter for idna"
homepage = "https://docs.rs/crate/idna_adapter/latest"
documentation = "https://docs.rs/idna_adapter/latest/idna_adapter/"
readme = "README.md"
keywords = [
"unicode",
"dns",
"idna",
]
categories = [
"no-std",
"internationalization",
]
license = "Apache-2.0 OR MIT"
repository = "https://github.com/hsivonen/idna_adapter"
[lib]
name = "idna_adapter"
path = "src/lib.rs"
[dependencies.icu_normalizer]
version = "1.4.3"
[dependencies.icu_properties]
version = "1.4.2"
[features]
compiled_data = [
"icu_normalizer/compiled_data",
"icu_properties/compiled_data",
]


@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@ -1,25 +0,0 @@
Copyright (c) The rust-url developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.


@ -1,29 +0,0 @@
# idna_adapter
This crate abstracts over a Unicode back end for the [`idna`](https://docs.rs/crate/idna/latest) crate.
To work around the lack of [`global-features`](https://internals.rust-lang.org/t/pre-rfc-mutually-excusive-global-features/19618) in Cargo, this crate allows the top level `Cargo.lock` to choose an alternative Unicode back end for the `idna` crate by pinning a version of this crate.
`idna` depends on version 1 of this crate. The version stream 1.2.x uses ICU4X, the version stream 1.1.x uses unicode-rs, and the version stream 1.0.x has a stub implementation without an actual Unicode back end.
It is generally a good idea to refer to the [README of the latest version](https://docs.rs/crate/idna_adapter/latest) instead of the guidance below for up-to-date information about what options are available.
## ICU4X as the default
If you take no action, Cargo will choose the 1.2.x version stream i.e. ICU4X.
## Opting to use unicode-rs
To choose unicode-rs, run `cargo update -p idna_adapter --precise 1.1.0` in the top-level directory of your application.
Compared to ICU4X, this makes build times faster, MSRV lower, binary size larger, and run-time performance slower.
## Turning off IDNA support
Since the ability to turn off actual IDNA processing has been requested again and again, an option to have no Unicode back end is provided. Choosing this option obviously breaks the `idna` crate in the sense that it cannot provide a proper implementation of UTS 46 without any Unicode data. Choosing this option makes your application reject non-ASCII domain name inputs and will fail to enforce the UTS 46 requirements on domain names that have labels in the Punycode form.
Using this option is not recommended, but to make the `idna` crate not actually support IDNA, run `cargo update -p idna_adapter --precise 1.0.0` in the top-level directory of your application.
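As a hedged illustration of what the pinning choice changes (and what it does not): callers keep using the same `idna` entry points regardless of which `idna_adapter` version the lockfile pins; only the behaviour on non-ASCII input differs under the stub back end.

```rust
use idna::AsciiDenyList;

fn main() {
    let result = idna::domain_to_ascii_cow("Exämple.test".as_bytes(), AsciiDenyList::URL);
    // With the ICU4X or unicode-rs back ends this succeeds (Punycode output);
    // with the 1.0.x stub back end, non-ASCII input is rejected as described above.
    match result {
        Ok(ascii) => println!("{ascii}"),
        Err(_) => println!("rejected"),
    }
}
```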
## License
Apache-2.0 OR MIT


@ -1,282 +0,0 @@
// Copyright The rust-url developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This crate abstracts over a Unicode back end for the [`idna`][1]
//! crate.
//!
//! To work around the lack of [`global-features`][2] in Cargo, this
//! crate allows the top level `Cargo.lock` to choose an alternative
//! Unicode back end for the `idna` crate by pinning a version of this
//! crate.
//!
//! See the [README of the latest version][3] for more details.
//!
//! [1]: https://docs.rs/crate/idna/latest
//! [2]: https://internals.rust-lang.org/t/pre-rfc-mutually-excusive-global-features/19618
//! [3]: https://docs.rs/crate/idna_adapter/latest
#![no_std]
use icu_normalizer::properties::CanonicalCombiningClassMap;
use icu_normalizer::uts46::Uts46Mapper;
use icu_properties::maps::CodePointMapDataBorrowed;
use icu_properties::CanonicalCombiningClass;
use icu_properties::GeneralCategory;
/// Turns a joining type into a mask for comparing with multiple type at once.
const fn joining_type_to_mask(jt: icu_properties::JoiningType) -> u32 {
1u32 << jt.0
}
/// Mask for checking for both left and dual joining.
pub const LEFT_OR_DUAL_JOINING_MASK: JoiningTypeMask = JoiningTypeMask(
joining_type_to_mask(icu_properties::JoiningType::LeftJoining)
| joining_type_to_mask(icu_properties::JoiningType::DualJoining),
);
/// Mask for checking for both right and dual joining.
pub const RIGHT_OR_DUAL_JOINING_MASK: JoiningTypeMask = JoiningTypeMask(
joining_type_to_mask(icu_properties::JoiningType::RightJoining)
| joining_type_to_mask(icu_properties::JoiningType::DualJoining),
);
/// Turns a bidi class into a mask for comparing with multiple classes at once.
const fn bidi_class_to_mask(bc: icu_properties::BidiClass) -> u32 {
1u32 << bc.0
}
/// Mask for checking if the domain is a bidi domain.
pub const RTL_MASK: BidiClassMask = BidiClassMask(
bidi_class_to_mask(icu_properties::BidiClass::RightToLeft)
| bidi_class_to_mask(icu_properties::BidiClass::ArabicLetter)
| bidi_class_to_mask(icu_properties::BidiClass::ArabicNumber),
);
/// Mask for allowable bidi classes in the first character of a label
/// (either LTR or RTL) in a bidi domain.
pub const FIRST_BC_MASK: BidiClassMask = BidiClassMask(
bidi_class_to_mask(icu_properties::BidiClass::LeftToRight)
| bidi_class_to_mask(icu_properties::BidiClass::RightToLeft)
| bidi_class_to_mask(icu_properties::BidiClass::ArabicLetter),
);
// Mask for allowable bidi classes of the last (non-Non-Spacing Mark)
// character in an LTR label in a bidi domain.
pub const LAST_LTR_MASK: BidiClassMask = BidiClassMask(
bidi_class_to_mask(icu_properties::BidiClass::LeftToRight)
| bidi_class_to_mask(icu_properties::BidiClass::EuropeanNumber),
);
// Mask for allowable bidi classes of the last (non-Non-Spacing Mark)
// character in an RTL label in a bidi domain.
pub const LAST_RTL_MASK: BidiClassMask = BidiClassMask(
bidi_class_to_mask(icu_properties::BidiClass::RightToLeft)
| bidi_class_to_mask(icu_properties::BidiClass::ArabicLetter)
| bidi_class_to_mask(icu_properties::BidiClass::EuropeanNumber)
| bidi_class_to_mask(icu_properties::BidiClass::ArabicNumber),
);
// Mask for allowable bidi classes of the middle characters in an LTR label in a bidi domain.
pub const MIDDLE_LTR_MASK: BidiClassMask = BidiClassMask(
bidi_class_to_mask(icu_properties::BidiClass::LeftToRight)
| bidi_class_to_mask(icu_properties::BidiClass::EuropeanNumber)
| bidi_class_to_mask(icu_properties::BidiClass::EuropeanSeparator)
| bidi_class_to_mask(icu_properties::BidiClass::CommonSeparator)
| bidi_class_to_mask(icu_properties::BidiClass::EuropeanTerminator)
| bidi_class_to_mask(icu_properties::BidiClass::OtherNeutral)
| bidi_class_to_mask(icu_properties::BidiClass::BoundaryNeutral)
| bidi_class_to_mask(icu_properties::BidiClass::NonspacingMark),
);
// Mask for allowable bidi classes of the middle characters in an RTL label in a bidi domain.
pub const MIDDLE_RTL_MASK: BidiClassMask = BidiClassMask(
bidi_class_to_mask(icu_properties::BidiClass::RightToLeft)
| bidi_class_to_mask(icu_properties::BidiClass::ArabicLetter)
| bidi_class_to_mask(icu_properties::BidiClass::ArabicNumber)
| bidi_class_to_mask(icu_properties::BidiClass::EuropeanNumber)
| bidi_class_to_mask(icu_properties::BidiClass::EuropeanSeparator)
| bidi_class_to_mask(icu_properties::BidiClass::CommonSeparator)
| bidi_class_to_mask(icu_properties::BidiClass::EuropeanTerminator)
| bidi_class_to_mask(icu_properties::BidiClass::OtherNeutral)
| bidi_class_to_mask(icu_properties::BidiClass::BoundaryNeutral)
| bidi_class_to_mask(icu_properties::BidiClass::NonspacingMark),
);
/// Turns a general category into a mask for comparing with multiple categories at once.
const fn general_category_to_mask(gc: GeneralCategory) -> u32 {
1 << (gc as u32)
}
/// Mask for the disallowed general categories of the first character in a label.
const MARK_MASK: u32 = general_category_to_mask(GeneralCategory::NonspacingMark)
| general_category_to_mask(GeneralCategory::SpacingMark)
| general_category_to_mask(GeneralCategory::EnclosingMark);
/// Value for the Joining_Type Unicode property.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct JoiningType(icu_properties::JoiningType);
impl JoiningType {
/// Returns the corresponding `JoiningTypeMask`.
#[inline(always)]
pub fn to_mask(self) -> JoiningTypeMask {
JoiningTypeMask(joining_type_to_mask(self.0))
}
// `true` iff this value is the Transparent value.
#[inline(always)]
pub fn is_transparent(self) -> bool {
self.0 == icu_properties::JoiningType::Transparent
}
}
/// A mask representing potentially multiple `JoiningType`
/// values.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct JoiningTypeMask(u32);
impl JoiningTypeMask {
/// `true` iff both masks have a `JoiningType` in common.
#[inline(always)]
pub fn intersects(self, other: JoiningTypeMask) -> bool {
self.0 & other.0 != 0
}
}
/// Value for the Bidi_Class Unicode property.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct BidiClass(icu_properties::BidiClass);
impl BidiClass {
/// Returns the corresponding `BidiClassMask`.
#[inline(always)]
pub fn to_mask(self) -> BidiClassMask {
BidiClassMask(bidi_class_to_mask(self.0))
}
/// `true` iff this value is Left_To_Right
#[inline(always)]
pub fn is_ltr(self) -> bool {
self.0 == icu_properties::BidiClass::LeftToRight
}
/// `true` iff this value is Nonspacing_Mark
#[inline(always)]
pub fn is_nonspacing_mark(self) -> bool {
self.0 == icu_properties::BidiClass::NonspacingMark
}
/// `true` iff this value is European_Number
#[inline(always)]
pub fn is_european_number(self) -> bool {
self.0 == icu_properties::BidiClass::EuropeanNumber
}
/// `true` iff this value is Arabic_Number
#[inline(always)]
pub fn is_arabic_number(self) -> bool {
self.0 == icu_properties::BidiClass::ArabicNumber
}
}
/// A mask representing potentially multiple `BidiClass`
/// values.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct BidiClassMask(u32);
impl BidiClassMask {
    /// `true` iff both masks have at least one `BidiClass` in common.
#[inline(always)]
pub fn intersects(self, other: BidiClassMask) -> bool {
self.0 & other.0 != 0
}
}
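// A hypothetical sketch (not part of this crate) of how a caller such as the
// `idna` crate might combine the masks defined above with `BidiClass::to_mask`
// when enforcing the RFC 5893 bidi rule for the last character of a label.
#[allow(dead_code)]
fn sketch_last_char_allowed(class: BidiClass, rtl_label: bool) -> bool {
    // Pick the mask matching the label direction, then test for a shared class.
    let allowed = if rtl_label { LAST_RTL_MASK } else { LAST_LTR_MASK };
    class.to_mask().intersects(allowed)
}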
/// An adapter between a Unicode back end and the `idna` crate.
pub struct Adapter {
mapper: Uts46Mapper,
canonical_combining_class: CanonicalCombiningClassMap,
general_category: CodePointMapDataBorrowed<'static, GeneralCategory>,
bidi_class: CodePointMapDataBorrowed<'static, icu_properties::BidiClass>,
joining_type: CodePointMapDataBorrowed<'static, icu_properties::JoiningType>,
}
#[cfg(feature = "compiled_data")]
impl Default for Adapter {
fn default() -> Self {
Self::new()
}
}
impl Adapter {
/// Constructor using data compiled into the binary.
#[cfg(feature = "compiled_data")]
#[inline(always)]
pub const fn new() -> Self {
Self {
mapper: Uts46Mapper::new(),
canonical_combining_class: CanonicalCombiningClassMap::new(),
general_category: icu_properties::maps::general_category(),
bidi_class: icu_properties::maps::bidi_class(),
joining_type: icu_properties::maps::joining_type(),
}
}
/// `true` iff the Canonical_Combining_Class of `c` is Virama.
#[inline(always)]
pub fn is_virama(&self, c: char) -> bool {
self.canonical_combining_class.get(c) == CanonicalCombiningClass::Virama
}
/// `true` iff the General_Category of `c` is Mark, i.e. any of Nonspacing_Mark,
/// Spacing_Mark, or Enclosing_Mark.
#[inline(always)]
pub fn is_mark(&self, c: char) -> bool {
(general_category_to_mask(self.general_category.get(c)) & MARK_MASK) != 0
}
/// Returns the Bidi_Class of `c`.
#[inline(always)]
pub fn bidi_class(&self, c: char) -> BidiClass {
BidiClass(self.bidi_class.get(c))
}
/// Returns the Joining_Type of `c`.
#[inline(always)]
pub fn joining_type(&self, c: char) -> JoiningType {
JoiningType(self.joining_type.get(c))
}
/// See the [method of the same name in `icu_normalizer`][1] for the
/// exact semantics.
///
/// [1]: https://docs.rs/icu_normalizer/latest/icu_normalizer/uts46/struct.Uts46Mapper.html#method.map_normalize
#[inline(always)]
pub fn map_normalize<'delegate, I: Iterator<Item = char> + 'delegate>(
&'delegate self,
iter: I,
) -> impl Iterator<Item = char> + 'delegate {
self.mapper.map_normalize(iter)
}
/// See the [method of the same name in `icu_normalizer`][1] for the
/// exact semantics.
///
/// [1]: https://docs.rs/icu_normalizer/latest/icu_normalizer/uts46/struct.Uts46Mapper.html#method.normalize_validate
#[inline(always)]
pub fn normalize_validate<'delegate, I: Iterator<Item = char> + 'delegate>(
&'delegate self,
iter: I,
) -> impl Iterator<Item = char> + 'delegate {
self.mapper.normalize_validate(iter)
}
}
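// A hypothetical usage sketch (not part of this crate): exercising the
// compiled-data constructor and the per-character property queries above.
// The chosen characters are ordinary examples of the relevant properties.
#[cfg(all(test, feature = "compiled_data"))]
mod adapter_usage_sketch {
    use super::Adapter;

    #[test]
    fn property_queries() {
        let adapter = Adapter::new();
        // U+094D DEVANAGARI SIGN VIRAMA: Canonical_Combining_Class = Virama.
        assert!(adapter.is_virama('\u{094D}'));
        // U+0301 COMBINING ACUTE ACCENT: General_Category = Nonspacing_Mark.
        assert!(adapter.is_mark('\u{0301}'));
        // ASCII letters have Bidi_Class = Left_To_Right.
        assert!(adapter.bidi_class('a').is_ltr());
    }
}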

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"c9d0e4187befd45d7046da8cb2c965fa897fbf28ecd43c3254c855e9c7eb1ecc","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","README.md":"141354cfa79fe3a7e739834bf6e001ff1293169e1809cf9eb6d5c8af0b7a191f","src/host.rs":"ba9504704f9c33517cf700d69961e251f930d68b7d4f47895745baf6d7eed1ed","src/lib.rs":"cc39b9745be6a0c4ccd8be4734481b40e266c83950c54424ebf97a73178287c9","src/origin.rs":"7578bfbd8da63f60f022040973c833bb615bb7c4c9f9a34022bf463386867034","src/parser.rs":"a703db38e1f38a421c1795cd928fa261e7f485b1576794a5bdb1afa9d054c7d6","src/path_segments.rs":"ae8c618ecb99d14430c79b34753d14712e221ff442bd9d1cea6de901b0c13cda","src/quirks.rs":"8ddb717579dbd4d171e4146c20a084f941cba30d43548705b98cb3d92274c81b","src/slicing.rs":"b59bf2fa4cbfd31619b860766268d4e99b46537d3b9702b73f6d0a979a7de24a","tests/expected_failures.txt":"7da063ac6aa01eae311b8e936fe4ec8ad1ac7dc3c6f97930fc2e190fba0e812d","tests/setters_tests.json":"a3a4cbd7b798bc2c4d9656dc50be7397a5a5ed1f0b52daa1da1ad654d38c1dcd","tests/unit.rs":"fd597d2ca9d890856f35271e3896f27713c33d4afd89737f7379ac71f3470586","tests/urltestdata.json":"58d67bea710d5f46324fe6841df5fd82090fe4ec2d882bc0fc7c1784d4771884","tests/wpt.rs":"8781251116a9de8169327ed40a0237ac6ff2f84e3d579d6fb6d7353362f9a48a"},"package":"8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada"}
{"files":{"Cargo.toml":"4108358208f628a0e61af3ebe88aedbe585983c518a456644df398012781f136","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","README.md":"6111161470aa4d5f2e3806936df0c874b8eca5f8c8cd2d71a60eb6c2cbb776ab","src/host.rs":"9de249e8af8fcd0caf673b37a66ba070dfa1b231ee06a981526a9f863c3acf13","src/lib.rs":"4b7ec6a4f2ee7a63ac332f4609c4f9f648861e7ea0967b80efdf27c52a07f154","src/origin.rs":"19a4b451e8615bfef7239d2fc719c489398fe5044edb0df7c84b54eef4ceba1b","src/parser.rs":"ca317fdf927628351991c73437aa91d36e26637574e6551200125e32f46e60cd","src/path_segments.rs":"29db87b6902da4ab1ae925b3874afdeff42b8ddfb46356af6a83b86f34e03b14","src/quirks.rs":"79818bd168b138e8edd30011033c1f6defb847fe96f8a57381cf9251c27e866b","src/slicing.rs":"3b1aaad36ba7e89f50c90d1ceddda1f8ba52a364c153541ac5c9ce54dacb6724","tests/expected_failures.txt":"fc4f619316f1fb117b01d8089c04b925b8db0652f46b8534a87e115c5544881b","tests/setters_tests.json":"a3a4cbd7b798bc2c4d9656dc50be7397a5a5ed1f0b52daa1da1ad654d38c1dcd","tests/unit.rs":"c895675581e737ad8e1536786f80385df0426495074ee6cc011830f45f16f6f7","tests/urltestdata.json":"58d67bea710d5f46324fe6841df5fd82090fe4ec2d882bc0fc7c1784d4771884","tests/wpt.rs":"8781251116a9de8169327ed40a0237ac6ff2f84e3d579d6fb6d7353362f9a48a"},"package":"f7c25da092f0a868cdf09e8674cd3b7ef3a7d92a24253e663a2fb85e2496de56"}

View File

@ -11,21 +11,16 @@
[package]
edition = "2018"
rust-version = "1.57"
rust-version = "1.67"
name = "url"
version = "2.5.3"
version = "2.5.1"
authors = ["The rust-url developers"]
build = false
include = [
"src/**/*",
"LICENSE-*",
"README.md",
"tests/**",
]
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "URL library for Rust, based on the WHATWG URL Standard"
documentation = "https://docs.rs/url"
readme = "README.md"
@ -37,7 +32,6 @@ categories = [
"parser-implementations",
"web-programming",
"encoding",
"no-std",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/servo/rust-url"
@ -49,36 +43,24 @@ rustdoc-args = ["--generate-link-to-definition"]
[package.metadata.playground]
features = ["serde"]
[lib]
name = "url"
path = "src/lib.rs"
[[test]]
name = "unit"
path = "tests/unit.rs"
[[test]]
name = "url_wpt"
path = "tests/wpt.rs"
harness = false
[[bench]]
name = "parse_url"
path = "benches/parse_url.rs"
harness = false
[dependencies.form_urlencoded]
version = "1.2.1"
features = ["alloc"]
default-features = false
[dependencies.idna]
version = "1.0.3"
features = [
"alloc",
"compiled_data",
]
default-features = false
version = "1.0.0"
[dependencies.percent-encoding]
version = "2.3.1"
features = ["alloc"]
default-features = false
[dependencies.serde]
version = "1.0"
@ -97,13 +79,8 @@ version = "1.0"
[features]
debugger_visualizer = []
default = ["std"]
default = []
expose_internals = []
std = [
"idna/std",
"percent-encoding/std",
"form_urlencoded/std",
]
[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dev-dependencies.wasm-bindgen-test]
[target."cfg(all(target_arch = \"wasm32\", target_os = \"unknown\"))".dev-dependencies.wasm-bindgen-test]
version = "0.3"

View File

@ -12,7 +12,3 @@ URL library for Rust, based on the [URL Standard](https://url.spec.whatwg.org/).
[Documentation](https://docs.rs/url)
Please see [UPGRADING.md](https://github.com/servo/rust-url/blob/main/UPGRADING.md) if you are upgrading from a previous version.
## Alternative Unicode back ends
`url` depends on the `idna` crate. By default, `idna` uses [ICU4X](https://github.com/unicode-org/icu4x/) as its Unicode back end. If you wish to opt for different tradeoffs between correctness, run-time performance, binary size, compile time, and MSRV, please see the [README of the latest version of the `idna_adapter` crate](https://docs.rs/crate/idna_adapter/latest) for how to opt into a different Unicode back end.
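As a rough illustration (an assumption drawn from the `idna_adapter` README rather than anything this crate guarantees), opting into a different back end boils down to pinning the `idna_adapter` version that corresponds to it in your own `Cargo.toml`:
```toml
# Hypothetical: the actual version-to-back-end mapping is documented in the
# idna_adapter README; "~1.1" is shown purely as an illustrative pin.
[dependencies]
idna_adapter = "~1.1"
```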

View File

@ -6,14 +6,10 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use crate::net::{Ipv4Addr, Ipv6Addr};
use alloc::borrow::Cow;
use alloc::borrow::ToOwned;
use alloc::string::String;
use alloc::string::ToString;
use alloc::vec::Vec;
use core::cmp;
use core::fmt::{self, Formatter};
use std::borrow::Cow;
use std::cmp;
use std::fmt::{self, Formatter};
use std::net::{Ipv4Addr, Ipv6Addr};
use percent_encoding::{percent_decode, utf8_percent_encode, CONTROLS};
#[cfg(feature = "serde")]
@ -313,7 +309,7 @@ fn parse_ipv4addr(input: &str) -> ParseResult<Ipv4Addr> {
}
let mut ipv4 = numbers.pop().expect("a non-empty list of numbers");
// Equivalent to: ipv4 >= 256 ** (4 - numbers.len())
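// (With n = numbers.len(), u32::MAX >> (8 * n) == 256^(4 - n) - 1, so the ">"
// comparison below performs that check without computing the power explicitly.)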
if ipv4 > u32::MAX >> (8 * numbers.len() as u32) {
if ipv4 > u32::max_value() >> (8 * numbers.len() as u32) {
return Err(ParseError::InvalidIpv4Address);
}
if numbers.iter().any(|x| *x > 255) {

View File

@ -73,14 +73,6 @@ assert!(data_url.fragment() == Some(""));
# run().unwrap();
```
## Default Features
Versions `<= 2.5.2` of the crate have no default features. Versions `> 2.5.2` have the default feature 'std'.
If you are upgrading across this boundary and you have specified `default-features = false`, then
you will need to add the 'std' feature or the 'alloc' feature to your dependency.
The 'std' feature has the same behavior as the previous versions. The 'alloc' feature
provides no_std support.
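For example, a dependency that already sets `default-features = false` keeps the previous behavior by requesting `std` explicitly (a minimal sketch; the version number is illustrative):
```toml
[dependencies]
url = { version = "2.5.3", default-features = false, features = ["std"] }
```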
## Serde
Enable the `serde` feature to include `Deserialize` and `Serialize` implementations for `url::Url`.
@ -142,8 +134,7 @@ url = { version = "2", features = ["debugger_visualizer"] }
*/
#![no_std]
#![doc(html_root_url = "https://docs.rs/url/2.5.3")]
#![doc(html_root_url = "https://docs.rs/url/2.5.1")]
#![cfg_attr(
feature = "debugger_visualizer",
debugger_visualizer(natvis_file = "../../debug_metadata/url.natvis")
@ -151,60 +142,29 @@ url = { version = "2", features = ["debugger_visualizer"] }
pub use form_urlencoded;
// For forwards compatibility
#[cfg(feature = "std")]
extern crate std;
#[macro_use]
extern crate alloc;
#[cfg(feature = "serde")]
extern crate serde;
use crate::host::HostInternal;
use crate::net::IpAddr;
#[cfg(feature = "std")]
#[cfg(any(
unix,
windows,
target_os = "redox",
target_os = "wasi",
target_os = "hermit"
))]
use crate::net::{SocketAddr, ToSocketAddrs};
use crate::parser::{to_u32, Context, Parser, SchemeType, USERINFO};
use alloc::borrow::ToOwned;
use alloc::str;
use alloc::string::{String, ToString};
use core::borrow::Borrow;
use core::convert::TryFrom;
use core::fmt::Write;
use core::ops::{Range, RangeFrom, RangeTo};
use core::{cmp, fmt, hash, mem};
use percent_encoding::utf8_percent_encode;
#[cfg(feature = "std")]
#[cfg(any(
unix,
windows,
target_os = "redox",
target_os = "wasi",
target_os = "hermit"
))]
use crate::parser::{
to_u32, Context, Parser, SchemeType, PATH_SEGMENT, SPECIAL_PATH_SEGMENT, USERINFO,
};
use percent_encoding::{percent_decode, percent_encode, utf8_percent_encode};
use std::borrow::Borrow;
use std::cmp;
use std::fmt::{self, Write};
use std::hash;
#[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
use std::io;
#[cfg(feature = "std")]
use std::mem;
use std::net::IpAddr;
#[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
use std::net::{SocketAddr, ToSocketAddrs};
use std::ops::{Range, RangeFrom, RangeTo};
use std::path::{Path, PathBuf};
use std::str;
/// `std` version of `net`
#[cfg(feature = "std")]
pub(crate) mod net {
pub use std::net::*;
}
/// `no_std` nightly version of `net`
#[cfg(not(feature = "std"))]
pub(crate) mod net {
pub use core::net::*;
}
use std::convert::TryFrom;
pub use crate::host::Host;
pub use crate::origin::{OpaqueOrigin, Origin};
@ -416,10 +376,10 @@ impl Url {
/// # Notes
///
/// - A trailing slash is significant.
/// Without it, the last path component is considered to be a “file” name
/// to be removed to get at the “directory” that is used as the base.
/// Without it, the last path component is considered to be a “file” name
/// to be removed to get at the “directory” that is used as the base.
/// - A [scheme relative special URL](https://url.spec.whatwg.org/#scheme-relative-special-url-string)
/// as input replaces everything in the base URL after the scheme.
/// as input replaces everything in the base URL after the scheme.
/// - An absolute URL (with a scheme) as input replaces the whole base URL (even the scheme).
///
/// # Examples
@ -1316,18 +1276,11 @@ impl Url {
/// })
/// }
/// ```
#[cfg(feature = "std")]
#[cfg(any(
unix,
windows,
target_os = "redox",
target_os = "wasi",
target_os = "hermit"
))]
#[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
pub fn socket_addrs(
&self,
default_port_number: impl Fn() -> Option<u16>,
) -> io::Result<alloc::vec::Vec<SocketAddr>> {
) -> io::Result<Vec<SocketAddr>> {
// Note: trying to avoid the Vec allocation by returning `impl AsRef<[SocketAddr]>`
// causes borrowck issues because the return value borrows `default_port_number`:
//
@ -1397,11 +1350,7 @@ impl Url {
///
/// ```
/// use url::Url;
///
/// # #[cfg(feature = "std")]
/// # use std::error::Error;
/// # #[cfg(not(feature = "std"))]
/// # use core::error::Error;
///
/// # fn run() -> Result<(), Box<dyn Error>> {
/// let url = Url::parse("https://example.com/foo/bar")?;
@ -1757,39 +1706,6 @@ impl Url {
let old_after_path_pos = to_u32(self.serialization.len()).unwrap();
let cannot_be_a_base = self.cannot_be_a_base();
let scheme_type = SchemeType::from(self.scheme());
let mut path_empty = false;
// Check ':' and then see if the next character is '/'
let mut has_host = if let Some(index) = self.serialization.find(":") {
if self.serialization.len() > index + 1
&& self.serialization.as_bytes().get(index + 1) == Some(&b'/')
{
let rest = &self.serialization[(index + ":/".len())..];
let host_part = rest.split('/').next().unwrap_or("");
path_empty = rest.is_empty();
!host_part.is_empty() && !host_part.contains('@')
} else {
false
}
} else {
false
};
// Ensure the path length is greater than 1 to account
// for cases where "/." is already appended from serialization
// If we set path, then we already checked the other two conditions:
// https://url.spec.whatwg.org/#url-serializing
// 1. The host is null
// 2. the first segment of the URL's path is an empty string
if path.len() > 1 {
if let Some(index) = self.serialization.find(":") {
let removal_start = index + ":".len();
if self.serialization[removal_start..].starts_with("/.") {
self.path_start -= "/.".len() as u32;
}
}
}
self.serialization.truncate(self.path_start as usize);
self.mutate(|parser| {
if cannot_be_a_base {
@ -1799,6 +1715,7 @@ impl Url {
}
parser.parse_cannot_be_a_base_path(parser::Input::new_no_trim(path));
} else {
let mut has_host = true; // FIXME
parser.parse_path_start(
scheme_type,
&mut has_host,
@ -1806,26 +1723,6 @@ impl Url {
);
}
});
// For cases where normalization is applied across both the serialization and the path.
// Append "/." immediately after the scheme (up to ":")
// This is done if three conditions are met.
// https://url.spec.whatwg.org/#url-serializing
// 1. The host is null
// 2. The url's path length is greater than 1
// 3. the first segment of the URL's path is an empty string
if !has_host && path.len() > 1 && path_empty {
if let Some(index) = self.serialization.find(":") {
if self.serialization.len() > index + 2
&& self.serialization.as_bytes().get(index + 1) == Some(&b'/')
&& self.serialization.as_bytes().get(index + 2) == Some(&b'/')
{
self.serialization.insert_str(index + ":".len(), "/.");
self.path_start += "/.".len() as u32;
}
}
}
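// Hypothetical illustration of the effect of the block above: parsing
// "non-spec:/" and then setting the path to "//p" serializes as
// "non-spec:/.//p", so the leading "//" of the path cannot be re-parsed
// as an authority (https://url.spec.whatwg.org/#url-serializing).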
self.restore_after_path(old_after_path_pos, &after_path);
}
@ -1867,11 +1764,7 @@ impl Url {
///
/// ```
/// use url::Url;
///
/// # #[cfg(feature = "std")]
/// # use std::error::Error;
/// # #[cfg(not(feature = "std"))]
/// # use core::error::Error;
///
/// # fn run() -> Result<(), Box<dyn Error>> {
/// let mut url = Url::parse("ssh://example.net:2048/")?;
@ -1890,11 +1783,7 @@ impl Url {
///
/// ```rust
/// use url::Url;
///
/// # #[cfg(feature = "std")]
/// # use std::error::Error;
/// # #[cfg(not(feature = "std"))]
/// # use core::error::Error;
///
/// # fn run() -> Result<(), Box<dyn Error>> {
/// let mut url = Url::parse("https://example.org/")?;
@ -2577,20 +2466,9 @@ impl Url {
/// # run().unwrap();
/// # }
/// ```
///
/// This method is only available if the `std` Cargo feature is enabled.
#[cfg(all(
feature = "std",
any(
unix,
windows,
target_os = "redox",
target_os = "wasi",
target_os = "hermit"
)
))]
#[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
#[allow(clippy::result_unit_err)]
pub fn from_file_path<P: AsRef<std::path::Path>>(path: P) -> Result<Url, ()> {
pub fn from_file_path<P: AsRef<Path>>(path: P) -> Result<Url, ()> {
let mut serialization = "file://".to_owned();
let host_start = serialization.len() as u32;
let (host_end, host) = path_to_file_url_segments(path.as_ref(), &mut serialization)?;
@ -2625,20 +2503,9 @@ impl Url {
///
/// Note that `std::path` does not consider trailing slashes significant
/// and usually does not include them (e.g. in `Path::parent()`).
///
/// This method is only available if the `std` Cargo feature is enabled.
#[cfg(all(
feature = "std",
any(
unix,
windows,
target_os = "redox",
target_os = "wasi",
target_os = "hermit"
)
))]
#[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
#[allow(clippy::result_unit_err)]
pub fn from_directory_path<P: AsRef<std::path::Path>>(path: P) -> Result<Url, ()> {
pub fn from_directory_path<P: AsRef<Path>>(path: P) -> Result<Url, ()> {
let mut url = Url::from_file_path(path)?;
if !url.serialization.ends_with('/') {
url.serialization.push('/')
@ -2700,7 +2567,7 @@ impl Url {
where
D: serde::Deserializer<'de>,
{
use serde::de::{Deserialize, Error};
use serde::de::{Deserialize, Error, Unexpected};
let (
serialization,
scheme_end,
@ -2726,8 +2593,10 @@ impl Url {
fragment_start,
};
if cfg!(debug_assertions) {
url.check_invariants()
.map_err(|reason| Error::custom(reason))?
url.check_invariants().map_err(|reason| {
let reason: &str = &reason;
Error::invalid_value(Unexpected::Other("value"), &reason)
})?
}
Ok(url)
}
@ -2750,19 +2619,8 @@ impl Url {
/// or if `Path::new_opt()` returns `None`.
/// (That is, if the percent-decoded path contains a NUL byte or,
/// for a Windows path, is not UTF-8.)
///
/// This method is only available if the `std` Cargo feature is enabled.
#[inline]
#[cfg(all(
feature = "std",
any(
unix,
windows,
target_os = "redox",
target_os = "wasi",
target_os = "hermit"
)
))]
#[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
#[allow(clippy::result_unit_err)]
pub fn to_file_path(&self) -> Result<PathBuf, ()> {
if let Some(segments) = self.path_segments() {
@ -2940,7 +2798,7 @@ impl<'de> serde::Deserialize<'de> for Url {
where
D: serde::Deserializer<'de>,
{
use serde::de::{Error, Visitor};
use serde::de::{Error, Unexpected, Visitor};
struct UrlVisitor;
@ -2955,7 +2813,10 @@ impl<'de> serde::Deserialize<'de> for Url {
where
E: Error,
{
Url::parse(s).map_err(|err| Error::custom(format!("{}: {:?}", err, s)))
Url::parse(s).map_err(|err| {
let err_s = format!("{}", err);
Error::invalid_value(Unexpected::Str(s), &err_s.as_str())
})
}
}
@ -2963,20 +2824,15 @@ impl<'de> serde::Deserialize<'de> for Url {
}
}
#[cfg(all(
feature = "std",
any(unix, target_os = "redox", target_os = "wasi", target_os = "hermit")
))]
#[cfg(any(unix, target_os = "redox", target_os = "wasi"))]
fn path_to_file_url_segments(
path: &Path,
serialization: &mut String,
) -> Result<(u32, HostInternal), ()> {
use parser::SPECIAL_PATH_SEGMENT;
use percent_encoding::percent_encode;
#[cfg(target_os = "hermit")]
use std::os::hermit::ffi::OsStrExt;
#[cfg(any(unix, target_os = "redox"))]
use std::os::unix::prelude::OsStrExt;
#[cfg(target_os = "wasi")]
use std::os::wasi::prelude::OsStrExt;
if !path.is_absolute() {
return Err(());
}
@ -2986,16 +2842,10 @@ fn path_to_file_url_segments(
for component in path.components().skip(1) {
empty = false;
serialization.push('/');
#[cfg(not(target_os = "wasi"))]
serialization.extend(percent_encode(
component.as_os_str().as_bytes(),
SPECIAL_PATH_SEGMENT,
));
#[cfg(target_os = "wasi")]
serialization.extend(percent_encode(
component.as_os_str().to_string_lossy().as_bytes(),
SPECIAL_PATH_SEGMENT,
));
}
if empty {
// An URL's path must not be empty.
@ -3004,7 +2854,7 @@ fn path_to_file_url_segments(
Ok((host_end, HostInternal::None))
}
#[cfg(all(feature = "std", windows))]
#[cfg(windows)]
fn path_to_file_url_segments(
path: &Path,
serialization: &mut String,
@ -3013,14 +2863,11 @@ fn path_to_file_url_segments(
}
// Build this unconditionally to alleviate https://github.com/servo/rust-url/issues/102
#[cfg(feature = "std")]
#[cfg_attr(not(windows), allow(dead_code))]
fn path_to_file_url_segments_windows(
path: &Path,
serialization: &mut String,
) -> Result<(u32, HostInternal), ()> {
use crate::parser::PATH_SEGMENT;
use percent_encoding::percent_encode;
use std::path::{Component, Prefix};
if !path.is_absolute() {
return Err(());
@ -3079,23 +2926,16 @@ fn path_to_file_url_segments_windows(
Ok((host_end, host_internal))
}
#[cfg(all(
feature = "std",
any(unix, target_os = "redox", target_os = "wasi", target_os = "hermit")
))]
#[cfg(any(unix, target_os = "redox", target_os = "wasi"))]
fn file_url_segments_to_pathbuf(
host: Option<&str>,
segments: str::Split<'_, char>,
) -> Result<PathBuf, ()> {
use alloc::vec::Vec;
use percent_encoding::percent_decode;
#[cfg(not(target_os = "wasi"))]
use std::ffi::OsStr;
#[cfg(target_os = "hermit")]
use std::os::hermit::ffi::OsStrExt;
#[cfg(any(unix, target_os = "redox"))]
use std::os::unix::prelude::OsStrExt;
use std::path::PathBuf;
#[cfg(target_os = "wasi")]
use std::os::wasi::prelude::OsStrExt;
if host.is_some() {
return Err(());
@ -3120,12 +2960,8 @@ fn file_url_segments_to_pathbuf(
bytes.push(b'/');
}
#[cfg(not(target_os = "wasi"))]
let path = PathBuf::from(OsStr::from_bytes(&bytes));
#[cfg(target_os = "wasi")]
let path = String::from_utf8(bytes)
.map(|path| PathBuf::from(path))
.map_err(|_| ())?;
let os_str = OsStr::from_bytes(&bytes);
let path = PathBuf::from(os_str);
debug_assert!(
path.is_absolute(),
@ -3135,7 +2971,7 @@ fn file_url_segments_to_pathbuf(
Ok(path)
}
#[cfg(all(feature = "std", windows))]
#[cfg(windows)]
fn file_url_segments_to_pathbuf(
host: Option<&str>,
segments: str::Split<char>,
@ -3144,13 +2980,11 @@ fn file_url_segments_to_pathbuf(
}
// Build this unconditionally to alleviate https://github.com/servo/rust-url/issues/102
#[cfg(feature = "std")]
#[cfg_attr(not(windows), allow(dead_code))]
fn file_url_segments_to_pathbuf_windows(
host: Option<&str>,
mut segments: str::Split<'_, char>,
) -> Result<PathBuf, ()> {
use percent_encoding::percent_decode;
let mut string = if let Some(host) = host {
r"\\".to_owned() + host
} else {

View File

@ -9,10 +9,7 @@
use crate::host::Host;
use crate::parser::default_port;
use crate::Url;
use alloc::borrow::ToOwned;
use alloc::format;
use alloc::string::String;
use core::sync::atomic::{AtomicUsize, Ordering};
use std::sync::atomic::{AtomicUsize, Ordering};
pub fn url_origin(url: &Url) -> Origin {
let scheme = url.scheme();

View File

@ -6,10 +6,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use alloc::string::String;
use alloc::string::ToString;
use core::fmt::{self, Formatter, Write};
use core::str;
use std::error::Error;
use std::fmt::{self, Formatter, Write};
use std::str;
use crate::host::{Host, HostInternal};
use crate::Url;
@ -73,11 +72,7 @@ macro_rules! simple_enum_error {
}
}
#[cfg(feature = "std")]
impl std::error::Error for ParseError {}
#[cfg(not(feature = "std"))]
impl core::error::Error for ParseError {}
impl Error for ParseError {}
simple_enum_error! {
EmptyHost => "empty host",
@ -1114,7 +1109,7 @@ impl<'a> Parser<'a> {
while let (Some(c), remaining) = input.split_first() {
if let Some(digit) = c.to_digit(10) {
port = port * 10 + digit;
if port > u16::MAX as u32 {
if port > ::std::u16::MAX as u32 {
return Err(ParseError::InvalidPort);
}
has_any_digit = true;
@ -1125,11 +1120,6 @@ impl<'a> Parser<'a> {
}
input = remaining;
}
if !has_any_digit && context == Context::Setter && !input.is_empty() {
return Err(ParseError::InvalidPort);
}
let mut opt_port = Some(port as u16);
if !has_any_digit || opt_port == default_port() {
opt_port = None;
@ -1600,7 +1590,7 @@ pub fn ascii_alpha(ch: char) -> bool {
#[inline]
pub fn to_u32(i: usize) -> ParseResult<u32> {
if i <= u32::MAX as usize {
if i <= ::std::u32::MAX as usize {
Ok(i as u32)
} else {
Err(ParseError::Overflow)

View File

@ -8,8 +8,7 @@
use crate::parser::{self, to_u32, SchemeType};
use crate::Url;
use alloc::string::String;
use core::str;
use std::str;
/// Exposes methods to manipulate the path of an URL that is not cannot-be-base.
///
@ -20,11 +19,7 @@ use core::str;
///
/// ```rust
/// use url::Url;
///
/// # #[cfg(feature = "std")]
/// # use std::error::Error;
/// # #[cfg(not(feature = "std"))]
/// # use core::error::Error;
///
/// # fn run() -> Result<(), Box<dyn Error>> {
/// let mut url = Url::parse("mailto:me@example.com")?;
@ -83,11 +78,7 @@ impl<'a> PathSegmentsMut<'a> {
///
/// ```rust
/// use url::Url;
///
/// # #[cfg(feature = "std")]
/// # use std::error::Error;
/// # #[cfg(not(feature = "std"))]
/// # use core::error::Error;
///
/// # fn run() -> Result<(), Box<dyn Error>> {
/// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
@ -115,11 +106,7 @@ impl<'a> PathSegmentsMut<'a> {
///
/// ```rust
/// use url::Url;
///
/// # #[cfg(feature = "std")]
/// # use std::error::Error;
/// # #[cfg(not(feature = "std"))]
/// # use core::error::Error;
///
/// # fn run() -> Result<(), Box<dyn Error>> {
/// let mut url = Url::parse("https://github.com/servo/rust-url/")?;
@ -194,11 +181,7 @@ impl<'a> PathSegmentsMut<'a> {
///
/// ```rust
/// use url::Url;
///
/// # #[cfg(feature = "std")]
/// # use std::error::Error;
/// # #[cfg(not(feature = "std"))]
/// # use core::error::Error;
///
/// # fn run() -> Result<(), Box<dyn Error>> {
/// let mut url = Url::parse("https://github.com/")?;
@ -218,11 +201,7 @@ impl<'a> PathSegmentsMut<'a> {
///
/// ```rust
/// use url::Url;
///
/// # #[cfg(feature = "std")]
/// # use std::error::Error;
/// # #[cfg(not(feature = "std"))]
/// # use core::error::Error;
///
/// # fn run() -> Result<(), Box<dyn Error>> {
/// let mut url = Url::parse("https://github.com/servo")?;

View File

@ -13,8 +13,6 @@
use crate::parser::{default_port, Context, Input, Parser, SchemeType};
use crate::{Host, ParseError, Position, Url};
use alloc::string::String;
use alloc::string::ToString;
/// Internal components / offsets of a URL.
///

View File

@ -6,9 +6,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::ops::{Index, Range, RangeFrom, RangeFull, RangeTo};
use crate::Url;
use std::ops::{Index, Range, RangeFrom, RangeFull, RangeTo};
impl Index<RangeFull> for Url {
type Output = str;

View File

@ -39,7 +39,12 @@
<non-spec:/.//p> set hostname to <h>
<non-spec:/.//p> set hostname to <>
<foo:///some/path> set pathname to <>
<http://example.net:8080/path> set port to <randomstring>
<file:///var/log/system.log> set href to <http://0300.168.0xF0>
<file://monkey/> set pathname to <\\\\>
<file:///unicorn> set pathname to <//\\/>
<file:///unicorn> set pathname to <//monkey/..//>
<non-spec:/> set pathname to </.//p>
<non-spec:/> set pathname to </..//p>
<non-spec:/> set pathname to <//p>
<non-spec:/.//> set pathname to <p>

View File

@ -7,39 +7,13 @@
// except according to those terms.
//! Unit tests
#![no_std]
#[cfg(feature = "std")]
extern crate std;
#[macro_use]
extern crate alloc;
use alloc::borrow::Cow;
use alloc::borrow::ToOwned;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::cell::{Cell, RefCell};
#[cfg(feature = "std")]
use std::dbg;
use url::{form_urlencoded, Host, Origin, Url};
/// `std` version of `net`
#[cfg(feature = "std")]
pub(crate) mod net {
pub use std::net::*;
}
/// `no_std` nightly version of `net`
#[cfg(not(feature = "std"))]
pub(crate) mod net {
pub use core::net::*;
}
use crate::net::{Ipv4Addr, Ipv6Addr};
#[cfg(feature = "std")]
use std::borrow::Cow;
use std::cell::{Cell, RefCell};
use std::net::{Ipv4Addr, Ipv6Addr};
#[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
use std::path::{Path, PathBuf};
use url::{form_urlencoded, Host, Origin, Url};
// https://rustwasm.github.io/wasm-bindgen/wasm-bindgen-test/usage.html
#[cfg(all(target_arch = "wasm32", target_os = "unknown"))]
@ -49,7 +23,7 @@ wasm_bindgen_test_configure!(run_in_browser);
#[test]
fn size() {
use core::mem::size_of;
use std::mem::size_of;
assert_eq!(size_of::<Url>(), size_of::<Option<Url>>());
}
@ -150,7 +124,6 @@ fn test_set_empty_query() {
assert_eq!(base.as_str(), "moz://example.com/path");
}
#[cfg(feature = "std")]
#[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
macro_rules! assert_from_file_path {
($path: expr) => {
@ -165,7 +138,6 @@ macro_rules! assert_from_file_path {
}
#[test]
#[cfg(feature = "std")]
#[cfg(any(unix, windows))]
fn new_file_paths() {
if cfg!(unix) {
@ -188,7 +160,7 @@ fn new_file_paths() {
}
#[test]
#[cfg(all(feature = "std", unix))]
#[cfg(unix)]
fn new_path_bad_utf8() {
use std::ffi::OsStr;
use std::os::unix::prelude::*;
@ -199,7 +171,7 @@ fn new_path_bad_utf8() {
}
#[test]
#[cfg(all(feature = "std", windows))]
#[cfg(windows)]
fn new_path_windows_fun() {
assert_from_file_path!(r"C:\foo\bar", "/C:/foo/bar");
assert_from_file_path!("C:\\foo\\ba\0r", "/C:/foo/ba%00r");
@ -220,10 +192,6 @@ fn new_path_windows_fun() {
}
#[test]
#[cfg(all(
feature = "std",
any(unix, windows, target_os = "redox", target_os = "wasi")
))]
#[cfg(any(unix, windows))]
fn new_directory_paths() {
if cfg!(unix) {
@ -290,7 +258,6 @@ fn issue_124() {
}
#[test]
#[cfg(feature = "std")]
fn test_equality() {
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
@ -481,7 +448,6 @@ fn issue_61() {
}
#[test]
#[cfg(feature = "std")]
#[cfg(any(unix, target_os = "redox", target_os = "wasi"))]
#[cfg(not(windows))]
/// https://github.com/servo/rust-url/issues/197
@ -573,7 +539,6 @@ fn test_leading_dots() {
}
#[test]
#[cfg(feature = "std")]
/// https://github.com/servo/rust-url/issues/302
fn test_origin_hash() {
use std::collections::hash_map::DefaultHasher;
@ -668,7 +633,6 @@ fn test_origin_unicode_serialization() {
}
#[test]
#[cfg(feature = "std")]
#[cfg(any(unix, windows, target_os = "redox", target_os = "wasi"))]
fn test_socket_addrs() {
use std::net::ToSocketAddrs;
@ -851,7 +815,6 @@ fn test_expose_internals() {
}
#[test]
#[cfg(feature = "std")]
#[cfg(windows)]
fn test_windows_unc_path() {
let url = Url::from_file_path(Path::new(r"\\host\share\path\file.txt")).unwrap();
@ -960,8 +923,8 @@ fn test_options_reuse() {
}
/// https://github.com/servo/rust-url/issues/505
#[cfg(windows)]
#[test]
#[cfg(all(feature = "std", windows))]
fn test_url_from_file_path() {
use std::path::PathBuf;
use url::Url;
@ -973,7 +936,6 @@ fn test_url_from_file_path() {
}
/// https://github.com/servo/rust-url/issues/505
#[cfg(feature = "std")]
#[cfg(any(unix, target_os = "redox", target_os = "wasi"))]
#[cfg(not(windows))]
#[test]
@ -1347,7 +1309,6 @@ fn test_file_with_drive_and_path() {
assert_eq!(url2.to_string(), "file:///p:/a");
}
#[cfg(feature = "std")]
#[test]
fn issue_864() {
let mut url = url::Url::parse("file://").unwrap();
@ -1355,27 +1316,3 @@ fn issue_864() {
url.set_path("x");
dbg!(&url);
}
#[test]
fn issue_974() {
let mut url = url::Url::parse("http://example.com:8000").unwrap();
let _ = url::quirks::set_port(&mut url, "\u{0000}9000");
assert_eq!(url.port(), Some(8000));
}
#[cfg(feature = "serde")]
#[test]
fn serde_error_message() {
use serde::Deserialize;
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct TypeWithUrl {
url: Url,
}
let err = serde_json::from_str::<TypeWithUrl>(r#"{"url": "§invalid#+#*Ä"}"#).unwrap_err();
assert_eq!(
err.to_string(),
r#"relative URL without a base: "§invalid#+#*Ä" at line 1 column 25"#
);
}