Merge inbound to mozilla-central. a=merge

This commit is contained in:
Noemi Erli 2019-04-09 12:55:19 +03:00
commit dd7e27f4a8
403 changed files with 62590 additions and 142316 deletions

2
.gitignore vendored
View File

@@ -155,5 +155,3 @@ lextab.py
!.vscode/extensions.json
!.vscode/tasks.json
# Ignore file generated by lalrpop at build time.
third_party/rust/lalrpop/src/parser/lrgrammar.rs

View File

@@ -186,8 +186,5 @@ tps_result\.json
^testing/raptor/raptor/tests/.*.json
^testing/raptor/webext/raptor/auto_gen_test_config.js
# Ignore file generated by lalrpop at build time.
^third_party/rust/lalrpop/src/parser/lrgrammar.rs
# Ignore the build directories of WebRender standalone builds.
gfx/wr/target/

136
Cargo.lock generated
View File

@@ -242,7 +242,7 @@ dependencies = [
"peeking_take_while 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.27 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"which 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -304,7 +304,7 @@ dependencies = [
"guid_win 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -324,15 +324,6 @@ dependencies = [
"constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "block-buffer"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"arrayref 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
"byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "block-buffer"
version = "0.7.0"
@@ -379,11 +370,6 @@ name = "build_const"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byte-tools"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byte-tools"
version = "0.3.0"
@@ -442,7 +428,7 @@ dependencies = [
"nserror 0.1.0",
"nsstring 0.1.0",
"rkv 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)",
"sha2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"style 0.0.1",
"thin-vec 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -875,14 +861,6 @@ name = "diff"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "digest"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "digest"
version = "0.8.0"
@@ -903,14 +881,14 @@ dependencies = [
[[package]]
name = "docopt"
version = "0.8.3"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.88 (git+https://github.com/servo/serde?branch=deserialize_from_enums10)",
"strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -961,7 +939,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "ena"
version = "0.9.3"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1167,7 +1145,7 @@ dependencies = [
"mozprofile 0.5.0",
"mozrunner 0.9.0",
"mozversion 0.2.0",
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.88 (git+https://github.com/servo/serde?branch=deserialize_from_enums10)",
"serde_json 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1197,14 +1175,6 @@ dependencies = [
"to_shmem 0.0.1",
]
[[package]]
name = "generic-array"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "generic-array"
version = "0.12.0"
@@ -1508,45 +1478,23 @@ dependencies = [
[[package]]
name = "lalrpop"
version = "0.16.0"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-snap 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.88 (git+https://github.com/servo/serde?branch=deserialize_from_enums10)",
"sha2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "lalrpop-snap"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ascii-canvas 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1554,7 +1502,7 @@ dependencies = [
[[package]]
name = "lalrpop-util"
version = "0.16.0"
version = "0.16.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -1878,7 +1826,7 @@ dependencies = [
name = "mozversion"
version = "0.2.0"
dependencies = [
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rust-ini 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2335,19 +2283,19 @@ dependencies = [
"aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex"
version = "1.0.0"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"aho-corasick 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2568,17 +2516,6 @@ dependencies = [
"stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "sha2"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "sha2"
version = "0.8.0"
@@ -2690,11 +2627,6 @@ name = "string_cache_shared"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "strsim"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "strsim"
version = "0.7.0"
@@ -2735,7 +2667,7 @@ dependencies = [
"parking_lot 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"selectors 0.21.0",
"servo_arc 0.1.1",
"smallbitvec 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2923,11 +2855,10 @@ dependencies = [
[[package]]
name = "thread_local"
version = "0.3.5"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3286,7 +3217,7 @@ dependencies = [
"hyper 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.88 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.88 (git+https://github.com/servo/serde?branch=deserialize_from_enums10)",
"serde_json 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3301,8 +3232,8 @@ name = "webidl"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"lalrpop 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-util 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -3538,12 +3469,10 @@ dependencies = [
"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
"checksum bitreader 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "80b13e2ab064ff3aa0bdbf1eff533f9822dc37899821f5f98c67f263eab51707"
"checksum blake2-rfc 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)" = "5d6d530bdd2d52966a6d03b7a964add7ae1a288d25214066fd4b600f0f796400"
"checksum block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"
"checksum block-buffer 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "49665c62e0e700857531fa5d3763e91b539ff1abeebd56808d378b495870d60d"
"checksum block-padding 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4fc4358306e344bf9775d0197fd00d2603e5afb0771bb353538630f022068ea3"
"checksum boxfnonce 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8380105befe91099e6f69206164072c05bc92427ff6aa8a5171388317346dd75"
"checksum build_const 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e90dc84f5e62d2ebe7676b83c22d33b6db8bd27340fb6ffbff0a364efa0cb9c9"
"checksum byte-tools 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40"
"checksum byte-tools 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "980479e6fde23246dfb54d47580d66b4e99202e7579c5eaa9fe10ecb5ebd2182"
"checksum byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "94f88df23a25417badc922ab0f5716cc1330e87f71ddd9203b3a3ccd9cedf75d"
"checksum bytes 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)" = "e178b8e0e239e844b083d5a0d4a156b2654e67f9f80144d48398fcd736a24fb8"
@@ -3594,16 +3523,15 @@ dependencies = [
"checksum derive_more 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3f57d78cf3bd45270dad4e70c21ec77a960b36c7a841ff9db76aaa775a8fb871"
"checksum devd-rs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e7c9ac481c38baf400d3b732e4a06850dfaa491d1b6379a249d9d40d14c2434c"
"checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
"checksum digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"
"checksum digest 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "05f47366984d3ad862010e22c7ce81a7dbcaebbdfb37241a620f8b6596ee135c"
"checksum dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "88972de891f6118092b643d85a0b28e0678e0f948d7f879aa32f2d5aafe97d2a"
"checksum docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d8acd393692c503b168471874953a2531df0e9ab77d0b6bbc582395743300a4a"
"checksum docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "db2906c2579b5b7207fc1e328796a9a8835dc44e22dbe8e460b1d636f9a7b225"
"checksum dogear 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6d54506b6b209740d0a7a35ca5976db1ad2ed1aa168acc3561efc6a84fa95afe"
"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
"checksum dtoa-short 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "068d4026697c1a18f0b0bb8cfcad1b0c151b90d8edb9bf4c235ad68128920d1d"
"checksum dwrote 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c31c624339dab99c223a4b26c2e803b7c248adaca91549ce654c76f39a03f5c8"
"checksum either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a"
"checksum ena 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "88dc8393b3c7352f94092497f6b52019643e493b6b890eb417cdb7c46117e621"
"checksum ena 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "25b4e5febb25f08c49f1b07dc33a182729a6b21edfb562b5aef95f78e0dbe5bb"
"checksum encoding_c 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "769ecb8b33323998e482b218c0d13cd64c267609023b4b7ec3ee740714c318ee"
"checksum encoding_rs 0.8.16 (registry+https://github.com/rust-lang/crates.io-index)" = "0535f350c60aac0b87ccf28319abc749391e912192255b0c00a2c12c6917bd73"
"checksum env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0561146661ae44c579e993456bc76d11ce1e0c7d745e57b2fa7146b6e49fa2ad"
@@ -3627,7 +3555,6 @@ dependencies = [
"checksum fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
"checksum gcc 0.3.54 (registry+https://github.com/rust-lang/crates.io-index)" = "5e33ec290da0d127825013597dbdfc28bee4964690c7ce1166cbc2a7bd08b1bb"
"checksum generic-array 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c0f28c2f5bfb5960175af447a2da7c18900693738343dc896ffbcabd9839592"
"checksum generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"
"checksum gl_generator 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "39a23d5e872a275135d66895d954269cf5e8661d234eb1c2480f4ce0d586acbd"
"checksum gleam 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)" = "39bb69499005e11b7b7cc0af38404a1bc0f53d954bffa8adcdb6e8d5b14f75d5"
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
@@ -3647,9 +3574,8 @@ dependencies = [
"checksum itoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c069bbec61e1ca5a596166e55dfe4773ff745c3d16b700013bcaff9a6df2c682"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum khronos_api 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2db585e1d738fc771bf08a151420d3ed193d9d895a36df7f6f8a9456b911ddc"
"checksum lalrpop 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f7014afd5642680074fd5dcc624d544f9eabfa281cba2c3ac56c3db6d21ad1b"
"checksum lalrpop-snap 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0b85aa455529344133d7ecaaac04c01ed87f459deeaa0fe5422885e2095d8cdc"
"checksum lalrpop-util 0.16.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2400aeebcd11259370d038c24821b93218dd2f33a53f53e9c8fcccca70be6696"
"checksum lalrpop 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02888049e197dff0c5c9fd503bd2458ea373c5e845c2f5460db1f9e43050d55e"
"checksum lalrpop-util 0.16.2 (registry+https://github.com/rust-lang/crates.io-index)" = "488da0d45c65af229321623c62660627d02b0e7fbc768a4c3fcd121815404ef1"
"checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1"
"checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b"
"checksum lazycell 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a6f08839bc70ef4a3fe1d566d5350f519c5912ea86be0df1740a7d247c7fc0ef"
@@ -3724,7 +3650,7 @@ dependencies = [
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
"checksum redox_users 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "214a97e49be64fd2c86f568dd0cb2c757d2cc53de95b273b6ad0a1c908482f26"
"checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b"
"checksum regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75ecf88252dce580404a22444fc7d626c01815debba56a7f4f536772a5ff19d3"
"checksum regex 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3d8c9f33201f46669484bacc312b00e7541bed6aaf296dffe2bb4e0ac6b8ce2a"
"checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db"
"checksum regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8f1ac0f60d675cc6cf13a20ec076568254472551051ad5dd050364d70671bf6b"
"checksum rkv 0.9.4 (registry+https://github.com/rust-lang/crates.io-index)" = "238764bd8750927754d91e4a27155ac672ba88934a2bf698c992d55e5ae25e5b"
@@ -3748,7 +3674,6 @@ dependencies = [
"checksum serde_bytes 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)" = "adb6e51a6b3696b301bc221d785f898b4457c619b51d7ce195a6d20baecb37b3"
"checksum serde_derive 1.0.88 (git+https://github.com/servo/serde?branch=deserialize_from_enums10)" = "<none>"
"checksum serde_json 1.0.26 (registry+https://github.com/rust-lang/crates.io-index)" = "44dd2cfde475037451fa99b7e5df77aa3cfd1536575fa8e7a538ab36dcde49ae"
"checksum sha2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9eb6be24e4c23a84d7184280d2722f7f2731fcdd4a9d886efbfe4413e4847ea0"
"checksum sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b4d8bfd0e469f417657573d8451fb33d16cfe0989359b93baf3a1ffc639543d"
"checksum shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
"checksum siphasher 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ffc669b726f2bc9a3bcff66e5e23b56ba6bf70e22a34c3d7b6d0b3450b65b84"
@@ -3761,7 +3686,6 @@ dependencies = [
"checksum string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "25d70109977172b127fe834e5449e5ab1740b9ba49fa18a2020f509174f25423"
"checksum string_cache_codegen 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1eea1eee654ef80933142157fdad9dd8bc43cf7c74e999e369263496f04ff4da"
"checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc"
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "91b52877572087400e83d24b9178488541e3d535259e04ff17a63df1e5ceff59"
"checksum syn 0.14.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4e4b5274d4a0a3d2749d5c158dc64d3403e60554dc61194648787ada5212473d"
@@ -3776,7 +3700,7 @@ dependencies = [
"checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
"checksum thin-slice 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c"
"checksum thin-vec 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "73fdf4b84c65a85168477b7fb6c498e0716bc9487fba24623389ea7f51708044"
"checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963"
"checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b"
"checksum thread_profiler 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf947d192a9be60ef5131cc7a4648886ba89d712f16700ebbf80c8a69d05d48f"
"checksum threadbound 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d23e87ee7a1ef5bd2d38cef24ff360f6e02beee13c6a7eb64dddde4a3da427a3"
"checksum time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "d825be0eb33fda1a7e68012d51e9c7f451dc1a69391e7fdc197060bb8c56667b"

View File

@@ -6,8 +6,6 @@
const {Cc, Ci, Cu} = require("chrome");
const ReplayInspector = require("devtools/server/actors/replay/inspector");
loader.lazyRequireGetter(this, "isShadowRoot", "devtools/shared/layout/utils", true);
loader.lazyRequireGetter(this, "nodeFilterConstants", "devtools/shared/dom-node-filter-constants");
loader.lazyRequireGetter(this, "standardTreeWalkerFilter", "devtools/server/actors/inspector/utils", true);
@@ -50,12 +48,8 @@ function DocumentWalker(node, rootWin,
throw new Error("Got an invalid root window in DocumentWalker");
}
if (isReplaying) {
this.walker = ReplayInspector.newDeepTreeWalker();
} else {
this.walker = Cc["@mozilla.org/inspector/deep-tree-walker;1"]
.createInstance(Ci.inIDeepTreeWalker);
}
this.walker = Cc["@mozilla.org/inspector/deep-tree-walker;1"]
.createInstance(Ci.inIDeepTreeWalker);
this.walker.showAnonymousContent = showAnonymousContent;
this.walker.showSubDocuments = true;
this.walker.showDocumentsAsNodes = true;

View File

@@ -253,18 +253,17 @@ class MainEventCollector {
* An array of unfiltered event listeners or an empty array
*/
getDOMListeners(node) {
const els = isReplaying ? ReplayInspector.els : Services.els;
if (typeof node.nodeName !== "undefined" && node.nodeName.toLowerCase() === "html") {
const winListeners =
els.getListenerInfoFor(node.ownerGlobal) || [];
Services.els.getListenerInfoFor(node.ownerGlobal) || [];
const docElementListeners =
els.getListenerInfoFor(node) || [];
Services.els.getListenerInfoFor(node) || [];
const docListeners =
els.getListenerInfoFor(node.parentNode) || [];
Services.els.getListenerInfoFor(node.parentNode) || [];
return [...winListeners, ...docElementListeners, ...docListeners];
}
return els.getListenerInfoFor(node) || [];
return Services.els.getListenerInfoFor(node) || [];
}
getJQuery(node) {

View File

@@ -42,13 +42,6 @@ const ReplayInspector = {
return gWindow;
},
// Return a proxy for a new tree walker in the replaying process.
newDeepTreeWalker() {
const data = dbg()._sendRequestAllowDiverge({ type: "newDeepTreeWalker" });
const obj = dbg()._getObject(data.id);
return wrapObject(obj);
},
// Create the InspectorUtils object to bind for other server users.
createInspectorUtils(utils) {
// Overwrite some APIs that will fail if called on proxies from the
@@ -61,19 +54,11 @@
};
},
// Modified EventListenerService to use when replaying. It would be nice to
// bind a special Services object for other server users, but doing so creates
// initialization problems.
els: {
getListenerInfoFor(node) {
const id = unwrapValue(node)._data.id;
const rv = dbg()._sendRequestAllowDiverge({
type: "getListenerInfoFor",
id,
});
const obj = dbg()._getObject(rv.id);
return wrapValue(obj);
},
wrapRequireHook(requireHook) {
return (id, require) => {
const rv = requireHook(id, require);
return substituteRequire(id, rv);
};
},
// Find the element in the replaying process which is being targeted by a
@@ -94,6 +79,96 @@
},
};
///////////////////////////////////////////////////////////////////////////////
// Require Substitutions
///////////////////////////////////////////////////////////////////////////////
// Server code in this process can try to interact with our replaying object
// proxies using various chrome interfaces. We swap these out for our own
// equivalent implementations so that things work smoothly.
function newSubstituteProxy(target, mapping) {
return new Proxy({}, {
get(_, name) {
if (mapping[name]) {
return mapping[name];
}
return target[name];
},
});
}
function createSubstituteChrome(chrome) {
const { Cc, Cu } = chrome;
return {
...chrome,
Cc: newSubstituteProxy(Cc, {
"@mozilla.org/inspector/deep-tree-walker;1": {
createInstance() {
// Return a proxy for a new tree walker in the replaying process.
const data = dbg()._sendRequestAllowDiverge({ type: "newDeepTreeWalker" });
const obj = dbg()._getObject(data.id);
return wrapObject(obj);
},
},
}),
Cu: newSubstituteProxy(Cu, {
isDeadWrapper(node) {
let unwrapped = proxyMap.get(node);
if (!unwrapped) {
return Cu.isDeadWrapper(node);
}
assert(unwrapped instanceof ReplayDebugger.Object);
// Objects are considered dead if we have unpaused since creating them
// and they are not one of the fixed proxies. This prevents the
// inspector from trying to continue using them.
if (!unwrapped._data) {
updateFixedProxies();
unwrapped = proxyMap.get(node);
return !unwrapped._data;
}
return false;
},
}),
};
}
function createSubstituteServices(Services) {
return newSubstituteProxy(Services, {
els: {
getListenerInfoFor(node) {
const id = unwrapValue(node)._data.id;
const rv = dbg()._sendRequestAllowDiverge({
type: "getListenerInfoFor",
id,
});
const obj = dbg()._getObject(rv.id);
return wrapValue(obj);
},
},
});
}
function createSubstitute(id, rv) {
switch (id) {
case "chrome": return createSubstituteChrome(rv);
case "Services": return createSubstituteServices(rv);
}
return null;
}
const substitutes = new Map();
function substituteRequire(id, rv) {
if (substitutes.has(id)) {
return substitutes.get(id) || rv;
}
const newrv = createSubstitute(id, rv);
substitutes.set(id, newrv);
return newrv || rv;
}
///////////////////////////////////////////////////////////////////////////////
// Replaying Object Proxies
///////////////////////////////////////////////////////////////////////////////

View File

@@ -607,11 +607,31 @@ function forwardToScript(name) {
return request => gScripts.getObject(request.id)[name](request.value);
}
function unknownObjectProperties(why) {
return [{
name: "Unknown properties",
desc: {
value: why,
enumerable: true,
},
}];
}
function getObjectProperties(object) {
const names = object.getOwnPropertyNames();
let names;
try {
names = object.getOwnPropertyNames();
} catch (e) {
return unknownObjectProperties(e.toString());
}
return names.map(name => {
const desc = object.getOwnPropertyDescriptor(name);
let desc;
try {
desc = object.getOwnPropertyDescriptor(name);
} catch (e) {
return { name, desc: { value: "Unknown: " + e, enumerable: true } };
}
if ("value" in desc) {
desc.value = convertValue(desc.value);
}
@@ -735,13 +755,7 @@ const gRequestHandlers = {
getObjectProperties(request) {
if (!RecordReplayControl.maybeDivergeFromRecording()) {
return [{
name: "Unknown properties",
desc: {
value: "Recording divergence in getObjectProperties",
enumerable: true,
},
}];
return unknownObjectProperties("Recording divergence in getObjectProperties");
}
const object = gPausedObjects.getObject(request.id);

View File

@@ -186,6 +186,15 @@ DevToolsLoader.prototype = {
this.lazyImporter = globals.loader.lazyImporter;
this.lazyServiceGetter = globals.loader.lazyServiceGetter;
this.lazyRequireGetter = globals.loader.lazyRequireGetter;
// When replaying, modify the require hook to allow the ReplayInspector to
// replace chrome interfaces with alternatives that understand the proxies
// created for objects in the recording/replaying process.
if (globals.isReplaying) {
const oldHook = this._provider.loader.requireHook;
const ReplayInspector = this.require("devtools/server/actors/replay/inspector");
this._provider.loader.requireHook = ReplayInspector.wrapRequireHook(oldHook);
}
},
/**

View File

@ -606,7 +606,7 @@ function Loader(options) {
// Whether the modules loaded should be ignored by the debugger
invisibleToDebugger: { enumerable: false,
value: options.invisibleToDebugger || false },
requireHook: { enumerable: false, value: options.requireHook },
requireHook: { enumerable: false, writable: true, value: options.requireHook },
};
return Object.create(null, returnObj);

View File

@ -67,16 +67,12 @@ void HTMLLIElement::MapAttributesIntoRule(const nsMappedAttributes* aAttributes,
value->GetEnumValue());
}
// Map <li value=INTEGER> to 'counter-set: list-item INTEGER;
// counter-increment: list-item 0;'.
// Map <li value=INTEGER> to 'counter-set: list-item INTEGER'.
const nsAttrValue* attrVal = aAttributes->GetAttr(nsGkAtoms::value);
if (attrVal && attrVal->Type() == nsAttrValue::eInteger) {
if (!aDecls.PropertyIsSet(eCSSProperty_counter_set)) {
aDecls.SetCounterSetListItem(attrVal->GetIntegerValue());
}
if (!aDecls.PropertyIsSet(eCSSProperty_counter_increment)) {
aDecls.SetCounterIncrementListItem(0);
}
}
nsGenericHTMLElement::MapCommonAttributesInto(aAttributes, aDecls);

View File

@ -215,15 +215,15 @@ bool nsCounterManager::AddCounterChanges(nsIFrame* aFrame) {
dirty |= AddCounterChangeNode(aFrame, i, styleContent->CounterResetAt(i),
nsCounterChangeNode::RESET);
}
for (i = 0, i_end = styleContent->CounterSetCount(); i != i_end; ++i) {
dirty |= AddCounterChangeNode(aFrame, i, styleContent->CounterSetAt(i),
nsCounterChangeNode::SET);
}
for (i = 0, i_end = styleContent->CounterIncrementCount(); i != i_end; ++i) {
dirty |=
AddCounterChangeNode(aFrame, i, styleContent->CounterIncrementAt(i),
nsCounterChangeNode::INCREMENT);
}
for (i = 0, i_end = styleContent->CounterSetCount(); i != i_end; ++i) {
dirty |= AddCounterChangeNode(aFrame, i, styleContent->CounterSetAt(i),
nsCounterChangeNode::SET);
}
return dirty;
}

View File

@ -22,8 +22,8 @@ struct nsCounterChangeNode;
struct nsCounterNode : public nsGenConNode {
enum Type {
RESET, // a "counter number" pair in 'counter-reset'
SET, // a "counter number" pair in 'counter-set'
INCREMENT, // a "counter number" pair in 'counter-increment'
SET, // a "counter number" pair in 'counter-set'
USE // counter() or counters() in 'content'
};
@ -57,11 +57,11 @@ struct nsCounterNode : public nsGenConNode {
inline nsCounterUseNode* UseNode();
inline nsCounterChangeNode* ChangeNode();
// For RESET, SET and INCREMENT nodes, aPseudoFrame need not be a
// For RESET, INCREMENT and SET nodes, aPseudoFrame need not be a
// pseudo-element, and aContentIndex represents the index within the
// 'counter-reset', 'counter-set' or 'counter-increment' property
// 'counter-reset', 'counter-increment' or 'counter-set' property
// instead of within the 'content' property but offset to ensure
// that (reset, set, increment, use) sort in that order.
// that (reset, increment, set, use) sort in that order.
// (This slight weirdness allows sharing a lot of code with 'quotes'.)
nsCounterNode(int32_t aContentIndex, Type aType)
: nsGenConNode(aContentIndex),
@ -112,18 +112,18 @@ struct nsCounterChangeNode : public nsCounterNode {
// since it is for every other subclass of nsGenConNode, we follow
// the naming convention here.
// |aPropIndex| is the index of the value within the list in the
// 'counter-increment', 'counter-set' or 'counter-reset' property.
// 'counter-increment', 'counter-reset' or 'counter-set' property.
nsCounterChangeNode(nsIFrame* aPseudoFrame, nsCounterNode::Type aChangeType,
int32_t aChangeValue,
int32_t aPropIndex)
: nsCounterNode( // Fake a content index for resets, sets and increments
: nsCounterNode( // Fake a content index for resets, increments and sets
// that comes before all the real content, with
// the resets first, in order, and then the sets and
// then the increments.
// the resets first, in order, and then the increments and
// then the sets.
aPropIndex + (aChangeType == RESET
? (INT32_MIN)
: (aChangeType == SET ? ((INT32_MIN / 3) * 2)
: INT32_MIN / 3)),
: (aChangeType == INCREMENT ? ((INT32_MIN / 3) * 2)
: INT32_MIN / 3)),
aChangeType),
mChangeValue(aChangeValue) {
NS_ASSERTION(aPropIndex >= 0, "out of range");

View File

@ -0,0 +1,14 @@
<!-- NOTE(review): crash test fixture (presumably the 1539656.html added by the
     crashtests.list change in this same patch - confirm). On DOMContentLoaded
     it removes the unclosed <ul id='b'> and moves the <table id='a'> out of
     the list item into the iframe's document. The markup is intentionally
     malformed; do not "tidy" it, or the crash it reproduces may vanish. -->
<script>
document.addEventListener("DOMContentLoaded", function() {
var o=document.getElementById('b');
o.parentNode.removeChild(o);
window.frames[0].document.body.appendChild(document.getElementById('a'));
})
</script>
<ol>
<li>
<table id='a'>
</table>
<iframe></iframe>
</li>
<ul id='b'>

View File

@ -726,3 +726,4 @@ pref(layout.css.column-span.enabled,true) load 1517033.html
pref(layout.css.column-span.enabled,true) load 1517297.html
load 1520798-1.xul
load 1520798-2.html
load 1539656.html

View File

@ -6770,6 +6770,7 @@ bool nsBlockFrame::MarkerIsEmpty() const {
marker->StyleContent()->ContentCount() == 0;
}
#ifdef ACCESSIBILITY
void nsBlockFrame::GetSpokenMarkerText(nsAString& aText) const {
const nsStyleList* myList = StyleList();
if (myList->GetListStyleImage()) {
@ -6791,6 +6792,7 @@ void nsBlockFrame::GetSpokenMarkerText(nsAString& aText) const {
}
}
}
#endif
void nsBlockFrame::ReflowOutsideMarker(nsIFrame* aMarkerFrame,
BlockReflowInput& aState,

View File

@ -234,10 +234,12 @@ class nsBlockFrame : public nsContainerFrame {
// not 'none', and no 'content'?
bool MarkerIsEmpty() const;
#ifdef ACCESSIBILITY
/**
* Return the ::marker text equivalent.
* Return the ::marker text equivalent, without flushing.
*/
void GetSpokenMarkerText(nsAString& aText) const;
#endif
/**
* Return true if this frame has a ::marker frame.

View File

@ -824,11 +824,11 @@ ImgDrawResult nsBulletFrame::PaintBullet(gfxContext& aRenderingContext,
aDisableSubpixelAA, this);
}
int32_t nsBulletFrame::Ordinal() const {
int32_t nsBulletFrame::Ordinal(bool aDebugFromA11y) const {
auto* fc = PresShell()->FrameConstructor();
auto* cm = fc->CounterManager();
auto* list = cm->CounterListFor(NS_LITERAL_STRING("list-item"));
MOZ_ASSERT(list && !list->IsDirty());
MOZ_ASSERT(aDebugFromA11y || (list && !list->IsDirty()));
nsIFrame* listItem = GetParent()->GetContent()->GetPrimaryFrame();
int32_t value = 0;
for (auto* node = list->First(); node; node = list->Next(node)) {
@ -1263,12 +1263,13 @@ nscoord nsBulletFrame::GetLogicalBaseline(WritingMode aWritingMode) const {
return ascent + GetLogicalUsedMargin(aWritingMode).BStart(aWritingMode);
}
#ifdef ACCESSIBILITY
void nsBulletFrame::GetSpokenText(nsAString& aText) {
CounterStyle* style =
PresContext()->CounterStyleManager()->ResolveCounterStyle(
StyleList()->mCounterStyle);
bool isBullet;
style->GetSpokenCounterText(Ordinal(), GetWritingMode(), aText, isBullet);
style->GetSpokenCounterText(Ordinal(true), GetWritingMode(), aText, isBullet);
if (isBullet) {
if (!style->IsNone()) {
aText.Append(' ');
@ -1280,6 +1281,7 @@ void nsBulletFrame::GetSpokenText(nsAString& aText) {
aText = prefix + aText + suffix;
}
}
#endif
void nsBulletFrame::RegisterImageRequest(bool aKnownToBeAnimated) {
if (mImageRequest) {

View File

@ -93,7 +93,9 @@ class nsBulletFrame final : public nsFrame {
static void GetListItemText(mozilla::CounterStyle*, mozilla::WritingMode,
int32_t aOrdinal, nsAString& aResult);
#ifdef ACCESSIBILITY
void GetSpokenText(nsAString& aText);
#endif
Maybe<BulletRenderer> CreateBulletRenderer(gfxContext& aRenderingContext,
nsPoint aPt);
@ -112,7 +114,8 @@ class nsBulletFrame final : public nsFrame {
}
void SetFontSizeInflation(float aInflation);
int32_t Ordinal() const;
// aDebugFromA11y should not be used
int32_t Ordinal(bool aDebugFromA11y = false) const;
already_AddRefed<imgIContainer> GetImage() const;

View File

@ -106,11 +106,6 @@ class MappedDeclarations final {
Servo_DeclarationBlock_SetCounterSetListItem(mDecl, aValue);
}
// Set "counter-increment: list-item <integer>".
void SetCounterIncrementListItem(int32_t aValue) {
Servo_DeclarationBlock_SetCounterIncrementListItem(mDecl, aValue);
}
// Set a property to a pixel value
void SetPixelValue(nsCSSPropertyID aId, float aValue) {
Servo_DeclarationBlock_SetPixelValue(mDecl, aId, aValue);

View File

@ -11,7 +11,7 @@ moz_task = { path = "../../../../xpcom/rust/moz_task" }
nserror = { path = "../../../../xpcom/rust/nserror" }
nsstring = { path = "../../../../xpcom/rust/nsstring" }
rkv = "^0.9"
sha2 = "^0.7"
sha2 = "^0.8"
style = { path = "../../../../servo/components/style" }
thin-vec = { version = "0.1.0", features = ["gecko-ffi"] }
time = "0.1"

View File

@ -4430,23 +4430,6 @@ pub extern "C" fn Servo_DeclarationBlock_SetCounterSetListItem(
})
}
/// FFI entry point: appends `counter-increment: list-item <counter_value>` to
/// the given declaration block. Called from MappedDeclarations::
/// SetCounterIncrementListItem when mapping the <li value> attribute.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetCounterIncrementListItem(
    declarations: &RawServoDeclarationBlock,
    counter_value: i32,
) {
    use style::values::generics::counters::{CounterPair, CounterIncrement};
    use style::properties::{PropertyDeclaration};
    // Build a single-entry counter-increment declaration for "list-item".
    let prop = PropertyDeclaration::CounterIncrement(CounterIncrement::new(vec![CounterPair {
        name: CustomIdent(atom!("list-item")),
        value: style::values::specified::Integer::new(counter_value),
    }]));
    // Append with normal importance while holding the block's lock.
    write_locked_arc(declarations, |decls: &mut PropertyDeclarationBlock| {
        decls.push(prop, Importance::Normal);
    })
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetPixelValue(
declarations: &RawServoDeclarationBlock,

View File

@ -18,8 +18,8 @@ html,body {
<span>7</span><!-- "7" -->
<span>0</span><!-- "0" -->
<span>7</span><!-- "7" -->
<span>8</span><!-- "8" -->
<span>2</span><!-- "2" -->
<span>6</span><!-- "6" -->
<span>0</span><!-- "0" -->
<x>
<span>2</span><!-- "2" -->
</x>

View File

@ -21,8 +21,8 @@ span::before { content: counters(n, '.'); }
<span style="counter-set: n 7"></span><!-- "7" -->
<span style="counter-set: n"></span><!-- "0" -->
<span style="counter-set: n 8 n 7"></span><!-- "7" -->
<span style="counter-set: n 6; counter-increment: n 2"></span><!-- "8" -->
<span style="counter-set: n; counter-increment: n 2"></span><!-- "2" -->
<span style="counter-set: n 6; counter-increment: n 2"></span><!-- "6" -->
<span style="counter-set: n; counter-increment: n 2"></span><!-- "0" -->
<x style="counter-reset: n 9">
<span style="counter-set: n 2"></span><!-- "2" -->
</x>

View File

@ -17,10 +17,10 @@ body { margin-left: 10em; }
<body>
<ol><li value=0>a<li value=4>b<li value=4>c</ol>
<ol><li value=0>a<li value=9>b<li value=9>c</ol>
<ol><li value=-1>a<li value=3>b<li value=2>c</ol>
<ol><li value=0>a<li value=4>b<li value=4>c</ol>
<ol><li value=2>a<li value=6>b<li value=8>c</ol>
<ol><li value=-1>a<li value=4>b<li value=3>c</ol>
<ol><li value=0>a<li value=4>b<li value=4>c</ol>
<ol><li value=2>a<li value=4>b<li value=6>c</ol>
</body>
</html>

View File

@ -17,9 +17,9 @@ body { margin-left: 10em; }
<body>
<ol><li>a<li value=99>b</ol>
<ol><li>a<li value=149>b</ol>
<ol><li>a<li value=54>b</ol>
<ol><li>a<li value=149>b</ol>
<ol><li>a<li value=99>b</ol>
<ol><li>a<li value=4>b</ol>
<ol><li>a<li value=99>b</ol>
<ol><li>a<li value=51>b</ol>
<ol><li>a<li value=88>b</ol>

View File

@ -1 +0,0 @@
{"files":{"Cargo.toml":"373908618d7bdf561f84ddc5add92f69dab295c97ab0908d3a4ec428fad23bad","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"9e0dfd2dd4173a530e238cb6adb37aa78c34c6bc7444e0e10c1ab5d8881f63ba","src/lib.rs":"bdf23c8a00fb4d51beabeb6600fe45ebf1be618632db885013b6f60a5666c124","src/paddings.rs":"7a18850dab9dca0a3e6cc49d6a94a9566ea2473628f42f726a69f8e07f95872a"},"package":"a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"}

View File

@ -1,27 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "block-buffer"
version = "0.3.3"
authors = ["RustCrypto Developers"]
description = "Fixed size buffer for block processing of data"
documentation = "https://docs.rs/block-buffer"
keywords = ["block", "padding", "pkcs7", "ansix923", "iso7816"]
categories = ["cryptography", "no-std"]
license = "MIT/Apache-2.0"
repository = "https://github.com/RustCrypto/utils"
[dependencies.arrayref]
version = "0.3"
[dependencies.byte-tools]
version = "0.2"

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,25 +0,0 @@
Copyright (c) 2017 Artyom Pavlov
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@ -1,144 +0,0 @@
#![no_std]
#[macro_use]
extern crate arrayref;
extern crate byte_tools;
use byte_tools::{zero, write_u64_le};
mod paddings;
pub use paddings::*;
// Generates a fixed-size block buffer type: `$name` accumulates input bytes
// in a `$len`-byte array and invokes a caller-supplied closure with every
// complete block. Used by hash implementations for message blocking/padding.
macro_rules! impl_buffer {
    ($name:ident, $len:expr) => {
        pub struct $name {
            // Backing storage for one block.
            buffer: [u8; $len],
            // Number of valid bytes currently buffered (invariant: 0..=$len).
            pos: usize,
        }
        impl Copy for $name {}
        impl Clone for $name {
            fn clone(&self) -> Self {
                *self
            }
        }
        impl Default for $name {
            fn default() -> Self {
                $name {buffer: [0; $len], pos: 0}
            }
        }
        impl $name {
            /// Feed `input` into the buffer, calling `func` once for every
            /// complete `$len`-byte block; any remainder is retained in the
            /// buffer for the next call.
            #[inline]
            pub fn input<F: FnMut(&[u8; $len])>(&mut self, mut input: &[u8], mut func: F) {
                // If there is already data in the buffer, copy as much as we can
                // into it and process the data if the buffer becomes full.
                if self.pos != 0 {
                    let rem = self.remaining();
                    if input.len() >= rem {
                        let (l, r) = input.split_at(rem);
                        input = r;
                        self.buffer[self.pos..].copy_from_slice(l);
                        self.pos = 0;
                        func(&self.buffer);
                    } else {
                        // Not enough input to complete the block; stash and bail.
                        let end = self.pos + input.len();
                        self.buffer[self.pos..end].copy_from_slice(input);
                        self.pos = end;
                        return;
                    }
                }
                // While we have at least a full buffer size chunk's worth of data,
                // process that data without copying it into the buffer
                while input.len() >= self.size() {
                    let (l, r) = input.split_at(self.size());
                    input = r;
                    func(array_ref!(l, 0, $len));
                }
                // Copy any input data into the buffer. At this point in the method,
                // the amount of data left in the input vector will be less than
                // the buffer size and the buffer will be empty.
                self.buffer[..input.len()].copy_from_slice(input);
                self.pos = input.len();
            }
            // Write the 0x80 end-of-message marker and zero-fill the tail; if
            // fewer than `up_to` bytes remain for the length field, flush this
            // block via `func` and start over with a zeroed buffer.
            // NOTE(review): relies on `pos < $len` on entry (input() resets
            // pos to 0 whenever the buffer fills) - confirm callers uphold it.
            #[inline]
            fn digest_pad<F>(&mut self, up_to: usize, func: &mut F)
                where F: FnMut(&[u8; $len])
            {
                self.buffer[self.pos] = 0x80;
                self.pos += 1;
                zero(&mut self.buffer[self.pos..]);
                if self.remaining() < up_to {
                    func(&self.buffer);
                    zero(&mut self.buffer[..self.pos]);
                }
            }
            #[inline]
            /// Pads the message with the 0x80 marker and the 64-bit message
            /// length in the final 8 bytes, then flushes through `func`.
            /// NOTE(review): the original doc claimed "big-endian", but the
            /// length is written with `write_u64_le` (little-endian) - confirm
            /// against upstream block-buffer before relying on either claim.
            pub fn len_padding<F>(&mut self, data_len: u64, mut func: F)
                where F: FnMut(&[u8; $len])
            {
                self.digest_pad(8, &mut func);
                let s = self.size();
                write_u64_le(&mut self.buffer[s-8..], data_len);
                func(&self.buffer);
                self.pos = 0;
            }
            /// Like `len_padding`, but reserves 16 bytes and writes a 128-bit
            /// length as two 64-bit halves (`hi` first, then `lo`), each via
            /// `write_u64_le`.
            #[inline]
            pub fn len_padding_u128<F>(&mut self, hi: u64, lo: u64, mut func: F)
                where F: FnMut(&[u8; $len])
            {
                self.digest_pad(16, &mut func);
                let s = self.size();
                write_u64_le(&mut self.buffer[s-16..s-8], hi);
                write_u64_le(&mut self.buffer[s-8..], lo);
                func(&self.buffer);
                self.pos = 0;
            }
            /// Apply padding scheme `P` to the buffered tail, reset the
            /// position, and return the completed block.
            #[inline]
            pub fn pad_with<P: Padding>(&mut self) -> &mut [u8; $len] {
                P::pad(&mut self.buffer[..], self.pos);
                self.pos = 0;
                &mut self.buffer
            }
            /// Block size in bytes.
            #[inline]
            pub fn size(&self) -> usize {
                $len
            }
            /// Number of bytes currently buffered.
            #[inline]
            pub fn position(&self) -> usize {
                self.pos
            }
            /// Free space left in the current block, in bytes.
            #[inline]
            pub fn remaining(&self) -> usize {
                self.size() - self.pos
            }
        }
    }
}
// Buffer types are named by block size in *bits*: e.g. BlockBuffer512 holds
// 64 bytes. Sizes cover the common hash block sizes (MD5/SHA/SHA-3 family).
impl_buffer!(BlockBuffer128, 16);
impl_buffer!(BlockBuffer256, 32);
impl_buffer!(BlockBuffer512, 64);
impl_buffer!(BlockBuffer1024, 128);
impl_buffer!(BlockBuffer576, 72);
impl_buffer!(BlockBuffer832, 104);
impl_buffer!(BlockBuffer1088, 136);
impl_buffer!(BlockBuffer1152, 144);
impl_buffer!(BlockBuffer1344, 168);

View File

@ -1,129 +0,0 @@
use byte_tools::{zero, set};
/// Trait for padding messages divided into blocks.
pub trait Padding {
    /// Pads `block`, which holds message data up to (but not including)
    /// index `pos`; bytes from `pos` onward are overwritten with padding.
    fn pad(block: &mut [u8], pos: usize);
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
/// Error for indicating a failed unpadding process.
pub struct UnpadError;
/// Trait for extracting the original message from a padded buffer.
pub trait Unpadding {
    /// Unpads the given `data` by truncating it according to the padding
    /// scheme in use. Returns `UnpadError` if the padding is malformed.
    fn unpad(data: &[u8]) -> Result<&[u8], UnpadError>;
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum ZeroPadding{}
impl Padding for ZeroPadding {
#[inline]
fn pad(block: &mut [u8], pos: usize) {
zero(&mut block[pos..])
}
}
impl Unpadding for ZeroPadding {
#[inline]
fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
let mut n = data.len() - 1;
while n != 0 {
if data[n] != 0 {
break;
}
n -= 1;
}
Ok(&data[..n+1])
}
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Pkcs7{}
impl Padding for Pkcs7 {
#[inline]
fn pad(block: &mut [u8], pos: usize) {
let n = block.len() - pos;
set(&mut block[pos..], n as u8);
}
}
impl Unpadding for Pkcs7 {
    /// Strip PKCS#7 padding: the last byte `n` gives the pad length, and
    /// every one of the trailing `n` bytes must equal `n`.
    #[inline]
    fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
        if data.is_empty() {
            return Err(UnpadError);
        }
        let l = data.len();
        let n = data[l - 1] as usize;
        // A pad length of zero, or one larger than the buffer, is malformed.
        // The `n > l` check also keeps the slice range below from panicking
        // on out-of-range pad values (previously a panic, not an error).
        if n == 0 || n > l {
            return Err(UnpadError);
        }
        for v in &data[l - n..l - 1] {
            if *v as usize != n {
                return Err(UnpadError);
            }
        }
        Ok(&data[..l - n])
    }
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum AnsiX923{}
impl Padding for AnsiX923 {
#[inline]
fn pad(block: &mut [u8], pos: usize) {
let n = block.len() - 1;
zero(&mut block[pos..n]);
block[n] = (n - pos) as u8;
}
}
impl Unpadding for AnsiX923 {
    /// Strip ANSI X9.23 padding: a run of zero bytes followed by a final
    /// byte giving the pad length.
    #[inline]
    fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
        if data.is_empty() {
            return Err(UnpadError);
        }
        let l = data.len();
        let n = data[l - 1] as usize;
        // Reject pad lengths of zero or larger than the buffer; the `n > l`
        // check also keeps the slice range below from panicking on
        // out-of-range pad values (previously a panic, not an error).
        if n == 0 || n > l {
            return Err(UnpadError);
        }
        for v in &data[l - n..l - 1] {
            if *v != 0 {
                return Err(UnpadError);
            }
        }
        Ok(&data[..l - n])
    }
}
/// ISO/IEC 7816-4 padding: a single 0x80 marker byte followed by zeros.
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Iso7816 {}

impl Padding for Iso7816 {
    /// Write the 0x80 marker at `pos` and zero-fill the rest of the block.
    ///
    /// Previously the tail was filled with the pad length instead of zeros.
    /// That contradicts ISO 7816-4 and this file's own `Unpadding for
    /// Iso7816` (which skips a trailing zero run and then expects 0x80), so
    /// a pad longer than one byte could never be stripped back off.
    #[inline]
    fn pad(block: &mut [u8], pos: usize) {
        block[pos] = 0x80;
        for b in block[pos + 1..].iter_mut() {
            *b = 0;
        }
    }
}
impl Unpadding for Iso7816 {
    /// Strip ISO/IEC 7816-4 padding: skip the trailing run of zero bytes and
    /// require the byte immediately before it to be the 0x80 marker.
    fn unpad(data: &[u8]) -> Result<&[u8], UnpadError> {
        if data.is_empty() {
            return Err(UnpadError);
        }
        // Walk backwards over the zero run; stop at the first non-zero byte
        // (or at index 0 when the whole buffer is zero).
        let mut marker = data.len() - 1;
        while marker != 0 && data[marker] == 0 {
            marker -= 1;
        }
        if data[marker] == 0x80 {
            Ok(&data[..marker])
        } else {
            Err(UnpadError)
        }
    }
}

View File

@ -1 +0,0 @@
{"files":{"Cargo.toml":"af6af6ea1dfa296af5dc58986d1afb46952328588069ec0b08723db439e9972d","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"52232c2cee3bb7d8cabe47ef367f1bf8bb607c22bdfca0219d6156cb7f446e9d","src/lib.rs":"9c96cffef7458fc7bd9e4e61270b69d539ff3a9225a0319b7996155c25ff96ab","src/read_single.rs":"3ab78b15754c2a7848a1be871ff6ee2a31a099f8f4f89be44ad210cda0dbcc9a","src/read_slice.rs":"b3790f2fd080db97e239c05c63da123ea375fb9b354dc9cacb859ed9c44f552e","src/write_single.rs":"1cee4f2f5d8690e47840ea7017539ead417a26abc0717137442a6d9d2875afe4","src/write_slice.rs":"de90e6b9cfca67125871bee7cef55c63574b1871a6584e51fc00a97e5877fe69"},"package":"560c32574a12a89ecd91f5e742165893f86e3ab98d21f8ea548658eb9eef5f40"}

View File

@ -1,21 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "byte-tools"
version = "0.2.0"
authors = ["The Rust-Crypto Project Developers"]
description = "Utility functions for working with bytes"
documentation = "https://docs.rs/byte-tools"
keywords = ["bytes"]
license = "MIT/Apache-2.0"
repository = "https://github.com/RustCrypto/utils"

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,26 +0,0 @@
Copyright (c) 2006-2009 Graydon Hoare
Copyright (c) 2009-2013 Mozilla Foundation
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@ -1,37 +0,0 @@
#![no_std]
use core::ptr;
mod read_single;
mod write_single;
mod read_slice;
mod write_slice;
pub use read_single::*;
pub use write_single::*;
pub use read_slice::*;
pub use write_slice::*;
/// Copy all bytes of `src` into the front of `dst`.
///
/// # Panics
///
/// Panics if `dst` is shorter than `src`.
#[inline]
pub fn copy_memory(src: &[u8], dst: &mut [u8]) {
    assert!(dst.len() >= src.len());
    // Safe equivalent of the previous ptr::copy_nonoverlapping: slicing
    // enforces the bound and copy_from_slice compiles to the same memcpy.
    dst[..src.len()].copy_from_slice(src);
}
/// Zero all bytes in `dst`.
#[inline]
pub fn zero(dst: &mut [u8]) {
    // Equivalent to set(dst, 0), written out directly.
    for byte in dst.iter_mut() {
        *byte = 0;
    }
}
/// Sets all bytes in `dst` equal to `value`.
#[inline]
pub fn set(dst: &mut [u8], value: u8) {
    // Safe replacement for the previous unsafe ptr::write_bytes;
    // the loop optimizes to the same memset.
    for byte in dst.iter_mut() {
        *byte = value;
    }
}

View File

@ -1,38 +0,0 @@
use core::{mem, ptr};
/// Decode one integer of type `$ty` from the `$size`-byte slice `$src`,
/// normalizing byte order with `$which` (`to_le` or `to_be`).
macro_rules! read_single {
    ($src:expr, $size:expr, $ty:ty, $which:ident) => ({
        assert!($size == mem::size_of::<$ty>());
        assert!($size == $src.len());
        // Build the value from a zero-initialized buffer instead of
        // mem::uninitialized(), which is deprecated and UB to read
        // before every byte has been written.
        let mut buf = [0u8; $size];
        buf.copy_from_slice($src);
        // from_ne_bytes + $which() reproduces the old semantics:
        // reinterpret native bytes, then swap for the requested order.
        <$ty>::from_ne_bytes(buf).$which()
    });
}
/// Read the value of a vector of bytes as a u32 value in little-endian format.
///
/// # Panics
///
/// Panics if `src` is not exactly 4 bytes long.
#[inline]
pub fn read_u32_le(src: &[u8]) -> u32 {
    assert_eq!(src.len(), 4);
    // Safe decode; avoids the mem::uninitialized() inside the
    // read_single! macro this previously expanded to.
    let mut buf = [0u8; 4];
    buf.copy_from_slice(src);
    u32::from_le_bytes(buf)
}
/// Read the value of a vector of bytes as a u32 value in big-endian format.
///
/// # Panics
///
/// Panics if `src` is not exactly 4 bytes long.
#[inline]
pub fn read_u32_be(src: &[u8]) -> u32 {
    assert_eq!(src.len(), 4);
    // Safe decode; avoids the mem::uninitialized() inside the
    // read_single! macro this previously expanded to.
    let mut buf = [0u8; 4];
    buf.copy_from_slice(src);
    u32::from_be_bytes(buf)
}
/// Read the value of a vector of bytes as a u64 value in little-endian format.
///
/// # Panics
///
/// Panics if `src` is not exactly 8 bytes long.
#[inline]
pub fn read_u64_le(src: &[u8]) -> u64 {
    assert_eq!(src.len(), 8);
    // Safe decode; avoids the mem::uninitialized() inside the
    // read_single! macro this previously expanded to.
    let mut buf = [0u8; 8];
    buf.copy_from_slice(src);
    u64::from_le_bytes(buf)
}
/// Read the value of a vector of bytes as a u64 value in big-endian format.
///
/// # Panics
///
/// Panics if `src` is not exactly 8 bytes long.
#[inline]
pub fn read_u64_be(src: &[u8]) -> u64 {
    assert_eq!(src.len(), 8);
    // Safe decode; avoids the mem::uninitialized() inside the
    // read_single! macro this previously expanded to.
    let mut buf = [0u8; 8];
    buf.copy_from_slice(src);
    u64::from_be_bytes(buf)
}

View File

@ -1,44 +0,0 @@
use core::ptr;
/// Bulk decode: copy `$src`'s bytes over `$dst`'s element storage, then
/// normalize every element with `$which` (`to_le`/`to_be`).
macro_rules! read_slice {
    ($src:expr, $dst:expr, $size:expr, $which:ident) => ({
        // Each destination element consumes exactly $size source bytes.
        assert_eq!($size*$dst.len(), $src.len());
        // SAFETY: $dst is a properly aligned &mut [uN] slice, so writing
        // its backing storage byte-by-byte is in-bounds; src and dst are
        // distinct slices, so the regions cannot overlap.
        unsafe {
            ptr::copy_nonoverlapping(
                $src.as_ptr(),
                $dst.as_mut_ptr() as *mut u8,
                $src.len());
        }
        // Convert each element from the stored byte order to native order.
        for v in $dst.iter_mut() {
            *v = v.$which();
        }
    });
}
/// Read a vector of bytes into a vector of u32s. The values are read in
/// little-endian format.
///
/// # Panics
///
/// Panics if `src.len() != 4 * dst.len()`.
#[inline]
pub fn read_u32v_le(dst: &mut [u32], src: &[u8]) {
    assert_eq!(4 * dst.len(), src.len());
    // Safe per-word decode; replaces the unsafe raw byte copy done by
    // the read_slice! macro this previously expanded to.
    for (v, chunk) in dst.iter_mut().zip(src.chunks_exact(4)) {
        let mut buf = [0u8; 4];
        buf.copy_from_slice(chunk);
        *v = u32::from_le_bytes(buf);
    }
}
/// Read a vector of bytes into a vector of u32s. The values are read in
/// big-endian format.
///
/// # Panics
///
/// Panics if `src.len() != 4 * dst.len()`.
#[inline]
pub fn read_u32v_be(dst: &mut [u32], src: &[u8]) {
    assert_eq!(4 * dst.len(), src.len());
    // Safe per-word decode; replaces the unsafe raw byte copy done by
    // the read_slice! macro this previously expanded to.
    for (v, chunk) in dst.iter_mut().zip(src.chunks_exact(4)) {
        let mut buf = [0u8; 4];
        buf.copy_from_slice(chunk);
        *v = u32::from_be_bytes(buf);
    }
}
/// Read a vector of bytes into a vector of u64s. The values are read in
/// little-endian format.
///
/// # Panics
///
/// Panics if `src.len() != 8 * dst.len()`.
#[inline]
pub fn read_u64v_le(dst: &mut [u64], src: &[u8]) {
    assert_eq!(8 * dst.len(), src.len());
    // Safe per-word decode; replaces the unsafe raw byte copy done by
    // the read_slice! macro this previously expanded to.
    for (v, chunk) in dst.iter_mut().zip(src.chunks_exact(8)) {
        let mut buf = [0u8; 8];
        buf.copy_from_slice(chunk);
        *v = u64::from_le_bytes(buf);
    }
}
/// Read a vector of bytes into a vector of u64s. The values are read in
/// big-endian format.
///
/// # Panics
///
/// Panics if `src.len() != 8 * dst.len()`.
#[inline]
pub fn read_u64v_be(dst: &mut [u64], src: &[u8]) {
    assert_eq!(8 * dst.len(), src.len());
    // Safe per-word decode; replaces the unsafe raw byte copy done by
    // the read_slice! macro this previously expanded to.
    for (v, chunk) in dst.iter_mut().zip(src.chunks_exact(8)) {
        let mut buf = [0u8; 8];
        buf.copy_from_slice(chunk);
        *v = u64::from_be_bytes(buf);
    }
}

View File

@ -1,39 +0,0 @@
use core::{mem, ptr};
/// Encode one integer `$n` into the `$size`-byte slice `$dst`, first
/// normalizing byte order with `$which` (`to_le` or `to_be`).
macro_rules! write_single {
    ($dst:expr, $n:expr, $size:expr, $which:ident) => ({
        assert!($size == $dst.len());
        // to_ne_bytes() yields the same native-order bytes that the old
        // mem::transmute produced, without unsafe.
        let bytes = $n.$which().to_ne_bytes();
        $dst.copy_from_slice(&bytes);
    });
}
/// Write a u32 into a vector, which must be 4 bytes long. The value is written
/// in little-endian format.
///
/// # Panics
///
/// Panics if `dst` is not exactly 4 bytes long.
#[inline]
pub fn write_u32_le(dst: &mut [u8], n: u32) {
    assert_eq!(dst.len(), 4);
    // Safe encode; replaces the transmute-based write_single! expansion.
    dst.copy_from_slice(&n.to_le_bytes());
}
/// Write a u32 into a vector, which must be 4 bytes long. The value is written
/// in big-endian format.
///
/// # Panics
///
/// Panics if `dst` is not exactly 4 bytes long.
#[inline]
pub fn write_u32_be(dst: &mut [u8], n: u32) {
    assert_eq!(dst.len(), 4);
    // Safe encode; replaces the transmute-based write_single! expansion.
    dst.copy_from_slice(&n.to_be_bytes());
}
/// Write a u64 into a vector, which must be 8 bytes long. The value is written
/// in little-endian format.
///
/// # Panics
///
/// Panics if `dst` is not exactly 8 bytes long.
#[inline]
pub fn write_u64_le(dst: &mut [u8], n: u64) {
    assert_eq!(dst.len(), 8);
    // Safe encode; replaces the transmute-based write_single! expansion.
    dst.copy_from_slice(&n.to_le_bytes());
}
/// Write a u64 into a vector, which must be 8 bytes long. The value is written
/// in big-endian format.
///
/// # Panics
///
/// Panics if `dst` is not exactly 8 bytes long.
#[inline]
pub fn write_u64_be(dst: &mut [u8], n: u64) {
    assert_eq!(dst.len(), 8);
    // Safe encode; replaces the transmute-based write_single! expansion.
    dst.copy_from_slice(&n.to_be_bytes());
}

View File

@ -1,46 +0,0 @@
use core::{ptr, mem};
/// Bulk encode: write each `$ty` element of `$src` into `$dst` as `$size`
/// bytes, normalizing byte order with `$which` (`to_le`/`to_be`).
///
/// Rewritten without `unsafe`: the old implementation transmuted the
/// `&mut [u8]` destination into `&mut [$ty]`, which violates alignment
/// requirements and is undefined behavior. The chunked safe encode below
/// produces byte-for-byte identical output.
macro_rules! write_slice {
    ($src:expr, $dst:expr, $ty:ty, $size:expr, $which:ident) => ({
        assert!($size == mem::size_of::<$ty>());
        assert_eq!($dst.len(), $size*$src.len());
        for (chunk, v) in $dst.chunks_exact_mut($size).zip($src.iter()) {
            // $which() then native bytes == the bytes the old copy +
            // in-place swap produced.
            chunk.copy_from_slice(&v.$which().to_ne_bytes());
        }
    });
}
/// Write a vector of u32s into a vector of bytes. The values are written in
/// little-endian format.
///
/// # Panics
///
/// Panics if `dst.len() != 4 * src.len()`.
#[inline]
pub fn write_u32v_le(dst: &mut [u8], src: &[u32]) {
    assert_eq!(dst.len(), 4 * src.len());
    // Safe per-word encode; replaces the alignment-violating transmute
    // inside the write_slice! macro this previously expanded to.
    for (chunk, &v) in dst.chunks_exact_mut(4).zip(src.iter()) {
        chunk.copy_from_slice(&v.to_le_bytes());
    }
}
/// Write a vector of u32s into a vector of bytes. The values are written in
/// big-endian format.
///
/// # Panics
///
/// Panics if `dst.len() != 4 * src.len()`.
#[inline]
pub fn write_u32v_be(dst: &mut [u8], src: &[u32]) {
    assert_eq!(dst.len(), 4 * src.len());
    // Safe per-word encode; replaces the alignment-violating transmute
    // inside the write_slice! macro this previously expanded to.
    for (chunk, &v) in dst.chunks_exact_mut(4).zip(src.iter()) {
        chunk.copy_from_slice(&v.to_be_bytes());
    }
}
/// Write a vector of u64s into a vector of bytes. The values are written in
/// little-endian format.
///
/// # Panics
///
/// Panics if `dst.len() != 8 * src.len()`.
#[inline]
pub fn write_u64v_le(dst: &mut [u8], src: &[u64]) {
    assert_eq!(dst.len(), 8 * src.len());
    // Safe per-word encode; replaces the alignment-violating transmute
    // inside the write_slice! macro this previously expanded to.
    for (chunk, &v) in dst.chunks_exact_mut(8).zip(src.iter()) {
        chunk.copy_from_slice(&v.to_le_bytes());
    }
}
/// Write a vector of u64s into a vector of bytes. The values are written in
/// big-endian format.
///
/// (The previous doc comment wrongly said "little-endian"; the function
/// has always used the big-endian conversion.)
///
/// # Panics
///
/// Panics if `dst.len() != 8 * src.len()`.
#[inline]
pub fn write_u64v_be(dst: &mut [u8], src: &[u64]) {
    assert_eq!(dst.len(), 8 * src.len());
    // Safe per-word encode; replaces the alignment-violating transmute
    // inside the write_slice! macro this previously expanded to.
    for (chunk, &v) in dst.chunks_exact_mut(8).zip(src.iter()) {
        chunk.copy_from_slice(&v.to_be_bytes());
    }
}

View File

@ -1 +0,0 @@
{"files":{"Cargo.toml":"b3667b1e1a3985dd2c9e7873f6945c2d7163ed7da95569f40c2097285a325ec4","LICENSE-APACHE":"a9040321c3712d8fd0b09cf52b17445de04a23a10165049ae187cd39e5c86be5","LICENSE-MIT":"9e0dfd2dd4173a530e238cb6adb37aa78c34c6bc7444e0e10c1ab5d8881f63ba","src/dev.rs":"c824f834fa8b8c729024e4ec61138e89c26a56bfb6b50295600dddb5ff8fff62","src/digest.rs":"6710ac33c80e6159a2396839794fc76a61b94ab573516a69486457b3e291c793","src/errors.rs":"cff5bf2350bc109ad4f08caacf6780ff1e7016d9995f0847e84e96a8e31ab9d5","src/lib.rs":"bf4e93ebd066513001f3d6d77024ae8addf4df4fd89f76549fd1b73df386f3e4"},"package":"03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"}

View File

@ -1,32 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "digest"
version = "0.7.6"
authors = ["RustCrypto Developers"]
description = "Traits for cryptographic hash functions"
documentation = "https://docs.rs/digest"
keywords = ["digest", "crypto", "hash"]
categories = ["cryptography", "no-std"]
license = "MIT/Apache-2.0"
repository = "https://github.com/RustCrypto/traits"
[package.metadata.docs.rs]
features = ["std"]
[dependencies.generic-array]
version = "0.9"
[features]
dev = []
std = []
[badges.travis-ci]
repository = "RustCrypto/traits"

View File

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,25 +0,0 @@
Copyright (c) 2017 Artyom Pavlov
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@ -1,171 +0,0 @@
use super::{Digest, Input, VariableOutput, ExtendableOutput, XofReader};
use core::fmt::Debug;
/// One named digest test vector: an input message and the digest
/// output expected for it.
pub struct Test {
    /// Vector identifier; used by new_tests! to locate the data files.
    pub name: &'static str,
    /// Raw message bytes fed to the digest.
    pub input: &'static [u8],
    /// Expected digest output bytes.
    pub output: &'static [u8],
}
/// Build an array of `Test` vectors from a list of names: each `$name`
/// loads `data/$name.input.bin` and `data/$name.output.bin` at compile
/// time via `include_bytes!`.
#[macro_export]
macro_rules! new_tests {
    ( $( $name:expr ),* ) => {
        [$(
            Test {
                name: $name,
                input: include_bytes!(concat!("data/", $name, ".input.bin")),
                output: include_bytes!(concat!("data/", $name, ".output.bin")),
            },
        )*]
    };
    // Trailing-comma form delegates to the main arm.
    ( $( $name:expr ),+, ) => (new_tests!($($name),+))
}
/// Run fixed-output digest `D` over every test vector, feeding each
/// message both whole and in progressively halving pieces.
pub fn main_test<D: Digest + Debug + Clone>(tests: &[Test]) {
    // Whole-message pass.
    for case in tests {
        let mut hasher = D::default();
        hasher.input(case.input);
        assert_eq!(hasher.result()[..], case.output[..]);
    }
    // Incremental pass: repeatedly feed the first half of what remains.
    for case in tests {
        let mut hasher = D::default();
        let total = case.input.len();
        let mut remaining = total;
        while remaining > 0 {
            let take = (remaining + 1) / 2;
            let start = total - remaining;
            hasher.input(&case.input[start..start + take]);
            remaining -= take;
        }
        assert_eq!(hasher.result()[..], case.output[..]);
    }
}
/// Run variable-output digest `D` over every test vector, feeding each
/// message both whole and in progressively halving pieces.
pub fn variable_test<D>(tests: &[Test])
    where D: Input + VariableOutput + Clone + Debug
{
    let mut buf = [0u8; 1024];
    // Whole-message pass.
    for case in tests {
        let out_len = case.output.len();
        let mut hasher = D::new(out_len).unwrap();
        hasher.process(case.input);
        let out = hasher.variable_result(&mut buf[..out_len]).unwrap();
        assert_eq!(out[..], case.output[..]);
    }
    // Incremental pass: repeatedly feed the first half of what remains.
    for case in tests {
        let out_len = case.output.len();
        let mut hasher = D::new(out_len).unwrap();
        let total = case.input.len();
        let mut remaining = total;
        while remaining > 0 {
            let take = (remaining + 1) / 2;
            let start = total - remaining;
            hasher.process(&case.input[start..start + take]);
            remaining -= take;
        }
        let out = hasher.variable_result(&mut buf[..out_len]).unwrap();
        assert_eq!(out[..], case.output[..]);
    }
}
/// Run extendable-output digest `D` over every test vector three ways:
/// whole message, halving pieces, and byte-at-a-time output reads.
pub fn xof_test<D>(tests: &[Test])
    where D: Input + ExtendableOutput + Default + Debug + Clone
{
    let mut buf = [0u8; 1024];
    // Whole-message pass.
    for case in tests {
        let mut hasher = D::default();
        hasher.process(case.input);
        let out = &mut buf[..case.output.len()];
        hasher.xof_result().read(out);
        assert_eq!(out[..], case.output[..]);
    }
    // Incremental pass: repeatedly feed the first half of what remains.
    for case in tests {
        let mut hasher = D::default();
        let total = case.input.len();
        let mut remaining = total;
        while remaining > 0 {
            let take = (remaining + 1) / 2;
            let start = total - remaining;
            hasher.process(&case.input[start..start + take]);
            remaining -= take;
        }
        let out = &mut buf[..case.output.len()];
        hasher.xof_result().read(out);
        assert_eq!(out[..], case.output[..]);
    }
    // Read the output from the XOF reader one byte at a time.
    for case in tests {
        let mut hasher = D::default();
        hasher.process(case.input);
        let mut reader = hasher.xof_result();
        let out = &mut buf[..case.output.len()];
        for byte in out.chunks_mut(1) {
            reader.read(byte);
        }
        assert_eq!(out[..], case.output[..]);
    }
}
/// Classic "one million 'a's" vector: hash 1,000,000 `b'a'` bytes and
/// compare against `expected`.
///
/// The input is fed as 50,000 ten-byte blocks followed by one 500,000-byte
/// block, exercising both many-small-update and single-large-update paths.
pub fn one_million_a<D: Digest + Default + Debug + Clone>(expected: &[u8]) {
    let mut hasher = D::default();
    let small = [b'a'; 10];
    for _ in 0..50000 {
        hasher.input(&small);
    }
    hasher.input(&[b'a'; 500000]);
    assert_eq!(hasher.result()[..], expected[..]);
}
/// Macro for generating digest throughput benchmarks.
///
/// Two forms:
/// - `bench_digest!(name, Engine, block_size)` emits a single `#[bench]`
///   function that repeatedly hashes a zero-filled buffer of `block_size`
///   bytes.
/// - `bench_digest!(Engine)` emits a standard suite of six benchmarks at
///   block sizes from 16 bytes to 16 KiB (it also pulls in the nightly
///   `test` crate and the `Bencher` type the generated functions need).
#[macro_export]
macro_rules! bench_digest {
    ($name:ident, $engine:path, $bs:expr) => {
        #[bench]
        fn $name(b: &mut Bencher) {
            let mut d = <$engine>::default();
            // Zero-filled input; contents don't matter for throughput.
            let data = [0; $bs];
            b.iter(|| {
                d.input(&data);
            });
            // Lets the harness report MB/s instead of raw ns/iter.
            b.bytes = $bs;
        }
    };
    ($engine:path) => {
        extern crate test;
        use test::Bencher;
        use digest::Digest;
        bench_digest!(bench1_16, $engine, 1<<4);
        bench_digest!(bench2_64, $engine, 1<<6);
        bench_digest!(bench3_256, $engine, 1<<8);
        bench_digest!(bench4_1k, $engine, 1<<10);
        bench_digest!(bench5_4k, $engine, 1<<12);
        bench_digest!(bench6_16k, $engine, 1<<14);
    }
}

View File

@ -1,86 +0,0 @@
use super::{Input, BlockInput, FixedOutput};
use generic_array::GenericArray;
#[cfg(feature = "std")]
use std::io;
// Shorthand for a fixed-size digest value of length `N`.
type Output<N> = GenericArray<u8, N>;
/// The `Digest` trait specifies an interface common for digest functions.
///
/// It's a convenience wrapper around `Input`, `FixedOutput`, `BlockInput` and
/// `Default` traits. It also provides additional convenience methods.
pub trait Digest: Input + BlockInput + FixedOutput + Default {
    /// Create new hasher instance
    fn new() -> Self {
        Self::default()
    }
    /// Digest input data. This method can be called repeatedly
    /// for use with streaming messages.
    fn input(&mut self, input: &[u8]) {
        self.process(input);
    }
    /// Retrieve the digest result. This method consumes digest instance.
    fn result(self) -> Output<Self::OutputSize> {
        self.fixed_result()
    }
    /// Convenience function to compute hash of the `data`. It will handle
    /// hasher creation, data feeding and finalization.
    ///
    /// Example:
    ///
    /// ```rust,ignore
    /// println!("{:x}", sha2::Sha256::digest(b"Hello world"));
    /// ```
    #[inline]
    fn digest(data: &[u8]) -> Output<Self::OutputSize> {
        let mut hasher = Self::default();
        hasher.process(data);
        hasher.fixed_result()
    }
    /// Convenience function to compute hash of the string. It's equivalent to
    /// `digest(input_string.as_bytes())`.
    #[inline]
    fn digest_str(str: &str) -> Output<Self::OutputSize> {
        Self::digest(str.as_bytes())
    }
    /// Convenience function which takes `std::io::Read` as a source and computes
    /// value of digest function `D`, e.g. SHA-2, SHA-3, BLAKE2, etc. using 8 KB
    /// blocks.
    ///
    /// Usage example:
    ///
    /// ```rust,ignore
    /// use std::fs;
    /// use sha2::{Sha256, Digest};
    ///
    /// let mut file = fs::File::open("Cargo.toml")?;
    /// let result = Sha256::digest_reader(&mut file)?;
    /// println!("{:x}", result);
    /// ```
    #[cfg(feature = "std")]
    #[inline]
    fn digest_reader(source: &mut io::Read)
        -> io::Result<Output<Self::OutputSize>>
    {
        let mut hasher = Self::default();
        // 8 KiB read buffer, reused for every read.
        let mut buf = [0u8; 8 * 1024];
        loop {
            let len = match source.read(&mut buf) {
                // `Ok(0)` means EOF: finalize and return the digest.
                Ok(0) => return Ok(hasher.result()),
                Ok(len) => len,
                // Retry reads interrupted by a signal instead of failing.
                Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue,
                // Propagate any other I/O error to the caller.
                Err(e) => Err(e)?,
            };
            hasher.process(&buf[..len]);
        }
    }
}
// Blanket impl: any type with the three component traits plus `Default`
// automatically gets the `Digest` convenience API.
impl<D: Input + FixedOutput + BlockInput + Default> Digest for D {}

View File

@ -1,37 +0,0 @@
use core::fmt;
#[cfg(feature = "std")]
use std::error;
/// The error type for variable hasher initialization
// Zero-sized marker type: carries no data, only the fact that
// construction with the requested output size failed.
#[derive(Clone, Copy, Debug, Default)]
pub struct InvalidOutputSize;
/// The error type for variable hasher result
// Zero-sized marker type returned when the caller-provided output
// buffer has the wrong length.
#[derive(Clone, Copy, Debug, Default)]
pub struct InvalidBufferLength;
// User-facing rendering of `InvalidOutputSize` ("{}" formatting).
impl fmt::Display for InvalidOutputSize {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // The type is a zero-sized marker, so the message is fixed.
        formatter.write_str("invalid output size")
    }
}
// User-facing rendering of `InvalidBufferLength` ("{}" formatting).
impl fmt::Display for InvalidBufferLength {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // The type is a zero-sized marker, so the message is fixed.
        formatter.write_str("invalid buffer length")
    }
}
#[cfg(feature = "std")]
impl error::Error for InvalidOutputSize {
fn description(&self) -> &str {
"invalid output size"
}
}
#[cfg(feature = "std")]
impl error::Error for InvalidBufferLength {
fn description(&self) -> &str {
"invalid buffer size"
}
}

View File

@ -1,98 +0,0 @@
//! This crate provides traits for describing functionality of cryptographic hash
//! functions.
//!
//! By default, std functionality in this crate is disabled (e.g. the method for
//! hashing `Read`ers). To enable it, turn on the `std` feature in your
//! `Cargo.toml` for this crate.
#![cfg_attr(not(feature = "std"), no_std)]
pub extern crate generic_array;
#[cfg(feature = "std")]
use std as core;
use generic_array::{GenericArray, ArrayLength};
mod digest;
mod errors;
#[cfg(feature = "dev")]
pub mod dev;
pub use errors::{InvalidOutputSize, InvalidBufferLength};
pub use digest::Digest;
// `process` is chosen to not overlap with `input` method in the digest trait
// change it on trait alias stabilization
/// Trait for processing input data
pub trait Input {
    /// Digest input data. This method can be called repeatedly
    /// for use with streaming messages.
    fn process(&mut self, input: &[u8]);
}
/// Trait to indicate that digest function processes data in blocks of size
/// `BlockSize`. Main usage of this trait is for implementing HMAC generically.
pub trait BlockInput {
    // Block size in bytes, expressed as a `typenum` unsigned integer.
    type BlockSize: ArrayLength<u8>;
}
/// Trait for returning digest result with the fixed size
pub trait FixedOutput {
    // Output size in bytes, expressed as a `typenum` unsigned integer.
    type OutputSize: ArrayLength<u8>;
    /// Retrieve the digest result. This method consumes digest instance.
    fn fixed_result(self) -> GenericArray<u8, Self::OutputSize>;
}
/// The error type for variable digest output
#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct InvalidLength;
/// Trait for returning digest result with the variable size
pub trait VariableOutput: core::marker::Sized {
    /// Create new hasher instance with given output size. Will return
    /// `Err(InvalidLength)` in case if hasher can not work with the given
    /// output size. Will always return an error if output size equals to zero.
    fn new(output_size: usize) -> Result<Self, InvalidLength>;
    /// Get output size of the hasher instance provided to the `new` method
    fn output_size(&self) -> usize;
    /// Retrieve the digest result into provided buffer. Length of the buffer
    /// must be equal to output size provided to the `new` method, otherwise
    /// `Err(InvalidLength)` will be returned
    fn variable_result(self, buffer: &mut [u8]) -> Result<&[u8], InvalidLength>;
}
/// Trait for describing readers which are used to extract extendable output
/// from the resulting state of hash function.
pub trait XofReader {
    /// Read output into the `buffer`. Can be called unlimited number of times.
    fn read(&mut self, buffer: &mut [u8]);
}
/// Trait which describes extendable output (XOF) of hash functions. Using this
/// trait you first need to get structure which implements `XofReader`, using
/// which you can read extendable output.
pub trait ExtendableOutput {
    // Reader type used to stream an arbitrary amount of output.
    type Reader: XofReader;
    /// Finalize hash function and return XOF reader
    fn xof_result(self) -> Self::Reader;
}
/// Macro for defining opaque `Debug` implementation. It will use the following
/// format: "HasherName { ... }". While it's convenient to have it
/// (e.g. for including in other structs), it could be undesirable to leak
/// internal state, which can happen for example through careless logging.
#[macro_export]
macro_rules! impl_opaque_debug {
    ($state:ty) => {
        impl ::core::fmt::Debug for $state {
            fn fmt(&self, f: &mut ::core::fmt::Formatter)
                -> Result<(), ::core::fmt::Error>
            {
                // Always print `TypeName { ... }` — never the internal bytes.
                // (`{{`/`}}` are escaped braces in the format string.)
                write!(f, concat!(stringify!($state), " {{ ... }}"))
            }
        }
    }
}

View File

@ -1 +1 @@
{"files":{"COPYING":"01c266bced4a434da0051174d6bee16a4c82cf634e2679b6155d40d75012390f","Cargo.toml":"ef181d3a88c48c794a7f1a97974c83045bfa956eb5b1b8e5efc1f8c92938a135","LICENSE-MIT":"0f96a83840e146e43c0ec96a22ec1f392e0680e6c1226e6f3ba87e0740af850f","Makefile":"db1787c5c7d2daea87d92c0549976a18bbe0601acb2ab5bd8dc5edb9f2b46e63","README.md":"3b46f46ffd466fc3aa36becb0ce194820b4669ca75d0c186620abef6115317e0","UNLICENSE":"7e12e5df4bae12cb21581ba157ced20e1986a0508dd10d0e8a4ab9a4cf94e85c","completions/docopt-wordlist.bash":"213bf1baea244eeb32af3a24a9ad895212cb538e3cdaee3bfed842b11a2a64d8","ctags.rust":"3d128d3cc59f702e68953ba2fe6c3f46bc6991fc575308db060482d5da0c79f3","examples/cargo.rs":"6a5012a3359e574a61607eca0c15add23ea9e312e8f20fb90d6438740483fefd","examples/cp.rs":"35e705c59968c22a965b7ba9afc4b7a3af5d411e929432b2fb6bd2ed08a7c9ce","examples/decode.rs":"85f5033cf6450a771d6be2af819718d316b92fb98b201e247cdbe0eb39039487","examples/hashmap.rs":"9066a7b7192e15b3b667702519645d31926a371bc54ab8d70b211d98458d5a8d","examples/optional_command.rs":"44d8dda079e237ac140b1d81d34d065cb2427a6edb4e60eadaa2c8ceaff0831c","examples/verbose_multiple.rs":"3279c76c7f3bde135deca90085b9f9d5a86ea3bd619e57ddfed35f4200bb5f4a","scripts/mk-testcases":"649f37d391650175c8462171f7a98fce81735c9317630a5eb13db532ddb22976","session.vim":"1d51566b00f8ff2021d56948c1c55f123959f3e24879a6ad9337eccb11fc8fe9","src/dopt.rs":"4bbdd90fca8f71e4d898bc0656d09dce219e255d4b92671716da8fce5180572a","src/lib.rs":"e916a13a1e7f16566b768f4b9906d2d1a7c31a0524767350b1063d9255a03997","src/parse.rs":"e67d4a5ee95a9fcc1aa5c84e78605f32a1c2bbc5e772de9109ae1ce5fac6f16a","src/synonym.rs":"152b89b6f755222f81ebb63fd3d372d7407aa8046522fc1dcc2e40f417cfc65b","src/test/mod.rs":"1f3eb58d5740f8789dea7bdb2815b1313e948c6f5de9ea6d79cad5bbed484114","src/test/suggestions.rs":"51e044db856a424ef12d2bc2eb541ae922b93d81ac5548767c9c638ccd87d388","src/test/testcases.docopt":"13fcd2948a5625b76f93b98ac7b6cb53ef70c119fc2c5f85d2cb67e56bd4e9c3","src/test/testcases
.rs":"cbecfab0c82249a7d8ad193ad5e9e10f45a7a41b37e69cfc025a9cdc6c213f04","src/wordlist.rs":"45ccc3441d1abf072c2079f15b7f5a7af68bd2989c99a8acd5554133fa8db7fa"},"package":"d8acd393692c503b168471874953a2531df0e9ab77d0b6bbc582395743300a4a"}
{"files":{"COPYING":"01c266bced4a434da0051174d6bee16a4c82cf634e2679b6155d40d75012390f","Cargo.toml":"9b11b3f077cb37e9314fd44a9c385662bebd96f6858e0886e28b00ab1beee421","LICENSE-MIT":"0f96a83840e146e43c0ec96a22ec1f392e0680e6c1226e6f3ba87e0740af850f","README.md":"9a9d39001433160095de7a297b51052c91c9ef7f25a94d6f67ebe50343977926","UNLICENSE":"7e12e5df4bae12cb21581ba157ced20e1986a0508dd10d0e8a4ab9a4cf94e85c","completions/docopt-wordlist.bash":"213bf1baea244eeb32af3a24a9ad895212cb538e3cdaee3bfed842b11a2a64d8","examples/cargo.rs":"6a5012a3359e574a61607eca0c15add23ea9e312e8f20fb90d6438740483fefd","examples/cp.rs":"35e705c59968c22a965b7ba9afc4b7a3af5d411e929432b2fb6bd2ed08a7c9ce","examples/decode.rs":"85f5033cf6450a771d6be2af819718d316b92fb98b201e247cdbe0eb39039487","examples/hashmap.rs":"9066a7b7192e15b3b667702519645d31926a371bc54ab8d70b211d98458d5a8d","examples/optional_command.rs":"44d8dda079e237ac140b1d81d34d065cb2427a6edb4e60eadaa2c8ceaff0831c","examples/verbose_multiple.rs":"3279c76c7f3bde135deca90085b9f9d5a86ea3bd619e57ddfed35f4200bb5f4a","src/dopt.rs":"df0132f0e4ddc4f0bc6fa5789cf24b5fe01d1a91338dc1431bf93c5a1d6ffc11","src/lib.rs":"e7089315c3ebd4d2774bad8b5a6b2899db6348a44f88dc4253c840bbb389f147","src/parse.rs":"e67d4a5ee95a9fcc1aa5c84e78605f32a1c2bbc5e772de9109ae1ce5fac6f16a","src/synonym.rs":"152b89b6f755222f81ebb63fd3d372d7407aa8046522fc1dcc2e40f417cfc65b","src/test/mod.rs":"1f3eb58d5740f8789dea7bdb2815b1313e948c6f5de9ea6d79cad5bbed484114","src/test/suggestions.rs":"51e044db856a424ef12d2bc2eb541ae922b93d81ac5548767c9c638ccd87d388","src/test/testcases.docopt":"13fcd2948a5625b76f93b98ac7b6cb53ef70c119fc2c5f85d2cb67e56bd4e9c3","src/test/testcases.rs":"cbecfab0c82249a7d8ad193ad5e9e10f45a7a41b37e69cfc025a9cdc6c213f04","src/wordlist.rs":"45ccc3441d1abf072c2079f15b7f5a7af68bd2989c99a8acd5554133fa8db7fa"},"package":"db2906c2579b5b7207fc1e328796a9a8835dc44e22dbe8e460b1d636f9a7b225"}

View File

@ -12,8 +12,9 @@
[package]
name = "docopt"
version = "0.8.3"
version = "1.0.2"
authors = ["Andrew Gallant <jamslam@gmail.com>"]
exclude = ["/.travis.yml", "/Makefile", "/ctags.rust", "/scripts/*", "/session.vim"]
description = "Command line argument parsing."
homepage = "https://github.com/docopt/docopt.rs"
documentation = "http://burntsushi.net/rustdoc/docopt/"
@ -35,7 +36,7 @@ doc = false
version = "1"
[dependencies.regex]
version = "0.2"
version = "1.0.3"
[dependencies.serde]
version = "1.0"
@ -44,4 +45,4 @@ version = "1.0"
version = "1.0"
[dependencies.strsim]
version = "0.6"
version = "0.7"

View File

@ -1,18 +0,0 @@
all:
@echo Nothing to do
docs: $(LIB_FILES)
cargo doc
# WTF is rustdoc doing?
in-dir ./target/doc fix-perms
rscp ./target/doc/* gopher:~/www/burntsushi.net/rustdoc/
src/test/testcases.rs: src/test/testcases.docopt scripts/mk-testcases
./scripts/mk-testcases ./src/test/testcases.docopt > ./src/test/testcases.rs
ctags:
ctags --recurse --options=ctags.rust --languages=Rust
push:
git push github master
git push origin master

View File

@ -26,15 +26,11 @@ This crate is fully compatible with Cargo. Just add it to your `Cargo.toml`:
```toml
[dependencies]
docopt = "0.8"
docopt = "1"
serde = "1.0" # if you're using `derive(Deserialize)`
serde_derive = "1.0" # if you're using `derive(Deserialize)`
```
If you want to use the macro, then add `docopt_macros = "0.8"` instead.
Note that the **`docopt!` macro only works on a nightly Rust compiler** because
it is a compiler plugin.
### Quick example
@ -87,49 +83,6 @@ fn main() {
}
```
Here is the same example, but with the use of the `docopt!` macro, which will
*generate a struct for you*. Note that this uses a compiler plugin, so it only
works on a **nightly Rust compiler**:
```rust
#![feature(plugin)]
#![plugin(docopt_macros)]
#[macro_use]
extern crate serde_derive;
extern crate docopt;
use docopt::Docopt;
docopt!(Args derive Debug, "
Naval Fate.
Usage:
naval_fate.py ship new <name>...
naval_fate.py ship <name> move <x> <y> [--speed=<kn>]
naval_fate.py ship shoot <x> <y>
naval_fate.py mine (set|remove) <x> <y> [--moored | --drifting]
naval_fate.py (-h | --help)
naval_fate.py --version
Options:
-h --help Show this screen.
--version Show version.
--speed=<kn> Speed in knots [default: 10].
--moored Moored (anchored) mine.
--drifting Drifting mine.
");
fn main() {
let args: Args = Args::docopt().deserialize().unwrap_or_else(|e| e.exit());
println!("{:?}", args);
}
```
The `Args` struct has one static method defined for it: `docopt`. The method
returns a normal `Docopt` value, which can be used to set configuration
options, `argv` and parse or decode command line arguments.
### Struct field name mapping
@ -145,125 +98,6 @@ build => cmd_build
```
### Data validation example
Here's another example that shows how to specify the types of your arguments:
```rust
#![feature(plugin)]
#![plugin(docopt_macros)]
#[macro_use]
extern crate serde_derive;
extern crate docopt;
docopt!(Args, "Usage: add <x> <y>", arg_x: i32, arg_y: i32);
fn main() {
let args: Args = Args::docopt().deserialize().unwrap_or_else(|e| e.exit());
println!("x: {}, y: {}", args.arg_x, args.arg_y);
}
```
In this example, specific type annotations were added. They will be
automatically inserted into the generated struct. You can override as many (or
as few) fields as you want. If you don't specify a type, then one of `bool`,
`u64`, `String` or `Vec<String>` will be chosen depending on the type of
argument. In this case, both `arg_x` and `arg_y` would have been `String`.
If any value cannot be decoded into a value with the right type, then an error
will be shown to the user.
And of course, you don't need the macro to do this. You can do the same thing
with a manually written struct too.
### Modeling `rustc`
Here's a selected subset for some of `rustc`'s options. This also shows how to
restrict values to a list of choices via an `enum` type and demonstrates more
Docopt features.
```rust
#![feature(plugin)]
#![plugin(docopt_macros)]
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate docopt;
use serde::de;
docopt!(Args derive Debug, "
Usage: rustc [options] [--cfg SPEC... -L PATH...] INPUT
rustc (--help | --version)
Options:
-h, --help Show this message.
--version Show the version of rustc.
--cfg SPEC Configure the compilation environment.
-L PATH Add a directory to the library search path.
--emit TYPE Configure the output that rustc will produce.
Valid values: asm, ir, bc, obj, link.
--opt-level LEVEL Optimize with possible levels 0-3.
", flag_opt_level: Option<OptLevel>, flag_emit: Option<Emit>);
#[derive(Deserialize, Debug)]
enum Emit { Asm, Ir, Bc, Obj, Link }
#[derive(Debug)]
enum OptLevel { Zero, One, Two, Three }
impl<'de> de::Deserialize<'de> for OptLevel {
fn deserialize<D>(deserializer: D) -> Result<OptLevel, D::Error>
where D: de::Deserializer<'de>
{
let level = match u8::deserialize(deserializer)? {
0 => OptLevel::Zero,
1 => OptLevel::One,
2 => OptLevel::Two,
3 => OptLevel::Three,
n => {
let value = de::Unexpected::Unsigned(n as u64);
let msg = "expected an integer between 0 and 3";
return Err(de::Error::invalid_value(value, &msg));
}
};
Ok(level)
}
}
fn main() {
let args: Args = Args::docopt().deserialize().unwrap_or_else(|e| e.exit());
println!("{:?}", args);
}
```
### Viewing the generated struct
Generating a struct is pretty magical, but if you want, you can look at it by
expanding all macros. Say you wrote the above example for `Usage: add <x> <y>`
into a file called `add.rs`. Then running:
```bash
rustc -L path/containing/docopt/lib -Z unstable-options --pretty=expanded add.rs
```
Will show all macros expanded. The `path/containing/docopt/lib` is usually
`target/debug/deps` or `target/release/deps` in a cargo project. In the generated code, you should be
able to find the generated struct:
```rust
struct Args {
pub arg_x: int,
pub arg_y: int,
}
```
### Traditional Docopt API
The reference implementation of Docopt returns a Python dictionary with names

View File

@ -1,11 +0,0 @@
--langdef=Rust
--langmap=Rust:.rs
--regex-Rust=/^[ \t]*(#\[[^\]]\][ \t]*)*(pub[ \t]+)?(extern[ \t]+)?("[^"]+"[ \t]+)?(unsafe[ \t]+)?fn[ \t]+([a-zA-Z0-9_]+)/\6/f,functions,function definitions/
--regex-Rust=/^[ \t]*(pub[ \t]+)?type[ \t]+([a-zA-Z0-9_]+)/\2/T,types,type definitions/
--regex-Rust=/^[ \t]*(pub[ \t]+)?enum[ \t]+([a-zA-Z0-9_]+)/\2/g,enum,enumeration names/
--regex-Rust=/^[ \t]*(pub[ \t]+)?struct[ \t]+([a-zA-Z0-9_]+)/\2/s,structure names/
--regex-Rust=/^[ \t]*(pub[ \t]+)?mod[ \t]+([a-zA-Z0-9_]+)/\2/m,modules,module names/
--regex-Rust=/^[ \t]*(pub[ \t]+)?static[ \t]+([a-zA-Z0-9_]+)/\2/c,consts,static constants/
--regex-Rust=/^[ \t]*(pub[ \t]+)?trait[ \t]+([a-zA-Z0-9_]+)/\2/t,traits,traits/
--regex-Rust=/^[ \t]*(pub[ \t]+)?impl([ \t\n]+<.*>)?[ \t]+([a-zA-Z0-9_]+)/\3/i,impls,trait implementations/
--regex-Rust=/^[ \t]*macro_rules![ \t]+([a-zA-Z0-9_]+)/\1/d,macros,macro definitions/

View File

@ -1,80 +0,0 @@
#!/usr/bin/env python2
from __future__ import absolute_import, division, print_function
import argparse
import json
import re
retests = re.compile('(.*?)"""(.*?)(r"""|\s*$)', re.DOTALL)
reinvokes = re.compile('(.+?$)(.+?)\s*(\$|\Z)', re.DOTALL | re.MULTILINE)
p = argparse.ArgumentParser(
description="Outputs src/test/testcases.rs to stdout")
p.add_argument("testcases", metavar="FILE",
help="The testcases.docopt language agnostic test suite.")
args = p.parse_args()
with open(args.testcases) as f:
alltests = f.read()
alltests = re.sub('^r"""', '', alltests)
alltests = re.sub('^\s*#.*$', '', alltests, flags=re.MULTILINE)
tests = [] # [{usage, args, expect}] (expect is None ==> user-error)
for m in retests.finditer(alltests):
usage, invokes = m.group(1).strip(), m.group(2).strip()
assert invokes.startswith('$'), 'Bad test: "%s"' % invokes
invokes = re.sub('^\$', '', invokes)
for mi in reinvokes.finditer(invokes):
invoke, expect = mi.group(1).strip(), mi.group(2).strip()
err = expect.startswith('"user-error"')
tests.append({
'usage': usage,
'args': invoke.split()[1:],
'expect': None if err else json.loads(expect),
})
def show_test(i, t):
def show_expect(e):
kvs = []
for k, v in e.iteritems():
kvs.append('("%s", %s)' % (k, show_value(v)))
return ', '.join(kvs)
def show_value(v):
if v is None:
return 'Plain(None)'
elif isinstance(v, basestring):
return 'Plain(Some("%s".to_string()))' % v
elif isinstance(v, bool):
return 'Switch(%s)' % ('true' if v else 'false')
elif isinstance(v, int):
return 'Counted(%d)' % v
elif isinstance(v, list):
elms = ', '.join(['"%s".to_string()' % el for el in v])
return 'List(vec!(%s))' % elms
else:
raise ValueError('Unrecognized value: "%s" (type: %s)'
% (v, type(v)))
args = ', '.join(['"%s"' % arg for arg in t['args']])
if t['expect'] is None:
return 'test_user_error!(test_%d_testcases, "%s", &[%s]);' \
% (i, t['usage'], args)
else:
expect = show_expect(t['expect'])
return 'test_expect!(test_%d_testcases, "%s", &[%s], vec!(%s));' \
% (i, t['usage'], args, expect)
print(
"""// !!! ATTENTION !!!
// This file is automatically generated by `scripts/mk-testcases`.
// Please do not edit this file directly!
use Value::{{Switch, Counted, Plain, List}};
use test::{{get_args, map_from_alist, same_args}};
{tests}
""".format(tests='\n\n'.join([show_test(i, t) for i, t in enumerate(tests)])))

View File

@ -1,3 +0,0 @@
au BufWritePost *.rs silent!make ctags > /dev/null 2>&1
" let g:syntastic_rust_rustc_fname = "src/lib.rs"
" let g:syntastic_rust_rustc_args = "--no-trans"

View File

@ -307,7 +307,6 @@ impl Docopt {
}
#[doc(hidden)]
// Exposed for use in `docopt_macros`.
pub fn parser(&self) -> &Parser {
&self.p
}

View File

@ -182,53 +182,6 @@
//! assert_eq!(args.flag_emit, Some(Emit::Ir));
//! # }
//! ```
//!
//! # The `docopt!` macro
//!
//! This package comes bundled with an additional crate, `docopt_macros`,
//! which provides a `docopt!` syntax extension. Its purpose is to automate
//! the creation of a Rust struct from a Docopt usage string. In particular,
//! this provides a single point of truth about the definition of command line
//! arguments in your program.
//!
//! Another advantage of using the macro is that errors in your Docopt usage
//! string will be caught at compile time. Stated differently, your program
//! will not compile with an invalid Docopt usage string.
//!
//! The example above using type based decoding can be simplified to this:
//!
//! ```ignore
//! #![feature(plugin)]
//! #![plugin(docopt_macros)]
//!
//! extern crate serde;
//!
//! extern crate docopt;
//!
//! // Write the Docopt usage string with the `docopt!` macro.
//! docopt!(Args, "
//! Usage: cp [-a] <source> <dest>
//! cp [-a] <source>... <dir>
//!
//! Options:
//! -a, --archive Copy everything.
//! ")
//!
//! fn main() {
//! let argv = || vec!["cp", "-a", "file1", "file2", "dest/"];
//!
//! // Your `Args` struct has a single static method defined on it,
//! // `docopt`, which will return a normal `Docopt` value.
//! let args: Args = Args::docopt().deserialize().unwrap_or_else(|e| e.exit());
//!
//! // Now access your argv values.
//! fn s(x: &str) -> String { x.to_string() }
//! assert!(args.flag_archive);
//! assert_eq!(args.arg_source, vec![s("file1"), s("file2")]);
//! assert_eq!(args.arg_dir, s("dest/"));
//! assert_eq!(args.arg_dest, s(""));
//! }
//! ```
#![crate_name = "docopt"]
#![doc(html_root_url = "http://burntsushi.net/rustdoc/docopt")]

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"13e445b6bc53bf1ea2379fd2ec33205daa9b1b74d5a41e4dd9ea8cb966185c5a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"4b02d7ebfb188b1f2cbef20ade3082197046ccaa89e49d2bcdef6102d48919e3","measurements.txt":"b209f98f2bc696904a48829e86952f4f09b59e4e685f7c12087c59d05ed31829","src/bitvec.rs":"c6c66c348776ff480b7ff6e4a3e0f64554a4194266f614408b45b5e3c324ec0a","src/lib.rs":"294aabf6fb846dbe35bba837d70ea9115f20cd808995a318c0fccb05f91d096f","src/snapshot_vec.rs":"abc649bb42dc8592741b02d53ba1ed5f6ad64710b971070872b0c42665d73c93","src/unify/backing_vec.rs":"7d57036ce671169893d069f94454f1c4b95104517ffd62859f180d80cbe490e5","src/unify/mod.rs":"9fc90951778be635fbbf4fba8b3a0a4eb21e2c955660f019377465ac773b9563","src/unify/tests.rs":"b18974faeebdf2c03e82035fe7281bf4db3360ab10ce34b1d3441547836b19f2"},"package":"88dc8393b3c7352f94092497f6b52019643e493b6b890eb417cdb7c46117e621"}
{"files":{"Cargo.toml":"479607f839ec311f5b48754953c3b33bd2d170d2bcb3008e904bef21ecad7a6d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"0621878e61f0d0fda054bcbe02df75192c28bde1ecc8289cbd86aeba2dd72720","README.md":"4b02d7ebfb188b1f2cbef20ade3082197046ccaa89e49d2bcdef6102d48919e3","measurements.txt":"b209f98f2bc696904a48829e86952f4f09b59e4e685f7c12087c59d05ed31829","src/bitvec.rs":"c6c66c348776ff480b7ff6e4a3e0f64554a4194266f614408b45b5e3c324ec0a","src/lib.rs":"294aabf6fb846dbe35bba837d70ea9115f20cd808995a318c0fccb05f91d096f","src/snapshot_vec.rs":"4935b5eb8292e3b62d662ca01d0baef3d6b341f5479811d837e872ebc3c8518f","src/unify/backing_vec.rs":"0bcc5cd9d7a8bf1fd17e87b6388eeb0f9e3c21ed280fa31ab5dcc4a1ee69fcca","src/unify/mod.rs":"1bed8bd5c8f804fb4c225ed309940ede74b05e58d64f6182ff1ea3895c18a930","src/unify/tests.rs":"b18974faeebdf2c03e82035fe7281bf4db3360ab10ce34b1d3441547836b19f2"},"package":"25b4e5febb25f08c49f1b07dc33a182729a6b21edfb562b5aef95f78e0dbe5bb"}

View File

@ -12,7 +12,7 @@
[package]
name = "ena"
version = "0.9.3"
version = "0.10.1"
authors = ["Niko Matsakis <niko@alum.mit.edu>"]
description = "Union-find, congruence closure, and other unification code. Based on code from rustc."
homepage = "https://github.com/nikomatsakis/ena"

View File

@ -75,13 +75,20 @@ pub trait SnapshotVecDelegate {
fn reverse(values: &mut Vec<Self::Value>, action: Self::Undo);
}
impl<D: SnapshotVecDelegate> SnapshotVec<D> {
pub fn new() -> SnapshotVec<D> {
// HACK(eddyb) manual impl avoids `Default` bound on `D`.
impl<D: SnapshotVecDelegate> Default for SnapshotVec<D> {
fn default() -> Self {
SnapshotVec {
values: Vec::new(),
undo_log: Vec::new(),
}
}
}
impl<D: SnapshotVecDelegate> SnapshotVec<D> {
pub fn new() -> Self {
Self::default()
}
pub fn with_capacity(c: usize) -> SnapshotVec<D> {
SnapshotVec {
@ -275,8 +282,12 @@ impl<D: SnapshotVecDelegate> Extend<D::Value> for SnapshotVec<D> {
where
T: IntoIterator<Item = D::Value>,
{
for item in iterable {
self.push(item);
let initial_len = self.values.len();
self.values.extend(iterable);
let final_len = self.values.len();
if self.in_snapshot() {
self.undo_log.extend((initial_len..final_len).map(|len| NewElem(len)));
}
}
}

View File

@ -7,18 +7,19 @@ use std::marker::PhantomData;
use super::{VarValue, UnifyKey, UnifyValue};
#[allow(dead_code)] // rustc BUG
type Key<S> = <S as UnificationStore>::Key;
#[allow(type_alias_bounds)]
type Key<S: UnificationStore> = <S as UnificationStore>::Key;
/// Largely internal trait implemented by the unification table
/// backing store types. The most common such type is `InPlace`,
/// which indicates a standard, mutable unification table.
pub trait UnificationStore: ops::Index<usize, Output = VarValue<Key<Self>>> + Clone {
pub trait UnificationStore:
ops::Index<usize, Output = VarValue<Key<Self>>> + Clone + Default
{
type Key: UnifyKey<Value = Self::Value>;
type Value: UnifyValue;
type Snapshot;
fn new() -> Self;
fn start_snapshot(&mut self) -> Self::Snapshot;
fn rollback_to(&mut self, snapshot: Self::Snapshot);
@ -51,16 +52,18 @@ pub struct InPlace<K: UnifyKey> {
values: sv::SnapshotVec<Delegate<K>>
}
// HACK(eddyb) manual impl avoids `Default` bound on `K`.
impl<K: UnifyKey> Default for InPlace<K> {
fn default() -> Self {
InPlace { values: sv::SnapshotVec::new() }
}
}
impl<K: UnifyKey> UnificationStore for InPlace<K> {
type Key = K;
type Value = K::Value;
type Snapshot = sv::Snapshot;
#[inline]
fn new() -> Self {
InPlace { values: sv::SnapshotVec::new() }
}
#[inline]
fn start_snapshot(&mut self) -> Self::Snapshot {
self.values.start_snapshot()
@ -132,17 +135,20 @@ pub struct Persistent<K: UnifyKey> {
values: DVec<VarValue<K>>
}
// HACK(eddyb) manual impl avoids `Default` bound on `K`.
#[cfg(feature = "persistent")]
impl<K: UnifyKey> Default for Persistent<K> {
fn default() -> Self {
Persistent { values: DVec::new() }
}
}
#[cfg(feature = "persistent")]
impl<K: UnifyKey> UnificationStore for Persistent<K> {
type Key = K;
type Value = K::Value;
type Snapshot = Self;
#[inline]
fn new() -> Self {
Persistent { values: DVec::new() }
}
#[inline]
fn start_snapshot(&mut self) -> Self::Snapshot {
self.clone()

View File

@ -174,18 +174,20 @@ pub struct VarValue<K: UnifyKey> { // FIXME pub
/// cloning the table is an O(1) operation.
/// - This implies that ordinary operations are quite a bit slower though.
/// - Requires the `persistent` feature be selected in your Cargo.toml file.
#[derive(Clone, Debug)]
#[derive(Clone, Debug, Default)]
pub struct UnificationTable<S: UnificationStore> {
/// Indicates the current value of each key.
values: S,
}
/// A unification table that uses an "in-place" vector.
pub type InPlaceUnificationTable<K> = UnificationTable<InPlace<K>>;
#[allow(type_alias_bounds)]
pub type InPlaceUnificationTable<K: UnifyKey> = UnificationTable<InPlace<K>>;
/// A unification table that uses a "persistent" vector.
#[cfg(feature = "persistent")]
pub type PersistentUnificationTable<K> = UnificationTable<Persistent<K>>;
#[allow(type_alias_bounds)]
pub type PersistentUnificationTable<K: UnifyKey> = UnificationTable<Persistent<K>>;
/// At any time, users may snapshot a unification table. The changes
/// made during the snapshot may either be *committed* or *rolled back*.
@ -237,9 +239,7 @@ impl<K: UnifyKey> VarValue<K> {
impl<S: UnificationStore> UnificationTable<S> {
pub fn new() -> Self {
UnificationTable {
values: S::new()
}
Self::default()
}
/// Starts a new snapshot. Each snapshot must be either

View File

@ -1 +0,0 @@
{"files":{"Cargo.toml":"87ff65d640c137c26d338f96e21e769af1e1b2e7fa615b40a1bcc755448bb118","LICENSE":"ad4fcfaf8d5b12b97409c137a03d4a4e4b21024c65c54f976cc3b609c1bd5b0f","README.md":"9a1a45416eac57050036b13df6ec84d21d555e820726af3c782896bd9d37d94b","rustfmt.toml":"2a298b4ce1fe6e16b8f281a0035567b8eb15042ed3062729fd28224f29c2f75a","src/arr.rs":"cc1ea0a9ef6a524b90767cc8a89f6b939394a2948a645ed313c0bf5ce5a258a4","src/hex.rs":"bfbf304fb4dea6f7edc0569b38bf2ac7657ce089c5761891321722509e3b5076","src/impl_serde.rs":"805885478728b3c205b842d46deb377b7dd6dd4c4c50254064431f49f0981a2a","src/impls.rs":"8c54e294a82a2bf344bdcb9949b8a84903fb65698d6b1b1e0ab9f5e7847be64f","src/iter.rs":"e52217f04d0dc046f13ef2e3539b90eabd4d55bb85cf40f76ba0bf86d5e55ef0","src/lib.rs":"da93fa505eee94b40fce0fe98e26ed3bb4d2bc4d4869af01598b6e54fc9c0f8d","tests/hex.rs":"e909bc0564e7d52c5fcf172dfc0fac7085010c6a21d38581bf73a54ab2e256e1","tests/import_name.rs":"1235729ecbde47fc9a38b3bf35c750a53ed55e3cf967c9d2b24fd759dc9e9e0c","tests/mod.rs":"f4100c5338906c038636f98f4d2b3d272f59580662afa89d915eafb96d7bbcf9"},"package":"ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"}

View File

@ -1,32 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "generic-array"
version = "0.9.0"
authors = ["Bartłomiej Kamiński <fizyk20@gmail.com>"]
description = "Generic types implementing functionality of arrays"
documentation = "http://fizyk20.github.io/generic-array/generic_array/"
license = "MIT"
repository = "https://github.com/fizyk20/generic-array.git"
[lib]
name = "generic_array"
[dependencies.typenum]
version = "1.9"
[dependencies.serde]
version = "1.0"
optional = true
default-features = false
[dev-dependencies.serde_json]
version = "1.0"

View File

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 Bartłomiej Kamiński
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,34 +0,0 @@
[![Crates.io](https://img.shields.io/crates/v/generic-array.svg)](https://crates.io/crates/generic-array)
[![Build Status](https://travis-ci.org/fizyk20/generic-array.svg?branch=master)](https://travis-ci.org/fizyk20/generic-array)
# generic-array
This crate implements generic array types for Rust.
[Documentation](http://fizyk20.github.io/generic-array/generic_array/)
## Usage
The Rust arrays `[T; N]` are problematic in that they can't be used generically with respect to `N`, so for example this won't work:
```rust
struct Foo<N> {
data: [i32; N]
}
```
**generic-array** defines a new trait `ArrayLength<T>` and a struct `GenericArray<T, N: ArrayLength<T>>`, which let the above be implemented as:
```rust
struct Foo<N: ArrayLength<i32>> {
data: GenericArray<i32, N>
}
```
To actually define a type implementing `ArrayLength`, you can use unsigned integer types defined in [typenum](https://github.com/paholg/typenum) crate - for example, `GenericArray<T, U5>` would work almost like `[T; 5]` :)
In version 0.1.1 an `arr!` macro was introduced, allowing for creation of arrays as shown below:
```rust
let array = arr![u32; 1, 2, 3];
assert_eq!(array[2], 3);
```

View File

@ -1,3 +0,0 @@
reorder_imports = true
reorder_imported_names = true
use_try_shorthand = true

View File

@ -1,57 +0,0 @@
//! Implementation for `arr!` macro.
use super::ArrayLength;
use core::ops::Add;
use typenum::U1;
/// Helper trait for `arr!` macro: type-level addition of two array lengths.
pub trait AddLength<T, N: ArrayLength<T>>: ArrayLength<T> {
    /// Resulting length (`Self + N`).
    type Output: ArrayLength<T>;
}

// Any two typenum lengths whose sum is itself a valid array length can be
// added; the result is simply typenum's `Add` output.
impl<T, N1, N2> AddLength<T, N2> for N1
where
    N1: ArrayLength<T> + Add<N2>,
    N2: ArrayLength<T>,
    <N1 as Add<N2>>::Output: ArrayLength<T>,
{
    type Output = <N1 as Add<N2>>::Output;
}

/// Helper type for `arr!` macro: the length `U` incremented by one.
pub type Inc<T, U> = <U as AddLength<T, U1>>::Output;
#[doc(hidden)]
#[macro_export]
macro_rules! arr_impl {
    // Base case: all elements have been shifted into the accumulator and the
    // type-level length $N now matches the element count, so the plain array
    // can be transmuted into GenericArray<T, N> (identical layout).
    ($T:ty; $N:ty, [$($x:expr),*], []) => ({
        unsafe { $crate::transmute::<_, $crate::GenericArray<$T, $N>>([$($x),*]) }
    });
    // One element left, empty accumulator: move it over and bump the length.
    ($T:ty; $N:ty, [], [$x1:expr]) => (
        arr_impl!($T; $crate::arr::Inc<$T, $N>, [$x1 as $T], [])
    );
    // Several elements left, empty accumulator.
    ($T:ty; $N:ty, [], [$x1:expr, $($x:expr),+]) => (
        arr_impl!($T; $crate::arr::Inc<$T, $N>, [$x1 as $T], [$($x),*])
    );
    // One element left, non-empty accumulator: final shift.
    ($T:ty; $N:ty, [$($y:expr),+], [$x1:expr]) => (
        arr_impl!($T; $crate::arr::Inc<$T, $N>, [$($y),*, $x1 as $T], [])
    );
    // General step: shift one element from the pending list into the
    // accumulator, incrementing the type-level length as we go.
    ($T:ty; $N:ty, [$($y:expr),+], [$x1:expr, $($x:expr),+]) => (
        arr_impl!($T; $crate::arr::Inc<$T, $N>, [$($y),*, $x1 as $T], [$($x),*])
    );
}
/// Macro allowing for easy generation of Generic Arrays.
/// Example: `let test = arr![u32; 1, 2, 3];`
#[macro_export]
macro_rules! arr {
    // Empty, typed array: `arr![u32;]` yields a zero-length GenericArray.
    ($T:ty;) => ({
        unsafe { $crate::transmute::<[$T; 0], $crate::GenericArray<$T, $crate::typenum::U0>>([]) }
    });
    // Typed, non-empty: delegate to `arr_impl!`, which counts the elements
    // at the type level starting from U0.
    ($T:ty; $($x:expr),*) => (
        arr_impl!($T; $crate::typenum::U0, [], [$($x),*])
    );
    // Tolerate a trailing comma by re-expanding without it.
    ($($x:expr,)+) => (arr![$($x),*]);
    // No element type given: the stray `"""…"` expansion is a deliberate
    // syntax error whose message tells the user a type is required.
    () => ("""Macro requires a type, e.g. `let array = arr![u32; 1, 2, 3];`")
}

View File

@ -1,101 +0,0 @@
//! Generic array are commonly used as a return value for hash digests, so
//! it's a good idea to allow to hexlify them easily. This module implements
//! `std::fmt::LowerHex` and `std::fmt::UpperHex` traits.
//!
//! Example:
//!
//! ```rust
//! # #[macro_use]
//! # extern crate generic_array;
//! # extern crate typenum;
//! # fn main() {
//! let array = arr![u8; 10, 20, 30];
//! assert_eq!(format!("{:x}", array), "0a141e");
//! # }
//! ```
//!
use {ArrayLength, GenericArray};
use core::fmt;
use core::ops::Add;
use core::str;
use typenum::*;
static LOWER_CHARS: &'static [u8] = b"0123456789abcdef";
static UPPER_CHARS: &'static [u8] = b"0123456789ABCDEF";
impl<T: ArrayLength<u8>> fmt::LowerHex for GenericArray<u8, T>
where
    T: Add<T>,
    <T as Add<T>>::Output: ArrayLength<u8>,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `{:.P}` precision limits how many *input bytes* are hexlified.
        let max_digits = f.precision().unwrap_or_else(|| self.len());
        if T::to_usize() < 1024 {
            // For small arrays use a stack allocated
            // buffer of 2x number of bytes
            let mut res = GenericArray::<u8, Sum<T, T>>::default();
            for (i, c) in self.iter().take(max_digits).enumerate() {
                res[i * 2] = LOWER_CHARS[(c >> 4) as usize];
                res[i * 2 + 1] = LOWER_CHARS[(c & 0xF) as usize];
            }
            // SAFETY: `res` holds only ASCII hex digits, so it is valid UTF-8.
            f.write_str(
                unsafe { str::from_utf8_unchecked(&res[..max_digits * 2]) },
            )?;
        } else {
            // For large array use chunks of up to 1024 bytes (2048 hex chars)
            let mut buf = [0u8; 2048];
            for chunk in self[..max_digits].chunks(1024) {
                for (i, c) in chunk.iter().enumerate() {
                    buf[i * 2] = LOWER_CHARS[(c >> 4) as usize];
                    buf[i * 2 + 1] = LOWER_CHARS[(c & 0xF) as usize];
                }
                // SAFETY: the written prefix contains only ASCII hex digits.
                f.write_str(unsafe {
                    str::from_utf8_unchecked(&buf[..chunk.len() * 2])
                })?;
            }
        }
        Ok(())
    }
}
// Mirror image of the LowerHex impl above, using the uppercase digit table.
impl<T: ArrayLength<u8>> fmt::UpperHex for GenericArray<u8, T>
where
    T: Add<T>,
    <T as Add<T>>::Output: ArrayLength<u8>,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `{:.P}` precision limits how many *input bytes* are hexlified.
        let max_digits = f.precision().unwrap_or_else(|| self.len());
        if T::to_usize() < 1024 {
            // For small arrays use a stack allocated
            // buffer of 2x number of bytes
            let mut res = GenericArray::<u8, Sum<T, T>>::default();
            for (i, c) in self.iter().take(max_digits).enumerate() {
                res[i * 2] = UPPER_CHARS[(c >> 4) as usize];
                res[i * 2 + 1] = UPPER_CHARS[(c & 0xF) as usize];
            }
            // SAFETY: `res` holds only ASCII hex digits, so it is valid UTF-8.
            f.write_str(
                unsafe { str::from_utf8_unchecked(&res[..max_digits * 2]) },
            )?;
        } else {
            // For large array use chunks of up to 1024 bytes (2048 hex chars)
            let mut buf = [0u8; 2048];
            for chunk in self[..max_digits].chunks(1024) {
                for (i, c) in chunk.iter().enumerate() {
                    buf[i * 2] = UPPER_CHARS[(c >> 4) as usize];
                    buf[i * 2 + 1] = UPPER_CHARS[(c & 0xF) as usize];
                }
                // SAFETY: the written prefix contains only ASCII hex digits.
                f.write_str(unsafe {
                    str::from_utf8_unchecked(&buf[..chunk.len() * 2])
                })?;
            }
        }
        Ok(())
    }
}

View File

@ -1,68 +0,0 @@
//! Serde serialization/deserialization implementation
use {ArrayLength, GenericArray};
use core::fmt;
use core::marker::PhantomData;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde::de::{self, SeqAccess, Visitor};
impl<T, N> Serialize for GenericArray<T, N>
where
    T: Serialize,
    N: ArrayLength<T>,
{
    /// Serializes the array as a plain sequence of its elements.
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.collect_seq(self.iter())
    }
}
// Visitor used by the `Deserialize` impl below. The `PhantomData` fields only
// pin down `T` and `N`; the visitor itself carries no runtime state.
struct GAVisitor<T, N> {
    _t: PhantomData<T>,
    _n: PhantomData<N>,
}

impl<'de, T, N> Visitor<'de> for GAVisitor<T, N>
where
    T: Deserialize<'de> + Default,
    N: ArrayLength<T>,
{
    type Value = GenericArray<T, N>;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("struct GenericArray")
    }

    fn visit_seq<A>(self, mut seq: A) -> Result<GenericArray<T, N>, A::Error>
    where
        A: SeqAccess<'de>,
    {
        // Start from a default-initialized array and overwrite every slot;
        // a sequence with fewer than N elements is rejected with an
        // invalid_length error at the first missing position.
        let mut result = GenericArray::default();
        for i in 0..N::to_usize() {
            result[i] = seq.next_element()?.ok_or_else(
                || de::Error::invalid_length(i, &self),
            )?;
        }
        Ok(result)
    }
}
impl<'de, T, N> Deserialize<'de> for GenericArray<T, N>
where
    T: Deserialize<'de> + Default,
    N: ArrayLength<T>,
{
    /// Deserializes the array from any serde sequence of exactly `N`
    /// elements, delegating validation to `GAVisitor::visit_seq`.
    fn deserialize<D>(deserializer: D) -> Result<GenericArray<T, N>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let visitor = GAVisitor {
            _t: PhantomData,
            _n: PhantomData,
        };
        deserializer.deserialize_seq(visitor)
    }
}

View File

@ -1,171 +0,0 @@
use super::{ArrayLength, GenericArray};
use core::borrow::{Borrow, BorrowMut};
use core::cmp::Ordering;
use core::fmt::{self, Debug};
use core::hash::{Hash, Hasher};
impl<T: Default, N> Default for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    /// Fills every slot with `T::default()`.
    #[inline]
    fn default() -> Self {
        Self::generate(|_| T::default())
    }
}

impl<T: Clone, N> Clone for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn clone(&self) -> GenericArray<T, N> {
        self.map_ref(|x| x.clone())
    }
}

// Copy is only available when the backing storage type is itself Copy.
impl<T: Copy, N> Copy for GenericArray<T, N>
where
    N: ArrayLength<T>,
    N::ArrayType: Copy,
{
}

// The comparison, formatting, borrowing, and hashing impls below all defer
// to the slice view provided by Deref, so a GenericArray compares/hashes
// exactly like the equivalent `[T]`.
impl<T: PartialEq, N> PartialEq for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn eq(&self, other: &Self) -> bool {
        **self == **other
    }
}

impl<T: Eq, N> Eq for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
}

impl<T: PartialOrd, N> PartialOrd for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn partial_cmp(&self, other: &GenericArray<T, N>) -> Option<Ordering> {
        PartialOrd::partial_cmp(self.as_slice(), other.as_slice())
    }
}

impl<T: Ord, N> Ord for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn cmp(&self, other: &GenericArray<T, N>) -> Ordering {
        Ord::cmp(self.as_slice(), other.as_slice())
    }
}

impl<T: Debug, N> Debug for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        self[..].fmt(fmt)
    }
}

impl<T, N> Borrow<[T]> for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn borrow(&self) -> &[T] {
        &self[..]
    }
}

impl<T, N> BorrowMut<[T]> for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn borrow_mut(&mut self) -> &mut [T] {
        &mut self[..]
    }
}

impl<T, N> AsRef<[T]> for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn as_ref(&self) -> &[T] {
        &self[..]
    }
}

impl<T, N> AsMut<[T]> for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn as_mut(&mut self) -> &mut [T] {
        &mut self[..]
    }
}

impl<T: Hash, N> Hash for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        Hash::hash(&self[..], state)
    }
}
macro_rules! impl_from {
    ($($n: expr => $ty: ty),*) => {
        $(
            // `[T; n]` and `GenericArray<T, Un>` have identical layout, so
            // copy the bytes over and `forget` the source so its elements
            // are not dropped twice.
            impl<T> From<[T; $n]> for GenericArray<T, $ty> {
                fn from(arr: [T; $n]) -> Self {
                    use core::mem::{forget, transmute_copy};
                    let x = unsafe { transmute_copy(&arr) };
                    forget(arr);
                    x
                }
            }
        )*
    }
}

// Conversions from native arrays are provided for lengths 1 through 32.
impl_from! {
    1 => ::typenum::U1,
    2 => ::typenum::U2,
    3 => ::typenum::U3,
    4 => ::typenum::U4,
    5 => ::typenum::U5,
    6 => ::typenum::U6,
    7 => ::typenum::U7,
    8 => ::typenum::U8,
    9 => ::typenum::U9,
    10 => ::typenum::U10,
    11 => ::typenum::U11,
    12 => ::typenum::U12,
    13 => ::typenum::U13,
    14 => ::typenum::U14,
    15 => ::typenum::U15,
    16 => ::typenum::U16,
    17 => ::typenum::U17,
    18 => ::typenum::U18,
    19 => ::typenum::U19,
    20 => ::typenum::U20,
    21 => ::typenum::U21,
    22 => ::typenum::U22,
    23 => ::typenum::U23,
    24 => ::typenum::U24,
    25 => ::typenum::U25,
    26 => ::typenum::U26,
    27 => ::typenum::U27,
    28 => ::typenum::U28,
    29 => ::typenum::U29,
    30 => ::typenum::U30,
    31 => ::typenum::U31,
    32 => ::typenum::U32
}

View File

@ -1,117 +0,0 @@
//! `GenericArray` iterator implementation.
use super::{ArrayLength, GenericArray};
use core::{cmp, ptr};
use core::mem::ManuallyDrop;
/// An iterator that moves out of a `GenericArray`
pub struct GenericArrayIter<T, N: ArrayLength<T>> {
    // Invariants: index <= index_back <= N
    // Only values in array[index..index_back] are alive at any given time.
    // Values from array[..index] and array[index_back..] are already moved/dropped.
    array: ManuallyDrop<GenericArray<T, N>>,
    index: usize,
    index_back: usize,
}

impl<T, N> IntoIterator for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    type Item = T;
    type IntoIter = GenericArrayIter<T, N>;

    fn into_iter(self) -> Self::IntoIter {
        // ManuallyDrop transfers drop responsibility to the iterator, whose
        // Drop impl drops only the elements that have not yet been yielded.
        GenericArrayIter {
            array: ManuallyDrop::new(self),
            index: 0,
            index_back: N::to_usize(),
        }
    }
}
impl<T, N> Drop for GenericArrayIter<T, N>
where
    N: ArrayLength<T>,
{
    fn drop(&mut self) {
        // Drop values that are still alive. Everything outside
        // array[index..index_back] was already moved out by next()/next_back()
        // (or dropped by nth()), so only the live middle range is cleaned up.
        for p in &mut self.array[self.index..self.index_back] {
            unsafe {
                ptr::drop_in_place(p);
            }
        }
    }
}
impl<T, N> Iterator for GenericArrayIter<T, N>
where
    N: ArrayLength<T>,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        if self.len() > 0 {
            // SAFETY: len() > 0 implies index < index_back <= N, so the slot
            // is in bounds and alive; ptr::read moves the value out and the
            // index bump marks that slot dead for the Drop impl.
            unsafe {
                let p = self.array.get_unchecked(self.index);
                self.index += 1;
                Some(ptr::read(p))
            }
        } else {
            None
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: the remaining count is always known.
        let len = self.len();
        (len, Some(len))
    }

    fn count(self) -> usize {
        self.len()
    }

    fn nth(&mut self, n: usize) -> Option<T> {
        // First consume values prior to the nth.
        let ndrop = cmp::min(n, self.len());
        for p in &mut self.array[self.index..self.index + ndrop] {
            // Advance before dropping so a panicking destructor still leaves
            // the index invariant valid for the iterator's own Drop impl.
            self.index += 1;
            unsafe {
                ptr::drop_in_place(p);
            }
        }
        self.next()
    }

    fn last(mut self) -> Option<T> {
        // Note, everything else will correctly drop first as `self` leaves scope.
        self.next_back()
    }
}
impl<T, N> DoubleEndedIterator for GenericArrayIter<T, N>
where
    N: ArrayLength<T>,
{
    fn next_back(&mut self) -> Option<T> {
        if self.len() > 0 {
            // Shrink the live range from the back, then move the value out.
            // SAFETY: len() > 0 guarantees index_back - 1 >= index, so the
            // slot is in bounds and still alive.
            self.index_back -= 1;
            unsafe {
                let p = self.array.get_unchecked(self.index_back);
                Some(ptr::read(p))
            }
        } else {
            None
        }
    }
}

impl<T, N> ExactSizeIterator for GenericArrayIter<T, N>
where
    N: ArrayLength<T>,
{
    fn len(&self) -> usize {
        // Number of elements not yet yielded from either end.
        self.index_back - self.index
    }
}

View File

@ -1,464 +0,0 @@
//! This crate implements a structure that can be used as a generic array type.
//! Core Rust array types `[T; N]` can't be used generically with
//! respect to `N`, so for example this:
//!
//! ```{should_fail}
//! struct Foo<T, N> {
//! data: [T; N]
//! }
//! ```
//!
//! won't work.
//!
//! **generic-array** exports a `GenericArray<T,N>` type, which lets
//! the above be implemented as:
//!
//! ```
//! # use generic_array::{ArrayLength, GenericArray};
//! struct Foo<T, N: ArrayLength<T>> {
//! data: GenericArray<T,N>
//! }
//! ```
//!
//! The `ArrayLength<T>` trait is implemented by default for
//! [unsigned integer types](../typenum/uint/index.html) from
//! [typenum](../typenum/index.html).
//!
//! For ease of use, an `arr!` macro is provided - example below:
//!
//! ```
//! # #[macro_use]
//! # extern crate generic_array;
//! # extern crate typenum;
//! # fn main() {
//! let array = arr![u32; 1, 2, 3];
//! assert_eq!(array[2], 3);
//! # }
//! ```
//#![deny(missing_docs)]
#![no_std]
pub extern crate typenum;
#[cfg(feature = "serde")]
extern crate serde;
mod hex;
mod impls;
#[cfg(feature = "serde")]
pub mod impl_serde;
use core::{mem, ptr, slice};
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
pub use core::mem::transmute;
use core::ops::{Deref, DerefMut};
use typenum::bit::{B0, B1};
use typenum::uint::{UInt, UTerm, Unsigned};
#[cfg_attr(test, macro_use)]
pub mod arr;
pub mod iter;
pub use iter::GenericArrayIter;
/// Trait making `GenericArray` work, marking types to be used as length of an array
///
/// `unsafe` because implementors promise `ArrayType` really holds exactly
/// `Self::to_usize()` contiguous `T`s — Deref relies on that for the raw
/// slice construction.
pub unsafe trait ArrayLength<T>: Unsigned {
    /// Associated type representing the array type for the number
    type ArrayType;
}

// Zero-length array: the backing storage is the unit type.
unsafe impl<T> ArrayLength<T> for UTerm {
    #[doc(hidden)]
    type ArrayType = ();
}
/// Internal type used to generate a struct of appropriate size
///
/// Storage for an even length 2n: two back-to-back halves of size n.
/// #[repr(C)] keeps the halves contiguous with no reordering.
#[allow(dead_code)]
#[repr(C)]
#[doc(hidden)]
pub struct GenericArrayImplEven<T, U> {
    parent1: U,
    parent2: U,
    _marker: PhantomData<T>,
}

impl<T: Clone, U: Clone> Clone for GenericArrayImplEven<T, U> {
    fn clone(&self) -> GenericArrayImplEven<T, U> {
        GenericArrayImplEven {
            parent1: self.parent1.clone(),
            parent2: self.parent2.clone(),
            _marker: PhantomData,
        }
    }
}

impl<T: Copy, U: Copy> Copy for GenericArrayImplEven<T, U> {}

/// Internal type used to generate a struct of appropriate size
///
/// Storage for an odd length 2n + 1: two halves of size n plus one
/// trailing element.
#[allow(dead_code)]
#[repr(C)]
#[doc(hidden)]
pub struct GenericArrayImplOdd<T, U> {
    parent1: U,
    parent2: U,
    data: T,
}

impl<T: Clone, U: Clone> Clone for GenericArrayImplOdd<T, U> {
    fn clone(&self) -> GenericArrayImplOdd<T, U> {
        GenericArrayImplOdd {
            parent1: self.parent1.clone(),
            parent2: self.parent2.clone(),
            data: self.data.clone(),
        }
    }
}

impl<T: Copy, U: Copy> Copy for GenericArrayImplOdd<T, U> {}

// typenum's UInt<N, B0> is the binary number 2N; its storage is two copies
// of N's storage. UInt<N, B1> is 2N + 1 and appends one element.
unsafe impl<T, N: ArrayLength<T>> ArrayLength<T> for UInt<N, B0> {
    #[doc(hidden)]
    type ArrayType = GenericArrayImplEven<T, N::ArrayType>;
}

unsafe impl<T, N: ArrayLength<T>> ArrayLength<T> for UInt<N, B1> {
    #[doc(hidden)]
    type ArrayType = GenericArrayImplOdd<T, N::ArrayType>;
}
/// Struct representing a generic array - `GenericArray<T, N>` works like [T; N]
#[allow(dead_code)]
pub struct GenericArray<T, U: ArrayLength<T>> {
    data: U::ArrayType,
}

impl<T, N> Deref for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    type Target = [T];

    fn deref(&self) -> &[T] {
        // SAFETY: the #[repr(C)] impl structs nest into exactly
        // N::to_usize() contiguous, properly aligned `T`s starting at
        // the address of self (guaranteed by the unsafe ArrayLength impls).
        unsafe { slice::from_raw_parts(self as *const Self as *const T, N::to_usize()) }
    }
}

impl<T, N> DerefMut for GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    fn deref_mut(&mut self) -> &mut [T] {
        // SAFETY: same layout argument as `deref`, with exclusive access
        // via &mut self.
        unsafe { slice::from_raw_parts_mut(self as *mut Self as *mut T, N::to_usize()) }
    }
}
// Panic-safe helper that incrementally fills an uninitialized array.
// `position` records how many leading elements are initialized; on unwind,
// Drop runs destructors only for that prefix. Callers must bump `position`
// after each successful write.
struct ArrayBuilder<T, N: ArrayLength<T>> {
    array: ManuallyDrop<GenericArray<T, N>>,
    position: usize,
}

impl<T, N: ArrayLength<T>> ArrayBuilder<T, N> {
    fn new() -> ArrayBuilder<T, N> {
        ArrayBuilder {
            // NOTE(review): `mem::uninitialized` is deprecated and unsound
            // for many element types; newer generic-array releases use
            // `MaybeUninit` here — worth porting when this crate is updated.
            array: ManuallyDrop::new(unsafe { mem::uninitialized() }),
            position: 0,
        }
    }

    // Extracts the (fully initialized) array without running Drop on self,
    // which would otherwise re-drop the first `position` elements.
    fn into_inner(self) -> GenericArray<T, N> {
        let array = unsafe { ptr::read(&self.array) };
        mem::forget(self);
        ManuallyDrop::into_inner(array)
    }
}

impl<T, N: ArrayLength<T>> Drop for ArrayBuilder<T, N> {
    fn drop(&mut self) {
        // Only reached on panic during construction: drop the initialized
        // prefix and leave the uninitialized tail untouched.
        for value in self.array.iter_mut().take(self.position) {
            unsafe {
                ptr::drop_in_place(value);
            }
        }
    }
}
// Panic-safe helper for moving elements *out* of an array. `position` is the
// count of elements already moved out; on unwind, Drop destroys only the
// remaining suffix so nothing is leaked or dropped twice.
struct ArrayConsumer<T, N: ArrayLength<T>> {
    array: ManuallyDrop<GenericArray<T, N>>,
    position: usize,
}

impl<T, N: ArrayLength<T>> ArrayConsumer<T, N> {
    fn new(array: GenericArray<T, N>) -> ArrayConsumer<T, N> {
        ArrayConsumer {
            array: ManuallyDrop::new(array),
            position: 0,
        }
    }
}

impl<T, N: ArrayLength<T>> Drop for ArrayConsumer<T, N> {
    fn drop(&mut self) {
        // Drop every element that was not yet moved out.
        for i in self.position..N::to_usize() {
            unsafe {
                ptr::drop_in_place(self.array.get_unchecked_mut(i));
            }
        }
    }
}
impl<T, N> GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    /// Initializes a new `GenericArray` instance using the given function.
    ///
    /// If the generator function panics while initializing the array,
    /// any already initialized elements will be dropped.
    pub fn generate<F>(f: F) -> GenericArray<T, N>
    where
        F: Fn(usize) -> T,
    {
        let mut destination = ArrayBuilder::new();
        for (i, dst) in destination.array.iter_mut().enumerate() {
            unsafe {
                ptr::write(dst, f(i));
            }
            // Count a slot only after it is initialized, so the builder's
            // Drop impl cleans up exactly the right prefix if `f` panics.
            destination.position += 1;
        }
        destination.into_inner()
    }

    /// Map a function over a slice to a `GenericArray`.
    ///
    /// The length of the slice *must* be equal to the length of the array.
    #[inline]
    pub fn map_slice<S, F: Fn(&S) -> T>(s: &[S], f: F) -> GenericArray<T, N> {
        assert_eq!(s.len(), N::to_usize());
        // SAFETY: the assert above guarantees every generated index is in
        // bounds for `s`.
        Self::generate(|i| f(unsafe { s.get_unchecked(i) }))
    }

    /// Maps a `GenericArray` to another `GenericArray`.
    ///
    /// If the mapping function panics, any already initialized elements in the new array
    /// will be dropped, AND any unused elements in the source array will also be dropped.
    pub fn map<U, F>(self, f: F) -> GenericArray<U, N>
    where
        F: Fn(T) -> U,
        N: ArrayLength<U>,
    {
        let mut source = ArrayConsumer::new(self);
        let mut destination = ArrayBuilder::new();
        for (dst, src) in destination.array.iter_mut().zip(source.array.iter()) {
            unsafe {
                // Move the source element out, transform it, and write the
                // result into the uninitialized destination slot.
                ptr::write(dst, f(ptr::read(src)));
            }
            // Keep both cursors in sync so panic cleanup drops each element
            // exactly once.
            source.position += 1;
            destination.position += 1;
        }
        destination.into_inner()
    }

    /// Maps a `GenericArray` to another `GenericArray` by reference.
    ///
    /// If the mapping function panics, any already initialized elements will be dropped.
    #[inline]
    pub fn map_ref<U, F>(&self, f: F) -> GenericArray<U, N>
    where
        F: Fn(&T) -> U,
        N: ArrayLength<U>,
    {
        // SAFETY: `generate` only produces indices < N, all in bounds.
        GenericArray::generate(|i| f(unsafe { self.get_unchecked(i) }))
    }

    /// Combines two `GenericArray` instances and iterates through both of them,
    /// initializing a new `GenericArray` with the result of the zipped mapping function.
    ///
    /// If the mapping function panics, any already initialized elements in the new array
    /// will be dropped, AND any unused elements in the source arrays will also be dropped.
    pub fn zip<B, U, F>(self, rhs: GenericArray<B, N>, f: F) -> GenericArray<U, N>
    where
        F: Fn(T, B) -> U,
        N: ArrayLength<B> + ArrayLength<U>,
    {
        let mut left = ArrayConsumer::new(self);
        let mut right = ArrayConsumer::new(rhs);
        let mut destination = ArrayBuilder::new();
        for (dst, (lhs, rhs)) in
            destination.array.iter_mut().zip(left.array.iter().zip(
                right.array.iter(),
            ))
        {
            unsafe {
                ptr::write(dst, f(ptr::read(lhs), ptr::read(rhs)));
            }
            // Keep all three cursors in sync so a panic in `f` drops each
            // element exactly once across the three arrays.
            destination.position += 1;
            left.position += 1;
            right.position += 1;
        }
        destination.into_inner()
    }

    /// Combines two `GenericArray` instances and iterates through both of them by reference,
    /// initializing a new `GenericArray` with the result of the zipped mapping function.
    ///
    /// If the mapping function panics, any already initialized elements will be dropped.
    pub fn zip_ref<B, U, F>(&self, rhs: &GenericArray<B, N>, f: F) -> GenericArray<U, N>
    where
        F: Fn(&T, &B) -> U,
        N: ArrayLength<B> + ArrayLength<U>,
    {
        // SAFETY: both arrays hold exactly N elements and `generate` only
        // produces indices < N.
        GenericArray::generate(|i| unsafe {
            f(self.get_unchecked(i), rhs.get_unchecked(i))
        })
    }

    /// Extracts a slice containing the entire array.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        self.deref()
    }

    /// Extracts a mutable slice containing the entire array.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        self.deref_mut()
    }

    /// Converts slice to a generic array reference with inferred length;
    ///
    /// Length of the slice must be equal to the length of the array.
    #[inline]
    pub fn from_slice(slice: &[T]) -> &GenericArray<T, N> {
        assert_eq!(slice.len(), N::to_usize());
        // SAFETY: length checked above, and GenericArray<T, N> has the same
        // layout as [T; N].
        unsafe { &*(slice.as_ptr() as *const GenericArray<T, N>) }
    }

    /// Converts mutable slice to a mutable generic array reference
    ///
    /// Length of the slice must be equal to the length of the array.
    #[inline]
    pub fn from_mut_slice(slice: &mut [T]) -> &mut GenericArray<T, N> {
        assert_eq!(slice.len(), N::to_usize());
        // SAFETY: same layout argument as `from_slice`, with unique access.
        unsafe { &mut *(slice.as_mut_ptr() as *mut GenericArray<T, N>) }
    }
}
impl<T: Clone, N> GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    /// Construct a `GenericArray` from a slice by cloning its content
    ///
    /// Length of the slice must be equal to the length of the array
    ///
    /// # Panics
    ///
    /// Panics if `list.len() != N::to_usize()` (the `from_exact_iter`
    /// length check fails and the `expect` fires).
    #[inline]
    pub fn clone_from_slice(list: &[T]) -> GenericArray<T, N> {
        Self::from_exact_iter(list.iter().cloned()).expect(
            "Slice must be the same length as the array",
        )
    }
}
impl<T, N> GenericArray<T, N>
where
    N: ArrayLength<T>,
{
    /// Builds a `GenericArray` from an exact-size iterator.
    ///
    /// Returns `None` when the iterator's reported length does not equal
    /// `N::to_usize()`; otherwise consumes exactly `N` items and returns
    /// `Some(array)`.
    ///
    /// If the iterator panics mid-construction, the elements written so far
    /// are dropped by `ArrayBuilder`'s `Drop` impl.
    pub fn from_exact_iter<I>(iter: I) -> Option<Self>
    where
        I: IntoIterator<Item = T>,
        <I as IntoIterator>::IntoIter: ExactSizeIterator,
    {
        let iter = iter.into_iter();
        if iter.len() != N::to_usize() {
            return None;
        }
        let mut destination = ArrayBuilder::new();
        for (dst, src) in destination.array.iter_mut().zip(iter) {
            unsafe {
                ptr::write(dst, src);
            }
            // BUG FIX: the original never advanced `position`, so a panic in
            // the source iterator leaked every element already written.
            // Tracking progress lets ArrayBuilder's Drop clean up correctly.
            destination.position += 1;
        }
        // Reuse the builder's extraction logic instead of duplicating the
        // ptr::read + mem::forget dance inline.
        Some(destination.into_inner())
    }
}
impl<T, N> ::core::iter::FromIterator<T> for GenericArray<T, N>
where
    N: ArrayLength<T>,
    T: Default,
{
    /// Builds a `GenericArray` from an iterator, padding with `T::default()`
    /// if the iterator yields fewer than `N` items; surplus items are left
    /// unconsumed in the source iterator.
    fn from_iter<I>(iter: I) -> GenericArray<T, N>
    where
        I: IntoIterator<Item = T>,
    {
        let mut destination = ArrayBuilder::new();
        // Infinite stream of defaults chained behind the input guarantees
        // the zip always fills all N slots.
        let defaults = ::core::iter::repeat(()).map(|_| T::default());
        for (dst, src) in destination.array.iter_mut().zip(
            iter.into_iter().chain(defaults),
        )
        {
            unsafe {
                ptr::write(dst, src);
            }
            // BUG FIX: the original never advanced `position`, so a panic in
            // the source iterator (or in `T::default`) leaked every element
            // already written. Tracking progress lets ArrayBuilder's Drop
            // drop exactly the initialized prefix on unwind.
            destination.position += 1;
        }
        destination.into_inner()
    }
}
#[cfg(test)]
mod test {
    // Compile with:
    // cargo rustc --lib --profile test --release --
    // -C target-cpu=native -C opt-level=3 --emit asm
    // and view the assembly to make sure test_assembly generates
    // SIMD instructions instead of a naive loop.

    /// Volatile identity function that defeats constant folding, so the
    /// optimizer cannot precompute the test inputs at compile time.
    #[inline(never)]
    pub fn black_box<T>(val: T) -> T {
        use core::{mem, ptr};
        let ret = unsafe { ptr::read_volatile(&val) };
        mem::forget(val);
        ret
    }

    #[test]
    fn test_assembly() {
        let a = black_box(arr![i32; 1, 3, 5, 7]);
        let b = black_box(arr![i32; 2, 4, 6, 8]);
        let c = a.zip_ref(&b, |l, r| l + r);
        assert_eq!(c, arr![i32; 3, 7, 11, 15]);
    }
}

View File

@ -1,44 +0,0 @@
#[macro_use]
extern crate generic_array;
extern crate typenum;
use generic_array::GenericArray;
use std::str::from_utf8;
use typenum::U2048;
// Small arrays exercise the stack-buffer path in the LowerHex/UpperHex impls.
#[test]
fn short_lower_hex() {
    let ar = arr![u8; 10, 20, 30];
    assert_eq!(format!("{:x}", ar), "0a141e");
}

#[test]
fn short_upper_hex() {
    let ar = arr![u8; 30, 20, 10];
    assert_eq!(format!("{:X}", ar), "1E140A");
}

// 2048-byte arrays exercise the chunked (>= 1024 bytes) formatting path.
#[test]
fn long_lower_hex() {
    let ar = GenericArray::<u8, U2048>::default();
    assert_eq!(format!("{:x}", ar), from_utf8(&[b'0'; 4096]).unwrap());
}

#[test]
fn long_upper_hex() {
    let ar = GenericArray::<u8, U2048>::default();
    assert_eq!(format!("{:X}", ar), from_utf8(&[b'0'; 4096]).unwrap());
}

// `{:.P}` precision truncates the output to the first P input bytes.
#[test]
fn truncated_lower_hex() {
    let ar = arr![u8; 10, 20, 30, 40, 50];
    assert_eq!(format!("{:.2x}", ar), "0a14");
}

#[test]
fn truncated_upper_hex() {
    let ar = arr![u8; 30, 20, 10, 17, 0];
    assert_eq!(format!("{:.4X}", ar), "1E140A11");
}

View File

@ -1,10 +0,0 @@
#[macro_use]
extern crate generic_array as gen_arr;
use gen_arr::typenum;
// Verifies the arr! macro still works when the crate is imported under a
// different name — the macro must expand through `$crate`, not a
// hard-coded `::generic_array` path.
#[test]
fn test_different_crate_name() {
    let _: gen_arr::GenericArray<u32, typenum::U4> = arr![u32; 0, 1, 2, 3];
    let _: gen_arr::GenericArray<u32, typenum::U0> = arr![u32;];
}

View File

@ -1,169 +0,0 @@
#![recursion_limit="128"]
#![no_std]
#[macro_use]
extern crate generic_array;
use core::cell::Cell;
use core::ops::Drop;
use generic_array::GenericArray;
use generic_array::typenum::{U1, U3, U4, U97};
// Basic construction from a slice, with spot checks across the array.
#[test]
fn test() {
    let mut list97 = [0; 97];
    for i in 0..97 {
        list97[i] = i as i32;
    }
    let l: GenericArray<i32, U97> = GenericArray::clone_from_slice(&list97);
    assert_eq!(l[0], 0);
    assert_eq!(l[1], 1);
    assert_eq!(l[32], 32);
    assert_eq!(l[56], 56);
}

// Every element's destructor must run exactly once when the array drops.
#[test]
fn test_drop() {
    #[derive(Clone)]
    struct TestDrop<'a>(&'a Cell<u32>);

    impl<'a> Drop for TestDrop<'a> {
        fn drop(&mut self) {
            self.0.set(self.0.get() + 1);
        }
    }

    let drop_counter = Cell::new(0);
    {
        let _: GenericArray<TestDrop, U3> =
            arr![TestDrop; TestDrop(&drop_counter),
                           TestDrop(&drop_counter),
                           TestDrop(&drop_counter)];
    }
    assert_eq!(drop_counter.get(), 3);
}

#[test]
fn test_arr() {
    let test: GenericArray<u32, U3> = arr![u32; 1, 2, 3];
    assert_eq!(test[1], 2);
}

#[test]
fn test_copy() {
    let test = arr![u32; 1, 2, 3];
    let test2 = test;
    // if GenericArray is not copy, this should fail as a use of a moved value
    assert_eq!(test[1], 2);
    assert_eq!(test2[0], 1);
}

#[test]
fn test_iter_flat_map() {
    assert!((0..5).flat_map(|i| arr![i32; 2 * i, 2 * i + 1]).eq(0..10));
}

// Non-Clone payload used to prove from_slice/from_mut_slice never clone.
#[derive(Debug, PartialEq, Eq)]
struct NoClone<T>(T);

#[test]
fn test_from_slice() {
    let arr = [1, 2, 3, 4];
    let gen_arr = GenericArray::<_, U3>::from_slice(&arr[..3]);
    assert_eq!(&arr[..3], gen_arr.as_slice());
    let arr = [NoClone(1u32), NoClone(2), NoClone(3), NoClone(4)];
    let gen_arr = GenericArray::<_, U3>::from_slice(&arr[..3]);
    assert_eq!(&arr[..3], gen_arr.as_slice());
}

// Writes through the array reference must be visible in the source slice.
#[test]
fn test_from_mut_slice() {
    let mut arr = [1, 2, 3, 4];
    {
        let gen_arr = GenericArray::<_, U3>::from_mut_slice(&mut arr[..3]);
        gen_arr[2] = 10;
    }
    assert_eq!(arr, [1, 2, 10, 4]);
    let mut arr = [NoClone(1u32), NoClone(2), NoClone(3), NoClone(4)];
    {
        let gen_arr = GenericArray::<_, U3>::from_mut_slice(&mut arr[..3]);
        gen_arr[2] = NoClone(10);
    }
    assert_eq!(arr, [NoClone(1), NoClone(2), NoClone(10), NoClone(4)]);
}

#[test]
fn test_default() {
    let arr = GenericArray::<u8, U1>::default();
    assert_eq!(arr[0], 0);
}

#[test]
fn test_from() {
    let data = [(1, 2, 3), (4, 5, 6), (7, 8, 9)];
    let garray: GenericArray<(usize, usize, usize), U3> = data.into();
    assert_eq!(&data, garray.as_slice());
}

#[test]
fn test_unit_macro() {
    let arr = arr![f32; 3.14];
    assert_eq!(arr[0], 3.14);
}

#[test]
fn test_empty_macro() {
    let _arr = arr![f32;];
}

#[test]
fn test_cmp() {
    arr![u8; 0x00].cmp(&arr![u8; 0x00]);
}

/// This test should cause a helpful compile error if uncommented.
// #[test]
// fn test_empty_macro2(){
//     let arr = arr![];
// }

#[cfg(feature = "serde")]
mod impl_serde {
    extern crate serde_json;

    use generic_array::GenericArray;
    use generic_array::typenum::U6;

    // Round-trip through serde_json and compare against the original.
    #[test]
    fn test_serde_implementation() {
        let array: GenericArray<f64, U6> = arr![f64; 0.0, 5.0, 3.0, 7.07192, 76.0, -9.0];
        let string = serde_json::to_string(&array).unwrap();
        assert_eq!(string, "[0.0,5.0,3.0,7.07192,76.0,-9.0]");
        let test_array: GenericArray<f64, U6> = serde_json::from_str(&string).unwrap();
        assert_eq!(test_array, array);
    }
}

#[test]
fn test_map() {
    let b: GenericArray<i32, U4> = GenericArray::generate(|i| i as i32 * 4).map(|x| x - 3);
    assert_eq!(b, arr![i32; -3, 1, 5, 9]);
}

#[test]
fn test_zip() {
    let a: GenericArray<_, U4> = GenericArray::generate(|i| i + 1);
    let b: GenericArray<_, U4> = GenericArray::generate(|i| i as i32 * 4);
    let c = a.zip(b, |r, l| r as i32 + l);
    assert_eq!(c, arr![i32; 1, 6, 11, 16]);
}

// FromIterator pads a too-short input with T::default().
#[test]
fn test_from_iter() {
    use core::iter::repeat;
    let a: GenericArray<_, U4> = repeat(11).take(3).collect();
    assert_eq!(a, arr![i32; 11, 11, 11, 0]);
}

0
third_party/rust/gleam/COPYING vendored Normal file → Executable file
View File

0
third_party/rust/gleam/LICENSE-APACHE vendored Normal file → Executable file
View File

0
third_party/rust/gleam/LICENSE-MIT vendored Normal file → Executable file
View File

0
third_party/rust/gleam/README.md vendored Normal file → Executable file
View File

0
third_party/rust/gleam/build.rs vendored Normal file → Executable file
View File

0
third_party/rust/gleam/rustfmt.toml vendored Normal file → Executable file
View File

0
third_party/rust/gleam/src/gl.rs vendored Normal file → Executable file
View File

0
third_party/rust/gleam/src/gl_fns.rs vendored Normal file → Executable file
View File

0
third_party/rust/gleam/src/gles_fns.rs vendored Normal file → Executable file
View File

0
third_party/rust/gleam/src/lib.rs vendored Normal file → Executable file
View File

File diff suppressed because one or more lines are too long

View File

@ -1,65 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g. crates.io) dependencies
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "lalrpop-snap"
version = "0.16.0"
authors = ["Niko Matsakis <niko@alum.mit.edu>"]
description = "convenient LR(1) parser generator"
readme = "../README.md"
keywords = ["parser", "generator", "LR", "yacc", "grammar"]
categories = ["parsing"]
license = "Apache-2.0/MIT"
repository = "https://github.com/lalrpop/lalrpop"
[lib]
doctest = false
[dependencies.ascii-canvas]
version = "1.0"
[dependencies.atty]
version = "0.2"
[dependencies.bit-set]
version = "0.5.0"
[dependencies.diff]
version = "0.1.9"
[dependencies.ena]
version = "0.9"
[dependencies.itertools]
version = "0.7"
[dependencies.lalrpop-util]
version = "0.16.0"
[dependencies.petgraph]
version = "0.4.13"
[dependencies.regex]
version = "1"
[dependencies.regex-syntax]
version = "0.4.0"
[dependencies.string_cache]
version = "0.7.1"
[dependencies.term]
version = "0.4.5"
[dependencies.unicode-xid]
version = "0.1"
[dev-dependencies.rand]
version = "0.4"

View File

@ -1,182 +0,0 @@
use build;
use log::Level;
use session::{ColorConfig, Session};
use std::default::Default;
use std::env;
use std::env::current_dir;
use std::error::Error;
use std::path::{Path, PathBuf};
use std::rc::Rc;
/// Configure various aspects of how LALRPOP works.
/// Intended for use within a `build.rs` script.
/// To get the default configuration, use `Configuration::new`.
#[derive(Clone, Default)]
pub struct Configuration {
session: Session,
}
impl Configuration {
    /// Builds a configuration with every option at its default value;
    /// identical to `Configuration::default`.
    pub fn new() -> Configuration {
        Configuration::default()
    }
    /// Forces ANSI color codes on, even when output is not a TTY.
    pub fn always_use_colors(&mut self) -> &mut Configuration {
        self.session.color_config = ColorConfig::Yes;
        self
    }
    /// Forces ANSI color codes off, even when output is a TTY.
    pub fn never_use_colors(&mut self) -> &mut Configuration {
        self.session.color_config = ColorConfig::No;
        self
    }
    /// Emits ANSI colors only when output looks like a TTY. This is
    /// the default behavior.
    pub fn use_colors_if_tty(&mut self) -> &mut Configuration {
        self.session.color_config = ColorConfig::IfTty;
        self
    }
    /// Sets the directory that is recursively scanned for `.lalrpop`
    /// input files. Output locations are derived by re-rooting each
    /// input path from this directory onto the output directory.
    /// Defaults to the current working directory.
    pub fn set_in_dir<P>(&mut self, dir: P) -> &mut Self
    where
        P: Into<PathBuf>,
    {
        self.session.in_dir = Some(dir.into());
        self
    }
    /// Sets the directory where generated files are written.
    /// Defaults to the input directory.
    pub fn set_out_dir<P>(&mut self, dir: P) -> &mut Self
    where
        P: Into<PathBuf>,
    {
        self.session.out_dir = Some(dir.into());
        self
    }
    /// Adopts cargo's directory conventions: grammars are read from
    /// `src` and generated code is written to `$OUT_DIR`.
    pub fn use_cargo_dir_conventions(&mut self) -> &mut Self {
        let out_dir = env::var("OUT_DIR").unwrap();
        self.set_in_dir("src").set_out_dir(out_dir)
    }
    /// When `true`, regenerates `.rs` files even when they are newer
    /// than their `.lalrpop` sources. Defaults to `false`.
    pub fn force_build(&mut self, val: bool) -> &mut Configuration {
        self.session.force_build = val;
        self
    }
    /// When `true`, includes comments in the generated code, making
    /// it significantly larger. Defaults to `false`.
    pub fn emit_comments(&mut self, val: bool) -> &mut Configuration {
        self.session.emit_comments = val;
        self
    }
    /// When `true`, writes a report file describing the generated code.
    pub fn emit_report(&mut self, val: bool) -> &mut Configuration {
        self.session.emit_report = val;
        self
    }
    /// Logs only errors that halt progress.
    pub fn log_quiet(&mut self) -> &mut Configuration {
        self.session.log.set_level(Level::Taciturn);
        self
    }
    /// Logs high-level indications of progress (the default).
    pub fn log_info(&mut self) -> &mut Configuration {
        self.session.log.set_level(Level::Informative);
        self
    }
    /// Logs more detail than `log_info`, but still not overwhelming.
    pub fn log_verbose(&mut self) -> &mut Configuration {
        self.session.log.set_level(Level::Verbose);
        self
    }
    /// Logs everything; intended for debugging LALRPOP itself and
    /// best redirected to a file.
    pub fn log_debug(&mut self) -> &mut Configuration {
        self.session.log.set_level(Level::Debug);
        self
    }
    /// Enables "unit-testing" configuration. This is only for
    /// lalrpop-test.
    #[doc(hidden)]
    pub fn unit_test(&mut self) -> &mut Configuration {
        self.session.unit_test = true;
        self
    }
    /// Processes every grammar according to the configured input and
    /// output directories.
    pub fn process(&self) -> Result<(), Box<Error>> {
        let root = match self.session.in_dir {
            Some(ref dir) => dir.as_path(),
            None => Path::new("."),
        };
        self.process_dir(root)
    }
    /// Processes every grammar beneath the current directory, which --
    /// unless you have changed it -- is typically the crate root.
    pub fn process_current_dir(&self) -> Result<(), Box<Error>> {
        let cwd = try!(current_dir());
        self.process_dir(cwd)
    }
    /// Processes every `.lalrpop` file found beneath `path`.
    pub fn process_dir<P: AsRef<Path>>(&self, path: P) -> Result<(), Box<Error>> {
        let session = Rc::new(self.session.clone());
        try!(build::process_dir(session, path));
        Ok(())
    }
    /// Processes the single `.lalrpop` file at `path`.
    pub fn process_file<P: AsRef<Path>>(&self, path: P) -> Result<(), Box<Error>> {
        let session = Rc::new(self.session.clone());
        try!(build::process_file(session, path));
        Ok(())
    }
}
/// Processes all files beneath the current directory, which -- unless
/// you have changed it -- is typically the root of the crate being
/// compiled.
///
/// Shorthand for `Configuration::new().process_current_dir()`.
pub fn process_root() -> Result<(), Box<Error>> {
    let config = Configuration::new();
    config.process_current_dir()
}
/// Deprecated in favor of `Configuration`. Try:
///
/// ```rust
/// Configuration::new().force_build(true).process_current_dir()
/// ```
///
/// instead.
pub fn process_root_unconditionally() -> Result<(), Box<Error>> {
    let mut config = Configuration::new();
    config.force_build(true).process_current_dir()
}

View File

@ -1,422 +0,0 @@
//! Code for generating action code.
//!
//! From the outside, action fns have one of two forms. If they take
//! symbols as input, e.g. from a production like `X = Y Z => ...`
//! (which takes Y and Z as input), they have this form:
//!
//! ```
//! fn __action17<
//! 'input, // user-declared type parameters (*)
//! >(
//! input: &'input str, // user-declared parameters
//! __0: (usize, usize, usize), // symbols being reduced, if any
//! ...
//! __N: (usize, Foo, usize), // each has a type (L, T, L)
//! ) -> Box<Expr<'input>>
//! ```
//!
//! Otherwise, they have this form:
//!
//! ```
//! fn __action17<
//! 'input, // user-declared type parameters (*)
//! >(
//! input: &'input str, // user-declared parameters
//! __lookbehind: &usize, // value for @R -- "end of previous token"
//! __lookahead: &usize, // value for @L -- "start of next token"
//! ) -> Box<Expr<'input>>
//! ```
//!
//! * -- in this case, those "user-declared" parameters are inserted by
//! the "internal tokenizer".
use grammar::repr as r;
use rust::RustWrite;
use std::io::{self, Write};
/// Emits one Rust `fn` per action-fn definition in `grammar`,
/// dispatching on the kind of each definition (user code, lookaround,
/// or inlined).
pub fn emit_action_code<W: Write>(grammar: &r::Grammar, rust: &mut RustWrite<W>) -> io::Result<()> {
    for (index, defn) in grammar.action_fn_defns.iter().enumerate() {
        rust!(rust, "");
        // The user-declared parameters are always threaded through to
        // the action code, used or not, so silence the unused-variables
        // lint whenever any parameters exist.
        if !grammar.parameters.is_empty() {
            rust!(rust, "#[allow(unused_variables)]");
        }
        match defn.kind {
            r::ActionFnDefnKind::User(ref data) => {
                try!(emit_user_action_code(grammar, rust, index, defn, data));
            }
            r::ActionFnDefnKind::Lookaround(ref variant) => {
                try!(emit_lookaround_action_code(grammar, rust, index, defn, variant));
            }
            r::ActionFnDefnKind::Inline(ref data) => {
                try!(emit_inline_action_code(grammar, rust, index, defn, data));
            }
        }
    }
    Ok(())
}
/// Renders the return type of an action fn: the plain user type for
/// infallible actions, or that type wrapped in a `Result` whose error
/// side is a `ParseError` for fallible ones.
fn ret_type_string(grammar: &r::Grammar, defn: &r::ActionFnDefn) -> String {
    if !defn.fallible {
        return format!("{}", defn.ret_type);
    }
    format!(
        "Result<{},{}lalrpop_util::ParseError<{},{},{}>>",
        defn.ret_type,
        grammar.prefix,
        grammar.types.terminal_loc_type(),
        grammar.types.terminal_token_type(),
        grammar.types.error_type()
    )
}
/// Emits the action fn for user-written action code (`=> { ... }`):
/// binds each reduced symbol's payload to the user's chosen pattern
/// and splices the user code in as the function body.
fn emit_user_action_code<W: Write>(
    grammar: &r::Grammar,
    rust: &mut RustWrite<W>,
    index: usize,
    defn: &r::ActionFnDefn,
    data: &r::UserActionFnDefn,
) -> io::Result<()> {
    let ret_type = ret_type_string(grammar, defn);
    // For each symbol to be reduced, we will receive
    // a (L, T, L) triple where the Ls are locations and
    // the T is the data. Ignore the locations and bind
    // the data to the name the user gave.
    let mut arguments: Vec<String> = data.arg_patterns
        .iter()
        .zip(
            data.arg_types
                .iter()
                .cloned()
                .map(|t| grammar.types.spanned_type(t)),
        )
        .map(|(p, t)| format!("(_, {}, _): {}", p, t))
        .collect();
    // If this is a reduce of an empty production, we will
    // automatically add position information in the form of
    // lookbehind/lookahead values. Otherwise, those values would be
    // determined from the arguments themselves.
    if data.arg_patterns.is_empty() {
        arguments.extend(vec![
            format!(
                "{}lookbehind: &{}",
                grammar.prefix,
                grammar.types.terminal_loc_type()
            ),
            format!(
                "{}lookahead: &{}",
                grammar.prefix,
                grammar.types.terminal_loc_type()
            ),
        ]);
    }
    try!(rust.write_fn_header(
        grammar,
        &r::Visibility::Priv,
        format!("{}action{}", grammar.prefix, index),
        vec![],
        None,
        arguments,
        ret_type,
        vec![]
    ));
    // The body is simply the user's code block, verbatim.
    rust!(rust, "{{");
    rust!(rust, "{}", data.code);
    rust!(rust, "}}");
    Ok(())
}
/// Emits the action fn behind `@L`/`@R`: it receives the lookbehind
/// and lookahead locations as arguments and returns a clone of the
/// one the variant requests.
fn emit_lookaround_action_code<W: Write>(
    grammar: &r::Grammar,
    rust: &mut RustWrite<W>,
    index: usize,
    _defn: &r::ActionFnDefn,
    data: &r::LookaroundActionFnDefn,
) -> io::Result<()> {
    try!(rust.write_fn_header(
        grammar,
        &r::Visibility::Priv,
        format!("{}action{}", grammar.prefix, index),
        vec![],
        None,
        vec![
            format!(
                "{}lookbehind: &{}",
                grammar.prefix,
                grammar.types.terminal_loc_type()
            ),
            format!(
                "{}lookahead: &{}",
                grammar.prefix,
                grammar.types.terminal_loc_type()
            ),
        ],
        format!("{}", grammar.types.terminal_loc_type()),
        vec![]
    ));
    rust!(rust, "{{");
    match *data {
        r::LookaroundActionFnDefn::Lookahead => {
            // take the lookahead, if any; otherwise, we are
            // at EOF, so take the lookbehind (end of last
            // pushed token); if that is missing too, then
            // supply default.
            rust!(rust, "{}lookahead.clone()", grammar.prefix);
        }
        r::LookaroundActionFnDefn::Lookbehind => {
            // take lookbehind or supply default
            rust!(rust, "{}lookbehind.clone()", grammar.prefix);
        }
    }
    rust!(rust, "}}");
    Ok(())
}
/// Emits the action fn for an action produced by inlining: it first
/// computes a start/end location for every inlined group, then
/// evaluates each inlined action into a spanned temporary, and
/// finally calls the original action with original arguments and
/// temporaries interleaved in declaration order.
fn emit_inline_action_code<W: Write>(
    grammar: &r::Grammar,
    rust: &mut RustWrite<W>,
    index: usize,
    defn: &r::ActionFnDefn,
    data: &r::InlineActionFnDefn,
) -> io::Result<()> {
    let ret_type = ret_type_string(grammar, defn);
    // Flatten the symbols: an `Original` contributes itself, an
    // `Inlined` group contributes each of its constituent symbols.
    let arg_types: Vec<_> = data.symbols
        .iter()
        .flat_map(|sym| match *sym {
            r::InlinedSymbol::Original(ref s) => vec![s.clone()],
            r::InlinedSymbol::Inlined(_, ref syms) => syms.clone(),
        })
        .map(|s| s.ty(&grammar.types))
        .collect();
    // this is the number of symbols we expect to be passed in; it is
    // distinct from data.symbols.len(), because sometimes we have
    // inlined actions with no input symbols
    let num_flat_args = arg_types.len();
    let mut arguments: Vec<_> = arg_types
        .iter()
        .map(|&t| grammar.types.spanned_type(t.clone()))
        .enumerate()
        .map(|(i, t)| format!("{}{}: {}", grammar.prefix, i, t))
        .collect();
    // If no symbols are being reduced, add in the
    // lookbehind/lookahead.
    if arguments.len() == 0 {
        arguments.extend(vec![
            format!(
                "{}lookbehind: &{}",
                grammar.prefix,
                grammar.types.terminal_loc_type()
            ),
            format!(
                "{}lookahead: &{}",
                grammar.prefix,
                grammar.types.terminal_loc_type()
            ),
        ]);
    }
    try!(rust.write_fn_header(
        grammar,
        &r::Visibility::Priv,
        format!("{}action{}", grammar.prefix, index),
        vec![],
        None,
        arguments,
        ret_type,
        vec![]
    ));
    rust!(rust, "{{");
    // For each inlined thing, compute the start/end locations.
    // Do this first so that none of the arguments have been moved
    // yet and we can easily access their locations.
    let mut arg_counter = 0;
    let mut temp_counter = 0;
    for symbol in &data.symbols {
        match *symbol {
            r::InlinedSymbol::Original(_) => {
                arg_counter += 1;
            }
            r::InlinedSymbol::Inlined(_, ref syms) => {
                if syms.len() > 0 {
                    // If we are reducing symbols, then start and end
                    // can be the start/end location of the first/last
                    // symbol respectively. Easy peezy.
                    rust!(
                        rust,
                        "let {}start{} = {}{}.0.clone();",
                        grammar.prefix,
                        temp_counter,
                        grammar.prefix,
                        arg_counter
                    );
                    let last_arg_index = arg_counter + syms.len() - 1;
                    rust!(
                        rust,
                        "let {}end{} = {}{}.2.clone();",
                        grammar.prefix,
                        temp_counter,
                        grammar.prefix,
                        last_arg_index
                    );
                } else {
                    // If we have no symbols, then `arg_counter`
                    // represents index of the first symbol after this
                    // inlined item (if any), and `arg_counter-1`
                    // represents index of the symbol before this
                    // item.
                    if arg_counter > 0 {
                        rust!(
                            rust,
                            "let {}start{} = {}{}.2.clone();",
                            grammar.prefix,
                            temp_counter,
                            grammar.prefix,
                            arg_counter - 1
                        );
                    } else if num_flat_args > 0 {
                        rust!(
                            rust,
                            "let {}start{} = {}{}.0.clone();",
                            grammar.prefix,
                            temp_counter,
                            grammar.prefix,
                            arg_counter
                        );
                    } else {
                        // No symbols at all: fall back to the
                        // lookbehind/lookahead parameters added above.
                        rust!(
                            rust,
                            "let {}start{} = {}lookbehind.clone();",
                            grammar.prefix,
                            temp_counter,
                            grammar.prefix
                        );
                    }
                    if arg_counter < num_flat_args {
                        rust!(
                            rust,
                            "let {}end{} = {}{}.0.clone();",
                            grammar.prefix,
                            temp_counter,
                            grammar.prefix,
                            arg_counter
                        );
                    } else if num_flat_args > 0 {
                        rust!(
                            rust,
                            "let {}end{} = {}{}.2.clone();",
                            grammar.prefix,
                            temp_counter,
                            grammar.prefix,
                            num_flat_args - 1
                        );
                    } else {
                        rust!(
                            rust,
                            "let {}end{} = {}lookahead.clone();",
                            grammar.prefix,
                            temp_counter,
                            grammar.prefix
                        );
                    }
                }
                temp_counter += 1;
                arg_counter += syms.len();
            }
        }
    }
    // Now create temporaries for the inlined things
    let mut arg_counter = 0;
    let mut temp_counter = 0;
    for symbol in &data.symbols {
        match *symbol {
            r::InlinedSymbol::Original(_) => {
                arg_counter += 1;
            }
            r::InlinedSymbol::Inlined(inlined_action, ref syms) => {
                // execute the inlined reduce action
                rust!(
                    rust,
                    "let {}temp{} = {}action{}(",
                    grammar.prefix,
                    temp_counter,
                    grammar.prefix,
                    inlined_action.index()
                );
                for parameter in &grammar.parameters {
                    rust!(rust, "{},", parameter.name);
                }
                for i in 0..syms.len() {
                    rust!(rust, "{}{},", grammar.prefix, arg_counter + i);
                }
                // Empty inlined productions receive the start/end
                // locations computed in the first pass instead of
                // symbol arguments.
                if syms.len() == 0 {
                    rust!(rust, "&{}start{},", grammar.prefix, temp_counter);
                    rust!(rust, "&{}end{},", grammar.prefix, temp_counter);
                }
                rust!(rust, ");");
                // wrap up the inlined value along with its span
                rust!(
                    rust,
                    "let {}temp{} = ({}start{}, {}temp{}, {}end{});",
                    grammar.prefix,
                    temp_counter,
                    grammar.prefix,
                    temp_counter,
                    grammar.prefix,
                    temp_counter,
                    grammar.prefix,
                    temp_counter
                );
                temp_counter += 1;
                arg_counter += syms.len();
            }
        }
    }
    // Finally, call the original action with originals and inlined
    // temporaries in order.
    rust!(rust, "{}action{}(", grammar.prefix, data.action.index());
    for parameter in &grammar.parameters {
        rust!(rust, "{},", parameter.name);
    }
    let mut arg_counter = 0;
    let mut temp_counter = 0;
    for symbol in &data.symbols {
        match *symbol {
            r::InlinedSymbol::Original(_) => {
                rust!(rust, "{}{},", grammar.prefix, arg_counter);
                arg_counter += 1;
            }
            r::InlinedSymbol::Inlined(_, ref syms) => {
                rust!(rust, "{}temp{},", grammar.prefix, temp_counter);
                temp_counter += 1;
                arg_counter += syms.len();
            }
        }
    }
    assert!(data.symbols.len() > 0);
    rust!(rust, ")");
    rust!(rust, "}}");
    Ok(())
}

View File

@ -1,84 +0,0 @@
use std::io::{self, Write};
use term::{self, Attr, Terminal};
use term::color::Color;
/// A `Terminal` that just ignores all attempts at formatting. Used
/// to report errors when no ANSI terminfo is available.
pub struct FakeTerminal<W: Write> {
    // Underlying sink that receives the unstyled bytes.
    write: W,
}
impl<W: Write> FakeTerminal<W> {
    /// Wraps `write` in a terminal that silently ignores all
    /// formatting requests.
    pub fn new(write: W) -> FakeTerminal<W> {
        // Field-init shorthand; `write: write` was redundant.
        FakeTerminal { write }
    }
}
// Raw output passes straight through to the wrapped writer; only the
// styling requests (see the `Terminal` impl below) are ignored.
impl<W: Write> Write for FakeTerminal<W> {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.write.write(buf)
    }
    fn flush(&mut self) -> io::Result<()> {
        self.write.flush()
    }
}
// Every styling capability is reported as unsupported, and every
// styling request succeeds as a no-op, so callers can drive this
// exactly like a real terminal.
impl<W: Write> Terminal for FakeTerminal<W> {
    type Output = W;
    fn fg(&mut self, _color: Color) -> term::Result<()> {
        Ok(())
    }
    fn bg(&mut self, _color: Color) -> term::Result<()> {
        Ok(())
    }
    fn attr(&mut self, _attr: Attr) -> term::Result<()> {
        Ok(())
    }
    fn supports_attr(&self, _attr: Attr) -> bool {
        false
    }
    fn reset(&mut self) -> term::Result<()> {
        Ok(())
    }
    fn supports_reset(&self) -> bool {
        false
    }
    fn supports_color(&self) -> bool {
        false
    }
    fn cursor_up(&mut self) -> term::Result<()> {
        Ok(())
    }
    fn delete_line(&mut self) -> term::Result<()> {
        Ok(())
    }
    fn carriage_return(&mut self) -> term::Result<()> {
        Ok(())
    }
    fn get_ref(&self) -> &Self::Output {
        &self.write
    }
    fn get_mut(&mut self) -> &mut Self::Output {
        &mut self.write
    }
    fn into_inner(self) -> Self::Output
    where
        Self: Sized,
    {
        self.write
    }
}

View File

@ -1,585 +0,0 @@
//! Utilities for running in a build script.
use atty;
use file_text::FileText;
use grammar::parse_tree as pt;
use grammar::repr as r;
use lalrpop_util::ParseError;
use lexer::intern_token;
use lr1;
use message::{Content, Message};
use message::builder::InlineBuilder;
use normalize;
use parser;
use rust::RustWrite;
use session::{ColorConfig, Session};
use term;
use tls::Tls;
use tok;
use std::fs;
use std::io::{self, BufRead, Write};
use std::path::{Path, PathBuf};
use std::process::exit;
use std::rc::Rc;
mod action;
mod fake_term;
use self::fake_term::FakeTerminal;
// First line written to every generated `.rs` file; `needs_rebuild`
// compares it against the existing output to detect files produced by
// a different LALRPOP version.
const LALRPOP_VERSION_HEADER: &'static str = concat!(
    "// auto-generated: \"",
    env!("CARGO_PKG_NAME"),
    " ",
    env!("CARGO_PKG_VERSION"),
    "\""
);
/// Runs LALRPOP over every `.lalrpop` file found beneath `root_dir`.
pub fn process_dir<P: AsRef<Path>>(session: Rc<Session>, root_dir: P) -> io::Result<()> {
    for lalrpop_file in try!(lalrpop_files(root_dir)) {
        try!(process_file(session.clone(), lalrpop_file));
    }
    Ok(())
}
/// Runs LALRPOP over a single grammar file, deriving the generated
/// `.rs` and report paths from the session's directory settings.
pub fn process_file<P: AsRef<Path>>(session: Rc<Session>, lalrpop_file: P) -> io::Result<()> {
    let lalrpop_file = lalrpop_file.as_ref();
    let (rs_file, report_file) = (
        try!(resolve_rs_file(&session, lalrpop_file)),
        try!(resolve_report_file(&session, lalrpop_file)),
    );
    process_file_into(session, lalrpop_file, &rs_file, &report_file)
}
/// Computes the path of the generated parser (`.rs`) for a grammar file.
fn resolve_rs_file(session: &Session, lalrpop_file: &Path) -> io::Result<PathBuf> {
    gen_resolve_file(session, lalrpop_file, "rs")
}
/// Computes the path of the `.report` file for a grammar file.
fn resolve_report_file(session: &Session, lalrpop_file: &Path) -> io::Result<PathBuf> {
    gen_resolve_file(session, lalrpop_file, "report")
}
fn gen_resolve_file(session: &Session, lalrpop_file: &Path, ext: &str) -> io::Result<PathBuf> {
let in_dir = if let Some(ref d) = session.in_dir {
d.as_path()
} else {
Path::new(".")
};
let out_dir = if let Some(ref d) = session.out_dir {
d.as_path()
} else {
in_dir
};
// If the lalrpop file is not in in_dir, the result is that the
// .rs file is created in the same directory as the lalrpop file
// for compatibility reasons
Ok(out_dir
.join(lalrpop_file.strip_prefix(&in_dir).unwrap_or(lalrpop_file))
.with_extension(ext))
}
/// Regenerates `rs_file` (and, when enabled, `report_file`) from
/// `lalrpop_file`, unless the existing output is up to date and the
/// session does not force a rebuild. The generated file is kept
/// read-only between builds.
fn process_file_into(
    session: Rc<Session>,
    lalrpop_file: &Path,
    rs_file: &Path,
    report_file: &Path,
) -> io::Result<()> {
    if session.force_build || try!(needs_rebuild(&lalrpop_file, &rs_file)) {
        log!(
            session,
            Informative,
            "processing file `{}`",
            lalrpop_file.to_string_lossy()
        );
        if let Some(parent) = rs_file.parent() {
            try!(fs::create_dir_all(parent));
        }
        // Clear the read-only bit set by a previous run before
        // deleting the stale output.
        try!(make_read_only(&rs_file, false));
        try!(remove_old_file(&rs_file));
        // Load the LALRPOP source text for this file:
        let file_text = Rc::new(try!(FileText::from_path(lalrpop_file.to_path_buf())));
        // Store the session and file-text in TLS -- this is not
        // intended to be used in this high-level code, but it gives
        // easy access to this information pervasively in the
        // low-level LR(1) and grammar normalization code. This is
        // particularly useful for error-reporting.
        let _tls = Tls::install(session.clone(), file_text.clone());
        // Do the LALRPOP processing itself and write the resulting
        // buffer into a file. We use a buffer so that if LR(1)
        // generation fails at some point, we don't leave a partial
        // file behind.
        {
            let grammar = try!(parse_and_normalize_grammar(&session, &file_text));
            let buffer = try!(emit_recursive_ascent(&session, &grammar, &report_file));
            let mut output_file = try!(fs::File::create(&rs_file));
            // Version header first so `needs_rebuild` can detect
            // output from other LALRPOP versions.
            try!(writeln!(output_file, "{}", LALRPOP_VERSION_HEADER));
            try!(output_file.write_all(&buffer));
        }
        try!(make_read_only(&rs_file, true));
    }
    Ok(())
}
/// Deletes a previously generated file, treating "file absent" as
/// success.
///
/// A missing file surfaces as `NotFound` on Unix but as
/// `PermissionDenied` on Windows, so both error kinds are swallowed.
fn remove_old_file(rs_file: &Path) -> io::Result<()> {
    match fs::remove_file(rs_file) {
        Ok(()) => Ok(()),
        Err(ref e)
            if e.kind() == io::ErrorKind::NotFound
                || e.kind() == io::ErrorKind::PermissionDenied =>
        {
            Ok(())
        }
        Err(e) => Err(e),
    }
}
/// Decides whether `rs_file` must be regenerated from `lalrpop_file`:
/// true when the output does not exist, when the grammar is at least
/// as new as the output, or when the output's first line names a
/// different LALRPOP version.
fn needs_rebuild(lalrpop_file: &Path, rs_file: &Path) -> io::Result<bool> {
    return match fs::metadata(&rs_file) {
        Ok(rs_metadata) => {
            let lalrpop_metadata = try!(fs::metadata(&lalrpop_file));
            if compare_modification_times(&lalrpop_metadata, &rs_metadata) {
                return Ok(true);
            }
            compare_lalrpop_version(rs_file)
        }
        Err(e) => match e.kind() {
            // Missing output simply means "build it".
            io::ErrorKind::NotFound => Ok(true),
            _ => Err(e),
        },
    };
    // Platform-specific modification-time comparison; `>=` errs on the
    // side of rebuilding when timestamps are equal.
    #[cfg(unix)]
    fn compare_modification_times(
        lalrpop_metadata: &fs::Metadata,
        rs_metadata: &fs::Metadata,
    ) -> bool {
        use std::os::unix::fs::MetadataExt;
        lalrpop_metadata.mtime() >= rs_metadata.mtime()
    }
    #[cfg(windows)]
    fn compare_modification_times(
        lalrpop_metadata: &fs::Metadata,
        rs_metadata: &fs::Metadata,
    ) -> bool {
        use std::os::windows::fs::MetadataExt;
        lalrpop_metadata.last_write_time() >= rs_metadata.last_write_time()
    }
    // On targets that are neither unix nor windows we cannot compare
    // timestamps, so conservatively always rebuild.
    #[cfg(not(any(unix, windows)))]
    fn compare_modification_times(
        lalrpop_metadata: &fs::Metadata,
        rs_metadata: &fs::Metadata,
    ) -> bool {
        true
    }
    // Rebuild when the generated file's first line differs from this
    // version's LALRPOP_VERSION_HEADER.
    fn compare_lalrpop_version(rs_file: &Path) -> io::Result<bool> {
        let mut input_str = String::new();
        let mut f = io::BufReader::new(try!(fs::File::open(&rs_file)));
        try!(f.read_line(&mut input_str));
        Ok(input_str.trim() != LALRPOP_VERSION_HEADER)
    }
}
/// Toggles the read-only permission bit on `rs_file`.
///
/// Missing files are ignored so the very first build (when no
/// generated file exists yet) does not fail.
fn make_read_only(rs_file: &Path, ro: bool) -> io::Result<()> {
    if !rs_file.is_file() {
        return Ok(());
    }
    let mut permissions = try!(fs::metadata(&rs_file)).permissions();
    permissions.set_readonly(ro);
    fs::set_permissions(&rs_file, permissions)
}
/// Recursively collects every file with the `.lalrpop` extension
/// beneath `root_dir`.
///
/// Subdirectories are searched depth-first; within a directory, paths
/// appear in whatever order `fs::read_dir` yields them.
fn lalrpop_files<P: AsRef<Path>>(root_dir: P) -> io::Result<Vec<PathBuf>> {
    let mut result = vec![];
    for entry in try!(fs::read_dir(root_dir)) {
        let entry = try!(entry);
        let file_type = try!(entry.file_type());
        let path = entry.path();
        if file_type.is_dir() {
            // Recurse into subdirectories and accumulate their matches.
            result.extend(try!(lalrpop_files(&path)));
        }
        // Check the extension once via `map_or` instead of the
        // original `extension().is_some() && extension().unwrap()`,
        // which called `extension()` twice and kept a panic path.
        if file_type.is_file() && path.extension().map_or(false, |ext| ext == "lalrpop") {
            result.push(path);
        }
    }
    Ok(result)
}
/// Parses `file_text` into a grammar and normalizes it. Every parse,
/// tokenizer, or normalization error is turned into a human-readable
/// report followed by process exit (via `report_error`, which does
/// not return).
fn parse_and_normalize_grammar(session: &Session, file_text: &FileText) -> io::Result<r::Grammar> {
    let grammar = match parser::parse_grammar(file_text.text()) {
        Ok(grammar) => grammar,
        Err(ParseError::InvalidToken { location }) => {
            let ch = file_text.text()[location..].chars().next().unwrap();
            report_error(
                &file_text,
                pt::Span(location, location),
                &format!("invalid character `{}`", ch),
            );
        }
        // `token: None` means the parser ran out of input.
        Err(ParseError::UnrecognizedToken {
            token: None,
            expected: _,
        }) => {
            let len = file_text.text().len();
            report_error(
                &file_text,
                pt::Span(len, len),
                &format!("unexpected end of file"),
            );
        }
        Err(ParseError::UnrecognizedToken {
            token: Some((lo, _, hi)),
            expected,
        }) => {
            let _ = expected; // didn't implement this yet :)
            let text = &file_text.text()[lo..hi];
            report_error(
                &file_text,
                pt::Span(lo, hi),
                &format!("unexpected token: `{}`", text),
            );
        }
        Err(ParseError::ExtraToken { token: (lo, _, hi) }) => {
            let text = &file_text.text()[lo..hi];
            report_error(
                &file_text,
                pt::Span(lo, hi),
                &format!("extra token at end of input: `{}`", text),
            );
        }
        // Tokenizer errors arrive as `User` errors; map each code to
        // its message.
        Err(ParseError::User { error }) => {
            let string = match error.code {
                tok::ErrorCode::UnrecognizedToken => "unrecognized token",
                tok::ErrorCode::UnterminatedEscape => "unterminated escape; missing '`'?",
                tok::ErrorCode::UnterminatedStringLiteral => {
                    "unterminated string literal; missing `\"`?"
                }
                tok::ErrorCode::UnterminatedCharacterLiteral => {
                    "unterminated character literal; missing `'`?"
                }
                tok::ErrorCode::UnterminatedAttribute => "unterminated #! attribute; missing `]`?",
                tok::ErrorCode::ExpectedStringLiteral => "expected string literal; missing `\"`?",
                tok::ErrorCode::UnterminatedCode => {
                    "unterminated code block; perhaps a missing `;`, `)`, `]` or `}`?"
                }
            };
            report_error(
                &file_text,
                pt::Span(error.location, error.location + 1),
                string,
            )
        }
    };
    match normalize::normalize(session, grammar) {
        Ok(grammar) => Ok(grammar),
        Err(error) => report_error(&file_text, error.span, &error.message),
    }
}
/// Prints `message` (prefixed by its source span) to stdout,
/// highlights the offending region on stderr, and aborts the process
/// with exit code 1.
fn report_error(file_text: &FileText, span: pt::Span, message: &str) -> ! {
    println!("{} error: {}", file_text.span_str(span), message);
    let stderr = io::stderr();
    file_text.highlight(span, &mut stderr.lock()).unwrap();
    exit(1);
}
/// Renders a batch of diagnostics as one paragraph list and writes it
/// to the terminal via `report_content`.
fn report_messages(messages: Vec<Message>) -> term::Result<()> {
    let mut builder = InlineBuilder::new().begin_paragraphs();
    for message in messages {
        builder = builder.push(Box::new(message));
    }
    report_content(&*builder.end().end())
}
/// Draws `content` onto an 80-column canvas and writes it to stdout,
/// using ANSI styling when the configuration and terminal permit it.
fn report_content(content: &Content) -> term::Result<()> {
    // FIXME -- can we query the size of the terminal somehow?
    let canvas = content.emit_to_canvas(80);
    let colorize = match Tls::session().color_config {
        ColorConfig::Yes => true,
        ColorConfig::No => false,
        ColorConfig::IfTty => atty::is(atty::Stream::Stdout),
    };
    if colorize {
        if let Some(mut styled) = term::stdout() {
            return canvas.write_to(&mut *styled);
        }
    }
    // Styling is off or no terminfo is available: fall back to a
    // formatting-free terminal over plain stdout.
    let plain = io::stdout();
    let mut plain = FakeTerminal::new(plain.lock());
    canvas.write_to(&mut plain)
}
/// Forwards module-attribute emission to the `RustWrite` helper.
fn emit_module_attributes<W: Write>(
    grammar: &r::Grammar,
    rust: &mut RustWrite<W>,
) -> io::Result<()> {
    rust.write_module_attributes(grammar)
}
/// Forwards `use`-statement emission (with an empty prefix) to the
/// `RustWrite` helper.
fn emit_uses<W: Write>(grammar: &r::Grammar, rust: &mut RustWrite<W>) -> io::Result<()> {
    rust.write_uses("", grammar)
}
/// Generates the complete parser source for `grammar`: module
/// attributes and uses, one parser module per public nonterminal
/// (built with the configured codegen strategy), the intern-token
/// module when present, all action fns, and the `__ToTriple` trait.
/// Returns the generated source as a byte buffer; LR(1) construction
/// failures are reported and abort the process.
fn emit_recursive_ascent(
    session: &Session,
    grammar: &r::Grammar,
    report_file: &Path,
) -> io::Result<Vec<u8>> {
    let mut rust = RustWrite::new(vec![]);
    // We generate a module structure like this:
    //
    // ```
    // mod <output-file> {
    //     // For each public symbol:
    //     pub fn parse_XYZ();
    //     mod __XYZ { ... }
    //
    //     // For each bit of action code:
    //     <action-code>
    // }
    // ```
    //
    // Note that the action code goes in the outer module.  This is
    // intentional because it means that the foo.lalrpop file serves
    // as a module in the rust hierarchy, so if the action code
    // includes things like `super::` it will resolve in the natural
    // way.
    try!(emit_module_attributes(grammar, &mut rust));
    try!(emit_uses(grammar, &mut rust));
    // A grammar without public symbols cannot produce any parser.
    if grammar.start_nonterminals.is_empty() {
        println!("Error: no public symbols declared in grammar");
        exit(1);
    }
    for (user_nt, start_nt) in &grammar.start_nonterminals {
        // We generate these, so there should always be exactly 1
        // production. Otherwise the LR(1) algorithm doesn't know
        // where to stop!
        assert_eq!(grammar.productions_for(start_nt).len(), 1);
        log!(
            session,
            Verbose,
            "Building states for public nonterminal `{}`",
            user_nt
        );
        let _lr1_tls = lr1::Lr1Tls::install(grammar.terminals.clone());
        let lr1result = lr1::build_states(&grammar, start_nt.clone());
        if session.emit_report {
            let mut output_report_file = try!(fs::File::create(&report_file));
            try!(lr1::generate_report(&mut output_report_file, &lr1result));
        }
        let states = match lr1result {
            Ok(states) => states,
            Err(error) => {
                let messages = lr1::report_error(&grammar, &error);
                let _ = report_messages(messages);
                exit(1) // FIXME -- propagate up instead of calling `exit`
            }
        };
        // Emit the parser module using whichever code-generation
        // strategy the grammar selected.
        match grammar.algorithm.codegen {
            r::LrCodeGeneration::RecursiveAscent => try!(lr1::codegen::ascent::compile(
                &grammar,
                user_nt.clone(),
                start_nt.clone(),
                &states,
                "super",
                &mut rust,
            )),
            r::LrCodeGeneration::TableDriven => try!(lr1::codegen::parse_table::compile(
                &grammar,
                user_nt.clone(),
                start_nt.clone(),
                &states,
                "super",
                &mut rust,
            )),
            r::LrCodeGeneration::TestAll => try!(lr1::codegen::test_all::compile(
                &grammar,
                user_nt.clone(),
                start_nt.clone(),
                &states,
                &mut rust,
            )),
        }
        // Re-export the generated parser under the user's visibility.
        rust!(
            rust,
            "{}use self::{}parse{}::{}Parser;",
            grammar.nonterminals[&user_nt].visibility,
            grammar.prefix,
            start_nt,
            user_nt
        );
    }
    if let Some(ref intern_token) = grammar.intern_token {
        try!(intern_token::compile(&grammar, intern_token, &mut rust));
        rust!(rust, "pub use self::{}intern_token::Token;", grammar.prefix);
    }
    try!(action::emit_action_code(grammar, &mut rust));
    try!(emit_to_triple_trait(grammar, &mut rust));
    Ok(rust.into_inner())
}
/// Emits the `__ToTriple` trait plus impls that normalize the lexer's
/// output into `(location, token, location)` triples. When the
/// grammar has no location type, `()` stands in for the locations.
fn emit_to_triple_trait<W: Write>(grammar: &r::Grammar, rust: &mut RustWrite<W>) -> io::Result<()> {
    #![allow(non_snake_case)]
    // L/T/E mirror the generated code's location, token, and error
    // type strings.
    let L = grammar.types.terminal_loc_type();
    let T = grammar.types.terminal_token_type();
    let E = grammar.types.error_type();
    let mut user_type_parameters = String::new();
    for type_parameter in &grammar.type_parameters {
        user_type_parameters.push_str(&format!("{}, ", type_parameter));
    }
    rust!(rust, "");
    rust!(
        rust,
        "pub trait {}ToTriple<{}> {{",
        grammar.prefix,
        user_type_parameters,
    );
    rust!(rust, "type Error;");
    rust!(
        rust,
        "fn to_triple(value: Self) -> Result<({},{},{}),Self::Error>;",
        L,
        T,
        L,
    );
    rust!(rust, "}}");
    rust!(rust, "");
    if grammar.types.opt_terminal_loc_type().is_some() {
        // Locations available: accept `(L, T, L)` directly and
        // `Result<(L, T, L), E>` by pass-through.
        rust!(
            rust,
            "impl<{}> {}ToTriple<{}> for ({}, {}, {}) {{",
            user_type_parameters,
            grammar.prefix,
            user_type_parameters,
            L,
            T,
            L,
        );
        rust!(rust, "type Error = {};", E);
        rust!(
            rust,
            "fn to_triple(value: Self) -> Result<({},{},{}),{}> {{",
            L,
            T,
            L,
            E,
        );
        rust!(rust, "Ok(value)");
        rust!(rust, "}}");
        rust!(rust, "}}");
        rust!(
            rust,
            "impl<{}> {}ToTriple<{}> for Result<({}, {}, {}),{}> {{",
            user_type_parameters,
            grammar.prefix,
            user_type_parameters,
            L,
            T,
            L,
            E,
        );
        rust!(rust, "type Error = {};", E);
        rust!(
            rust,
            "fn to_triple(value: Self) -> Result<({},{},{}),{}> {{",
            L,
            T,
            L,
            E,
        );
        rust!(rust, "value");
        rust!(rust, "}}");
        rust!(rust, "}}");
    } else {
        // No location type: accept bare tokens and `Result<T, E>`,
        // substituting `()` for both locations.
        rust!(
            rust,
            "impl<{}> {}ToTriple<{}> for {} {{",
            user_type_parameters,
            grammar.prefix,
            user_type_parameters,
            T,
        );
        rust!(rust, "type Error = {};", E);
        rust!(
            rust,
            "fn to_triple(value: Self) -> Result<((),{},()),{}> {{",
            T,
            E,
        );
        rust!(rust, "Ok(((), value, ()))");
        rust!(rust, "}}");
        rust!(rust, "}}");
        rust!(
            rust,
            "impl<{}> {}ToTriple<{}> for Result<({}),{}> {{",
            user_type_parameters,
            grammar.prefix,
            user_type_parameters,
            T,
            E,
        );
        rust!(rust, "type Error = {};", E);
        rust!(
            rust,
            "fn to_triple(value: Self) -> Result<((),{},()),{}> {{",
            T,
            E,
        );
        rust!(rust, "value.map(|v| ((), v, ()))");
        rust!(rust, "}}");
        rust!(rust, "}}");
    }
    Ok(())
}

View File

@ -1,15 +0,0 @@
use std::collections::BTreeMap;
pub use std::collections::btree_map::Entry;
/// In general, we avoid coding directly against any particular map,
/// but rather build against `util::Map` (and `util::map` to construct
/// an instance). This should be a deterministic map, such that two
/// runs of LALRPOP produce the same output, but otherwise it doesn't
/// matter much. I'd probably prefer to use `HashMap` with an
/// alternative hasher, but that's not stable.
pub type Map<K, V> = BTreeMap<K, V>;

/// Builds an empty `Map`.
pub fn map<K: Ord, V>() -> Map<K, V> {
    BTreeMap::new()
}

View File

@ -1,7 +0,0 @@
mod map;
mod multimap;
mod set;
pub use self::map::{map, Entry, Map};
pub use self::multimap::{Collection, Multimap};
pub use self::set::{set, Set};

View File

@ -1,140 +0,0 @@
use std::collections::btree_map;
use std::default::Default;
use std::iter::FromIterator;
use super::map::{map, Map};
use super::set::Set;
/// A deterministic map from keys to collections of values, backed by
/// the crate's `Map` (a `BTreeMap`).
pub struct Multimap<K, C: Collection> {
    // One collection per key; created lazily on the first push.
    map: Map<K, C>,
}
/// A container that `Multimap` can push values into; implemented
/// below for `()`, `Vec`, `Set`, and nested `Multimap`s.
pub trait Collection: Default {
    type Item;
    /// Push `item` into the collection and return `true` if
    /// collection changed.
    fn push(&mut self, item: Self::Item) -> bool;
}
impl<K: Ord, C: Collection> Multimap<K, C> {
    /// Creates an empty multimap.
    pub fn new() -> Multimap<K, C> {
        Multimap { map: map() }
    }

    /// Returns `true` when no key has been pushed yet.
    pub fn is_empty(&self) -> bool {
        self.map.is_empty()
    }

    /// Push `value` to the collection associated with `key`. Returns
    /// true if the collection was changed from the default.
    pub fn push(&mut self, key: K, value: C::Item) -> bool {
        let mut created = false;
        let changed = self.map
            .entry(key)
            .or_insert_with(|| {
                created = true;
                C::default()
            })
            .push(value);
        created || changed
    }

    /// Borrows the collection stored under `key`, if any.
    pub fn get(&self, key: &K) -> Option<&C> {
        self.map.get(key)
    }

    /// Iterates over `(&key, &collection)` pairs in key order.
    pub fn iter(&self) -> btree_map::Iter<K, C> {
        self.map.iter()
    }

    /// Consumes the multimap, yielding `(key, collection)` pairs in
    /// key order.
    pub fn into_iter(self) -> btree_map::IntoIter<K, C> {
        self.map.into_iter()
    }
}
impl<K: Ord, C: Collection> IntoIterator for Multimap<K, C> {
    type Item = (K, C);
    type IntoIter = btree_map::IntoIter<K, C>;
    // Delegates to the inherent `into_iter` above; inherent methods
    // take precedence over trait methods, so this does not recurse.
    fn into_iter(self) -> btree_map::IntoIter<K, C> {
        self.into_iter()
    }
}
impl<'iter, K: Ord, C: Collection> IntoIterator for &'iter Multimap<K, C> {
    type Item = (&'iter K, &'iter C);
    type IntoIter = btree_map::Iter<'iter, K, C>;
    // Borrowing iteration simply delegates to the inherent `iter`.
    fn into_iter(self) -> btree_map::Iter<'iter, K, C> {
        self.iter()
    }
}
impl<K: Ord, C: Collection> FromIterator<(K, C::Item)> for Multimap<K, C> {
    /// Collects `(key, value)` pairs into a multimap, pushing each
    /// value into its key's collection.
    fn from_iter<T>(iterator: T) -> Self
    where
        T: IntoIterator<Item = (K, C::Item)>,
    {
        iterator
            .into_iter()
            .fold(Multimap::new(), |mut acc, (key, value)| {
                acc.push(key, value);
                acc
            })
    }
}
// The unit collection stores nothing, so pushing never changes it;
// `Multimap<K, ()>` therefore reports a change only when the key is
// new (see the `push_nil` test below).
impl Collection for () {
    type Item = ();
    fn push(&mut self, _item: ()) -> bool {
        false
    }
}
impl<T> Collection for Vec<T> {
    type Item = T;
    fn push(&mut self, item: T) -> bool {
        // `self.push` resolves to the inherent `Vec::push`, not this
        // trait method.
        self.push(item);
        true // always changes
    }
}
impl<T: Ord> Collection for Set<T> {
    type Item = T;
    fn push(&mut self, item: T) -> bool {
        // `BTreeSet::insert` already reports whether the set changed.
        self.insert(item)
    }
}
// `Default` is required by the `Collection` trait bound, so that
// multimaps can themselves nest as collections (see impl below).
impl<K: Ord, C: Collection> Default for Multimap<K, C> {
    fn default() -> Self {
        Multimap::new()
    }
}
// Nesting: a multimap can serve as the value collection of another
// multimap, with `(key, item)` pairs as its pushed items.
impl<K: Ord, C: Collection<Item = I>, I> Collection for Multimap<K, C> {
    type Item = (K, I);
    fn push(&mut self, item: (K, I)) -> bool {
        let (key, value) = item;
        self.push(key, value)
    }
}
#[test]
fn push() {
    // `Set` collections report a change only for newly seen values.
    let mut m: Multimap<u32, Set<char>> = Multimap::new();
    assert!(m.push(0, 'a'));
    assert!(m.push(0, 'b'));
    assert!(!m.push(0, 'b'));
    assert!(m.push(1, 'a'));
}
#[test]
fn push_nil() {
    let mut m: Multimap<u32, ()> = Multimap::new();
    // With `()` collections, only the first push of each *key* counts
    // as a change; subsequent pushes are no-ops.
    let steps = [(0, true), (0, false), (1, true), (0, false)];
    for &(key, expected) in &steps {
        assert_eq!(m.push(key, ()), expected);
    }
}

View File

@ -1,8 +0,0 @@
use std::collections::BTreeSet;
/// As `Map`, but for sets.
pub type Set<K> = BTreeSet<K>;

/// Convenience constructor for an empty `Set`.
pub fn set<K: Ord>() -> Set<K> {
    BTreeSet::new()
}

View File

@ -1,144 +0,0 @@
use grammar::parse_tree as pt;
use std::fmt::{Display, Error, Formatter};
use std::fs::File;
use std::path::PathBuf;
use std::io::{self, Read, Write};
/// A source file's contents plus a precomputed index of line-start
/// offsets, used to map byte positions to line/column pairs and to
/// print highlighted spans.
pub struct FileText {
    path: PathBuf,        // origin of the text; displayed by `span_str`
    input_str: String,    // complete file contents
    newlines: Vec<usize>, // byte offset of the first char of each line; newlines[0] == 0
}
impl FileText {
pub fn from_path(path: PathBuf) -> io::Result<FileText> {
let mut input_str = String::new();
let mut f = try!(File::open(&path));
try!(f.read_to_string(&mut input_str));
Ok(FileText::new(path, input_str))
}
pub fn new(path: PathBuf, input_str: String) -> FileText {
let newline_indices: Vec<usize> = {
let input_indices = input_str
.as_bytes()
.iter()
.enumerate()
.filter(|&(_, &b)| b == ('\n' as u8))
.map(|(i, _)| i + 1); // index of first char in the line
Some(0).into_iter().chain(input_indices).collect()
};
FileText {
path: path,
input_str: input_str,
newlines: newline_indices,
}
}
#[cfg(test)]
pub fn test() -> FileText {
Self::new(PathBuf::from("test.lalrpop"), String::from(""))
}
pub fn text(&self) -> &String {
&self.input_str
}
pub fn span_str(&self, span: pt::Span) -> String {
let (start_line, start_col) = self.line_col(span.0);
let (end_line, end_col) = self.line_col(span.1);
format!(
"{}:{}:{}: {}:{}",
self.path.display(),
start_line + 1,
start_col + 1,
end_line + 1,
end_col
)
}
fn line_col(&self, pos: usize) -> (usize, usize) {
let num_lines = self.newlines.len();
let line = (0..num_lines)
.filter(|&i| self.newlines[i] > pos)
.map(|i| i - 1)
.next()
.unwrap_or(num_lines - 1);
// offset of the first character in `line`
let line_offset = self.newlines[line];
// find the column; use `saturating_sub` in case `pos` is the
// newline itself, which we'll call column 0
let col = pos - line_offset;
(line, col)
}
fn line_text(&self, line_num: usize) -> &str {
let start_offset = self.newlines[line_num];
if line_num == self.newlines.len() - 1 {
&self.input_str[start_offset..]
} else {
let end_offset = self.newlines[line_num + 1];
&self.input_str[start_offset..end_offset - 1]
}
}
pub fn highlight(&self, span: pt::Span, out: &mut Write) -> io::Result<()> {
let (start_line, start_col) = self.line_col(span.0);
let (end_line, end_col) = self.line_col(span.1);
// (*) use `saturating_sub` since the start line could be the newline
// itself, in which case we'll call it column zero
// span is within one line:
if start_line == end_line {
let text = self.line_text(start_line);
try!(writeln!(out, " {}", text));
if end_col - start_col <= 1 {
try!(writeln!(out, " {}^", Repeat(' ', start_col)));
} else {
let width = end_col - start_col;
try!(writeln!(
out,
" {}~{}~",
Repeat(' ', start_col),
Repeat('~', width.saturating_sub(2))
));
}
} else {
// span is across many lines, find the maximal width of any of those
let line_strs: Vec<_> = (start_line..end_line + 1)
.map(|i| self.line_text(i))
.collect();
let max_len = line_strs.iter().map(|l| l.len()).max().unwrap();
try!(writeln!(
out,
" {}{}~+",
Repeat(' ', start_col),
Repeat('~', max_len - start_col)
));
for line in &line_strs[..line_strs.len() - 1] {
try!(writeln!(out, "| {0:<1$} |", line, max_len));
}
try!(writeln!(out, "| {}", line_strs[line_strs.len() - 1]));
try!(writeln!(out, "+~{}", Repeat('~', end_col)));
}
Ok(())
}
}
/// Displays a character (`.0`) repeated a given number of times (`.1`).
struct Repeat(char, usize);

impl Display for Repeat {
    fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {
        // Emit the character exactly `self.1` times, stopping at the
        // first formatter error.
        (0..self.1).try_for_each(|_| write!(fmt, "{}", self.0))
    }
}

View File

@ -1,78 +0,0 @@
//! Generate valid parse trees.
use grammar::repr::*;
use rand::{self, Rng};
use std::iter::Iterator;
/// A concrete derivation: interior nodes are nonterminals (with one
/// child per symbol of the production chosen for them), leaves are
/// terminals.
#[derive(PartialEq, Eq)]
pub enum ParseTree {
    Nonterminal(NonterminalString, Vec<ParseTree>),
    Terminal(TerminalString),
}
/// Produces a random derivation of `symbol` in `grammar`, retrying
/// whenever a random walk runs too deep.
pub fn random_parse_tree(grammar: &Grammar, symbol: NonterminalString) -> ParseTree {
    let mut gen = Generator {
        grammar,
        rng: rand::thread_rng(),
        depth: 0,
    };
    // Sometimes the random walk exceeds the depth limit (which would
    // otherwise overflow the stack); reset the counter and try again.
    loop {
        match gen.nonterminal(symbol.clone()) {
            Some(tree) => return tree,
            None => gen.depth = 0,
        }
    }
}
/// State for one sequence of random-derivation attempts.
struct Generator<'grammar> {
    grammar: &'grammar Grammar,
    rng: rand::ThreadRng,
    depth: u32, // nonterminal expansions performed in the current attempt
}
// Abandon an attempt once this many nonterminals have been expanded, so
// the recursive walk cannot overflow the stack.
const MAX_DEPTH: u32 = 10000;
impl<'grammar> Generator<'grammar> {
    /// Expands `nt` by picking one of its productions uniformly at
    /// random; returns `None` once the walk exceeds `MAX_DEPTH`.
    fn nonterminal(&mut self, nt: NonterminalString) -> Option<ParseTree> {
        if self.depth > MAX_DEPTH {
            return None;
        }
        self.depth += 1;
        let productions = self.grammar.productions_for(&nt);
        let index: usize = self.rng.gen_range(0, productions.len());
        let production = &productions[index];
        // Expand every symbol of the chosen production; if any child
        // expansion hits the depth limit, collecting Option<Vec<_>>
        // short-circuits the whole attempt to None.
        let trees: Option<Vec<_>> = production
            .symbols
            .iter()
            .map(|sym| self.symbol(sym.clone()))
            .collect();
        trees.map(|trees| ParseTree::Nonterminal(nt, trees))
    }
    /// A terminal becomes a leaf; a nonterminal recurses into `nonterminal`.
    fn symbol(&mut self, symbol: Symbol) -> Option<ParseTree> {
        match symbol {
            Symbol::Nonterminal(nt) => self.nonterminal(nt),
            Symbol::Terminal(t) => Some(ParseTree::Terminal(t)),
        }
    }
}
impl ParseTree {
    /// All terminals of the tree, collected left-to-right.
    pub fn terminals(&self) -> Vec<TerminalString> {
        let mut out = Vec::new();
        self.push_terminals(&mut out);
        out
    }
    /// Depth-first, left-to-right accumulation of leaf terminals.
    fn push_terminals(&self, out: &mut Vec<TerminalString>) {
        match *self {
            ParseTree::Terminal(ref t) => out.push(t.clone()),
            ParseTree::Nonterminal(_, ref children) => {
                for child in children {
                    child.push_terminals(out);
                }
            }
        }
    }
}

View File

@ -1,26 +0,0 @@
/// Recognized associated type for the token location
pub const LOCATION: &'static str = "Location";
/// Recognized associated type for custom errors
pub const ERROR: &'static str = "Error";
/// The lifetime parameter injected when we do not have an external token enum
pub const INPUT_LIFETIME: &'static str = "'input";
/// The parameter injected when we do not have an external token enum
pub const INPUT_PARAMETER: &'static str = "input";
/// The annotation to request inlining.
pub const INLINE: &'static str = "inline";
/// Annotation to request LALR.
pub const LALR: &'static str = "LALR";
/// Annotation to request table-driven-style code generation.
pub const TABLE_DRIVEN: &'static str = "table_driven";
/// Annotation to request recursive-ascent-style code generation.
pub const RECURSIVE_ASCENT: &'static str = "recursive_ascent";
/// Annotation to request test-all-style code generation.
pub const TEST_ALL: &'static str = "test_all";

View File

@ -1,7 +0,0 @@
//! The grammar definition.
pub mod consts; // well-known annotation and associated-type name strings
pub mod parse_tree;
pub mod pattern;
pub mod repr;
// NOTE(review): dead commented-out declaration — delete if `token` is not coming back.
// pub mod token;

Some files were not shown because too many files have changed in this diff Show More