[packages/thunderbird] up to 78.5.0
atler
atler at pld-linux.org
Wed Nov 18 20:47:53 CET 2020
commit c82906ead315d93de1cb04763de92e5e0b033fdb
Author: Jan Palus <atler at pld-linux.org>
Date: Wed Nov 18 20:47:12 2020 +0100
up to 78.5.0
rust-1.47.patch | 35276 -----------------------------------------------------
thunderbird.spec | 138 +-
2 files changed, 68 insertions(+), 35346 deletions(-)
---
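[note after the cut line, not part of the commit message]
Most of the spec changes below are mechanical: Version moves from 78.4.3 to 78.5.0 and every "# SourceN-md5:" comment is refreshed for the 78.5.0 locale xpi files; rust-1.47.patch is dropped, presumably because the 78.5.0 source no longer needs it. A minimal sketch of how such checksum comments could be regenerated after a version bump is shown here; this is a hypothetical helper, not PLD's builder tooling, and it assumes rpmspec, curl and md5sum are available and that the spec parses with plain rpmspec:

    # hypothetical helper: recompute "# SourceN-md5:" lines for all URL sources
    spec=thunderbird.spec
    # expand %{version}/%{name} macros, keep only SourceN tags that point at URLs
    rpmspec -P "$spec" | grep -E '^Source[0-9]+:[[:space:]]*(http|ftp)' | \
    while read -r tag url; do
        n=${tag#Source}; n=${n%:}                      # "Source100:" -> "100"
        md5=$(curl -sfL "$url" | md5sum | cut -d' ' -f1)
        printf '# Source%s-md5:\t%s\n' "$n" "$md5"      # matches the spec's comment style
    done

The printed lines would then be pasted (or sed'ed) over the old "# SourceN-md5:" comments, which is what the bulk of this diff amounts to.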
diff --git a/thunderbird.spec b/thunderbird.spec
index 7c691f5..8b4e2e2 100644
--- a/thunderbird.spec
+++ b/thunderbird.spec
@@ -41,151 +41,150 @@ curl -s $U | sed -ne 's,.*href="\([^"]\+\)/".*,'"$U"'xpi/\1.xpi,p'
Summary: Thunderbird - email client
Summary(pl.UTF-8): Thunderbird - klient poczty
Name: thunderbird
-Version: 78.4.3
+Version: 78.5.0
Release: 1
License: MPL v2.0
Group: X11/Applications/Mail
Source0: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/source/%{name}-%{version}.source.tar.xz
-# Source0-md5: 2357e6d9d59e1dcd6d42304aca6c7aad
+# Source0-md5: 93c340699825f86be853b8fc5439e0f3
Source1: %{name}.desktop
Source2: %{name}.sh
Source100: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/af.xpi
-# Source100-md5: 1ba993887d9c279959e35e7c018e5ff9
+# Source100-md5: 4c57b8a7f23ff5da42bc148101078474
Source101: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ar.xpi
-# Source101-md5: 43a335f4bc87ea65c6062d9edf8070bd
+# Source101-md5: c16b735a816bab2b39384abaebc84787
Source102: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ast.xpi
-# Source102-md5: a2917afec784e6fdf627a22bd59d03da
+# Source102-md5: 5cbae3a5dfc210b0f6cba532ac80d6e5
Source103: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/be.xpi
-# Source103-md5: d6414c080e00312167b51ada7cb5e7bf
+# Source103-md5: 3f558a5d5d06a9b71392ba318005d275
Source104: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/bg.xpi
-# Source104-md5: 68ea50ac89802fe9dfa873485f0b9b81
+# Source104-md5: 14b95ec9e42b05d31eb56ea393c2f96f
Source105: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/br.xpi
-# Source105-md5: e06030f539b717a1b637293cc28296b6
+# Source105-md5: 3285c4b6943001284bccd2ee78054bad
Source106: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ca.xpi
-# Source106-md5: f6e3afa4399afcb4f87498a8cdf3af3b
+# Source106-md5: 60ee8df57ade264aadf07d6acba74bee
Source107: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/cak.xpi
-# Source107-md5: c9cd0736918af7e1b1fad7aa813ab5e9
+# Source107-md5: 0f0000b195b2609f43d5966ad6db708d
Source108: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/cs.xpi
-# Source108-md5: 04392ccc6a05a50179a3fe15ca32ecf4
+# Source108-md5: d507677e76db45177b4cb74b64d095d4
Source109: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/cy.xpi
-# Source109-md5: 2a9084e9ab279b8abb020ca92049e433
+# Source109-md5: 644fa4a4b47840b2776dc66245f57ce3
Source110: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/da.xpi
-# Source110-md5: b7fae865528971cbd8a67f91be7a6cf9
+# Source110-md5: b7ddcd6ced9a6016792afca50136470f
Source111: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/de.xpi
-# Source111-md5: 33ea9724cda54cb68afb5db69fdfcc80
+# Source111-md5: b58e654beec6a9127138954759adbf8f
Source112: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/dsb.xpi
-# Source112-md5: f37763033560e6aa79dba24a4cd66f15
+# Source112-md5: d3b015d04cda7100f7f4d2d8b1e83ce2
Source113: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/el.xpi
-# Source113-md5: beeb8560b19986183c80259c1a64c392
+# Source113-md5: 506b83f4722bfce04258b65479a9e8c5
Source114: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/en-CA.xpi
-# Source114-md5: 1ec2acd173e0ed2abe53f23aa8c4af88
+# Source114-md5: 96abb17e619531be39b635c5d19bb30f
Source115: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/en-GB.xpi
-# Source115-md5: d1f8c39c08690f1acf1c0a8f184ba766
+# Source115-md5: f5020a817eddbff589c189f029f10366
Source116: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/en-US.xpi
-# Source116-md5: 0f6aa15022b48e9919bb502fd627e38d
+# Source116-md5: 10d1e85585b9d8a854615fc73bace499
Source117: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/es-AR.xpi
-# Source117-md5: 99711553817ece04586cee1aa656fd34
+# Source117-md5: ed44d70c40ddebe4436d50ff48373023
Source118: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/es-ES.xpi
-# Source118-md5: 8e259be5a8ae7086daffb2b84d82cdce
+# Source118-md5: 9a0edd9087726f42451b81584193a5f3
Source119: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/et.xpi
-# Source119-md5: 66ea4904ab60fd1f2b906be85e587fab
+# Source119-md5: ab428db46c226867a83a7232140caf42
Source120: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/eu.xpi
-# Source120-md5: c3a9c3fc2863b6221b12a098f5771740
+# Source120-md5: d9b41f2e85403179826e3ad19d769ec5
Source121: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/fa.xpi
-# Source121-md5: 0a88ad350471abb1c8d6ad76043ab556
+# Source121-md5: af6a05ed45fefb65288e0236cd812f2d
Source122: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/fi.xpi
-# Source122-md5: 627a46a1fba7847a0fc92219ebbd1add
+# Source122-md5: 3981d745369270bd6646a957acebdf06
Source123: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/fr.xpi
-# Source123-md5: aaf13c24a457cfceff00ea5c99602ee1
+# Source123-md5: 29b0195c6ab6710f61948590f24b8c0e
Source124: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/fy-NL.xpi
-# Source124-md5: 151ff97382f9db9df34fbaa6013b4425
+# Source124-md5: cced51d3440853c7f836c9e7f13598f5
Source125: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ga-IE.xpi
-# Source125-md5: 5b99b98978281ab126d23fb30981985a
+# Source125-md5: 8f5df8d515f3ddd2db71ad84fb55dcbc
Source126: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/gd.xpi
-# Source126-md5: 12c69f0cb24c8dcbe3ef027fceaedea5
+# Source126-md5: 11956dd7b6749a0f3849963c2ccd8126
Source127: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/gl.xpi
-# Source127-md5: e94fa4369bdca5d6c73956fb3067871a
+# Source127-md5: e3f1729bb01d943a148c1f3354006ad4
Source128: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/he.xpi
-# Source128-md5: 3fdd3a2960300c8cf41ed6443748bd29
+# Source128-md5: cef47c4c591e8906ed5d3c06ce089a4a
Source129: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/hr.xpi
-# Source129-md5: 3151344af2949d7186ab3e437fd7ac92
+# Source129-md5: 2b26cf8c5319df3b75f982e420af4f26
Source130: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/hsb.xpi
-# Source130-md5: 6dfa164093ece63daa17d9f328e06df4
+# Source130-md5: 5915e710a9378b8ee97a3b8d6c1680d1
Source131: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/hu.xpi
-# Source131-md5: ec832144b557a222c6693d06610bbd5e
+# Source131-md5: e24b2436555750b7dbf28abe643995ff
Source132: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/hy-AM.xpi
-# Source132-md5: 2cf61f01d5964aa49f433fe7065ceab2
+# Source132-md5: 37bd544318288fe31eadbbf3c2f9e66a
Source133: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/id.xpi
-# Source133-md5: 9294db0d42d062e047a79c90d1953389
+# Source133-md5: a4749953d96f399f0b161093765a29e7
Source134: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/is.xpi
-# Source134-md5: 1fe34bb029b972713cdb17c20a49ead8
+# Source134-md5: f7422eae7447c116ead2d7e44cf6be1a
Source135: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/it.xpi
-# Source135-md5: 76bcd4b0cd18d8d306a674ef91045f1d
+# Source135-md5: a206d8d7148b595fc9b64c3b41908fde
Source136: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ja.xpi
-# Source136-md5: 060e14144c56c975c6cd35b0f256caee
+# Source136-md5: 6fd0694d93072d0d6e4a8f7b016de9aa
Source137: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ka.xpi
-# Source137-md5: a5d340f4da1bf7cd1dfa08b0f6de9377
+# Source137-md5: baa3dbc5c85f90b64ebd84126e4738ee
Source138: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/kab.xpi
-# Source138-md5: 8b43efc3dc44210933c0eb43f030485b
+# Source138-md5: 1d026fa6fbae5a7b135ba9c84a182155
Source139: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/kk.xpi
-# Source139-md5: f75aa5a598defbea7ae1e41bc3b66c27
+# Source139-md5: 12c359defd2dfe6ac446b7be46ec25b9
Source140: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ko.xpi
-# Source140-md5: 9ea95d9fce3e37f932365941e7921601
+# Source140-md5: 83e6a9c998dd20681d3ad6770da2f87b
Source141: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/lt.xpi
-# Source141-md5: 54eb0722d5d25fe58060872e305054a3
+# Source141-md5: 5006fc7dee5a72b19ac6a87095d401a7
Source142: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ms.xpi
-# Source142-md5: 3ca0ff229f8146dacee4fb91e11c40c1
+# Source142-md5: 3daa775c38c2cce056ad2343104d69a7
Source143: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/nb-NO.xpi
-# Source143-md5: 9be26e0e6d242fff77b959e991d20c5d
+# Source143-md5: ead5ec675f735b9b9875f1afafdaa58c
Source144: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/nl.xpi
-# Source144-md5: e0d6edac4ddcdeede79403afa6aa2366
+# Source144-md5: 1292e00075f72f7d89d9c25c4a30fdc8
Source145: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/nn-NO.xpi
-# Source145-md5: 1018b411a4de2668c18dbb83995b6237
+# Source145-md5: 862b26e64ce0547d4581eea1a3bbf2ec
Source146: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/pa-IN.xpi
-# Source146-md5: 131482f2094c7c4c4341350ae35e92a5
+# Source146-md5: 2ef33b0b566c68d7df96b47313bab078
Source147: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/pl.xpi
-# Source147-md5: 9de9fbe56dcd98d6ab1368b475d6d7a4
+# Source147-md5: 765478acbc60206af143ea8395808d93
Source148: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/pt-BR.xpi
-# Source148-md5: a38a5d21d23cca0e060f5c37a2c56022
+# Source148-md5: 72b2df8c348450d1c956514b60be3821
Source149: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/pt-PT.xpi
-# Source149-md5: d8f37e35c9cde46674d651edd199603c
+# Source149-md5: 9bd66e70938525e9da370f79b882ba33
Source150: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/rm.xpi
-# Source150-md5: 132370cf2830b9029bf16cbab05def52
+# Source150-md5: 86ec6f4f44e21b8c2ce9166b4d9a3b1a
Source151: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ro.xpi
-# Source151-md5: 1b8c24caf8f8eebf3791f4ca7b60710a
+# Source151-md5: e5decfbb6e8247ee235f9994f43d1e71
Source152: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/ru.xpi
-# Source152-md5: 62e392ca0f7acde5b79295ad7a5bae35
+# Source152-md5: 1a58a8ac0f9727d61757fdc77d2abef4
Source153: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/si.xpi
-# Source153-md5: 41897e2acd24e2ee7d205ccc85d84dc9
+# Source153-md5: 4fafd392649ad1b722a7f1acb6505246
Source154: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/sk.xpi
-# Source154-md5: 5e4465bd6f369a9f2b26ef951266e9bd
+# Source154-md5: 524b90e2f79b22d847bec0bd33a656c7
Source155: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/sl.xpi
-# Source155-md5: f1b09628f444e4c28975b6ff29977393
+# Source155-md5: 7e2418ab38557429163edc44effc48e7
Source156: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/sq.xpi
-# Source156-md5: 5f5b456e40e908fb14b78882538a37a1
+# Source156-md5: 2adca49a770c656104356c1764f1a39d
Source157: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/sr.xpi
-# Source157-md5: de2eb5261487742131a9d59f1336b6e4
+# Source157-md5: 7bf40c50aca51ec13e9c721560f118a1
Source158: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/sv-SE.xpi
-# Source158-md5: 9d68dd8561d193f0f164aa67e0f924f1
+# Source158-md5: f54984a8c5084c73640658ae6907d62d
Source159: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/th.xpi
-# Source159-md5: 6536fe56844da2be8271495fd46a5b64
+# Source159-md5: 49489f4ada65bdff1362eecf116b3ce3
Source160: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/tr.xpi
-# Source160-md5: 432e81a20e0ef11825259e622af2585c
+# Source160-md5: db3078f916c17cb15713cddfb9ca2623
Source161: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/uk.xpi
-# Source161-md5: dc617db5bcef859b5eede7f94ad3a73e
+# Source161-md5: 3edb8629ae461025f075cd13408c34eb
Source162: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/uz.xpi
-# Source162-md5: 5fc113e954b2bb56a008ed2e83a77905
+# Source162-md5: fc55339e2661e011b7aa79293072c5bb
Source163: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/vi.xpi
-# Source163-md5: 630d99177e31e8c7cafdbf899b72f800
+# Source163-md5: 89597b42cd0b38383294e1d446e6dbf2
Source164: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/zh-CN.xpi
-# Source164-md5: a72e5d032863a420ab314319e3699291
+# Source164-md5: ac624236f25427217e0176539e80cc1f
Source165: http://releases.mozilla.org/pub/mozilla.org/thunderbird/releases/%{version}/linux-x86_64/xpi/zh-TW.xpi
-# Source165-md5: 67c4d354a0110b53188f2b0b550d0599
+# Source165-md5: 8d5063a2e549cffe12ac31956f3449a2
Patch0: prefs.patch
Patch1: no-subshell.patch
Patch2: enable-addons.patch
Patch3: %{name}-system-virtualenv.patch
-Patch4: rust-1.47.patch
URL: http://www.mozilla.org/projects/thunderbird/
BuildRequires: alsa-lib-devel
BuildRequires: autoconf2_13 >= 2.13
@@ -1365,7 +1364,6 @@ unpack() {
%patch1 -p1
%patch2 -p0
%patch3 -p2
-%patch4 -p1
%build
cp -p %{_datadir}/automake/config.* build/autoconf
diff --git a/rust-1.47.patch b/rust-1.47.patch
deleted file mode 100644
index 65441a1..0000000
--- a/rust-1.47.patch
+++ /dev/null
@@ -1,35276 +0,0 @@
-
-# HG changeset patch
-# User Emilio Cobos Álvarez <emilio at crisal.io>
-# Date 1599584448 0
-# Node ID 85c38ea4d34969797eb5d24265cd90cc6841e6ae
-# Parent 5aa243a2fe9d77578dd95ce3ab3a2aa6c1e92604
-Bug 1663715 - Update syn and proc-macro2 so that Firefox can build on Rust nightly again. r=froydnj, a=RyanVM
-
-Generated with:
-
- cargo update -p syn --precise 1.0.40
- ./mach vendor rust
-
-Rust issue: https://github.com/rust-lang/rust/issues/76482
-
-Differential Revision: https://phabricator.services.mozilla.com/D89473
-
-diff --git a/Cargo.lock b/Cargo.lock
---- a/Cargo.lock
-+++ b/Cargo.lock
-@@ -3712,19 +3712,19 @@ checksum = "ecd45702f76d6d3c75a80564378a
- dependencies = [
- "proc-macro2",
- "quote",
- "syn",
- ]
-
- [[package]]
- name = "proc-macro2"
--version = "1.0.5"
--source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
-+version = "1.0.20"
-+source = "registry+https://github.com/rust-lang/crates.io-index"
-+checksum = "175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"
- dependencies = [
- "unicode-xid",
- ]
-
- [[package]]
- name = "procedural-masquerade"
- version = "0.1.1"
- source = "registry+https://github.com/rust-lang/crates.io-index"
-@@ -4642,19 +4642,19 @@ dependencies = [
- "cc",
- "gleam",
- "glsl-to-cxx",
- "webrender_build",
- ]
-
- [[package]]
- name = "syn"
--version = "1.0.5"
--source = "registry+https://github.com/rust-lang/crates.io-index"
--checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
-+version = "1.0.40"
-+source = "registry+https://github.com/rust-lang/crates.io-index"
-+checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
- dependencies = [
- "proc-macro2",
- "quote",
- "unicode-xid",
- ]
-
- [[package]]
- name = "sync-guid"
-diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
---- a/third_party/rust/proc-macro2/.cargo-checksum.json
-+++ b/third_party/rust/proc-macro2/.cargo-checksum.json
-@@ -1,1 +1,1 @@
--{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
-\ No newline at end of file
-+{"files":{"Cargo.toml":"c20c4c52342e65ea11ad8382edc636e628e8f8c5ab7cffddc32426b2fe8fe4cd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"332185d7ad4c859210f5edd7a76bc95146c8277726a2f81417f34927c4424d68","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"239f9a25c0f2ab57592288d944c7f1a0f887536b6d4dc2428a17640af8d10a41","src/lib.rs":"2b1d98424c9b23b547dabf85554120e5e65472026a0f3f711b3a097bca7c32fe","src/parse.rs":"500edee9773132e27e44d0fdaa042b1cb9451e29e65124493986f51710c0664c","src/wrapper.rs":"d36c0dced7ec0e7585c1f935cda836080bcae6de1de3d7851d962e9e11a3ac48","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"310c856e27ff61c9ec7f0a5cd96031aac02971557b1621f5e17b089d58e79bcd","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"}
-\ No newline at end of file
-diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
---- a/third_party/rust/proc-macro2/Cargo.toml
-+++ b/third_party/rust/proc-macro2/Cargo.toml
-@@ -8,36 +8,35 @@
- # If you believe there's an error in this file please file an
- # issue against the rust-lang/cargo repository. If you're
- # editing this file be aware that the upstream Cargo.toml
- # will likely look very different (and much more reasonable)
-
- [package]
- edition = "2018"
- name = "proc-macro2"
--version = "1.0.5"
--authors = ["Alex Crichton <alex at alexcrichton.com>"]
--description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
--homepage = "https://github.com/alexcrichton/proc-macro2"
-+version = "1.0.20"
-+authors = ["Alex Crichton <alex at alexcrichton.com>", "David Tolnay <dtolnay at gmail.com>"]
-+description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
- documentation = "https://docs.rs/proc-macro2"
- readme = "README.md"
- keywords = ["macros"]
-+categories = ["development-tools::procedural-macro-helpers"]
- license = "MIT OR Apache-2.0"
- repository = "https://github.com/alexcrichton/proc-macro2"
- [package.metadata.docs.rs]
- rustc-args = ["--cfg", "procmacro2_semver_exempt"]
- rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
-+targets = ["x86_64-unknown-linux-gnu"]
-
--[lib]
--name = "proc_macro2"
-+[package.metadata.playground]
-+features = ["span-locations"]
- [dependencies.unicode-xid]
- version = "0.2"
- [dev-dependencies.quote]
- version = "1.0"
- default_features = false
-
- [features]
- default = ["proc-macro"]
- nightly = []
- proc-macro = []
- span-locations = []
--[badges.travis-ci]
--repository = "alexcrichton/proc-macro2"
-diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
---- a/third_party/rust/proc-macro2/README.md
-+++ b/third_party/rust/proc-macro2/README.md
-@@ -1,11 +1,11 @@
- # proc-macro2
-
--[](https://travis-ci.com/alexcrichton/proc-macro2)
-+[](https://github.com/alexcrichton/proc-macro2/actions)
- [](https://crates.io/crates/proc-macro2)
- [](https://docs.rs/proc-macro2)
-
- A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
- This library serves two purposes:
-
- - **Bring proc-macro-like functionality to other contexts like build.rs and
- main.rs.** Types from `proc_macro` are entirely specific to procedural macros
-diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
---- a/third_party/rust/proc-macro2/build.rs
-+++ b/third_party/rust/proc-macro2/build.rs
-@@ -9,16 +9,20 @@
- // "wrap_proc_macro"
- // Wrap types from libproc_macro rather than polyfilling the whole API.
- // Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
- // because we can't emulate the unstable API without emulating everything
- // else. Also enabled unconditionally on nightly, in which case the
- // procmacro2_semver_exempt surface area is implemented by using the
- // nightly-only proc_macro API.
- //
-+// "hygiene"
-+// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
-+// and Span::located_at. Enabled on Rust 1.45+.
-+//
- // "proc_macro_span"
- // Enable non-dummy behavior of Span::start and Span::end methods which
- // requires an unstable compiler feature. Enabled when building with
- // nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
- // features.
- //
- // "super_unstable"
- // Implement the semver exempt API in terms of the nightly-only proc_macro
-@@ -52,16 +56,24 @@ fn main() {
- // https://github.com/alexcrichton/proc-macro2/issues/147
- println!("cargo:rustc-cfg=procmacro2_semver_exempt");
- }
-
- if semver_exempt || cfg!(feature = "span-locations") {
- println!("cargo:rustc-cfg=span_locations");
- }
-
-+ if version.minor < 39 {
-+ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
-+ }
-+
-+ if version.minor >= 45 {
-+ println!("cargo:rustc-cfg=hygiene");
-+ }
-+
- let target = env::var("TARGET").unwrap();
- if !enable_use_proc_macro(&target) {
- return;
- }
-
- println!("cargo:rustc-cfg=use_proc_macro");
-
- if version.nightly || !semver_exempt {
-diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
-new file mode 100644
---- /dev/null
-+++ b/third_party/rust/proc-macro2/src/detection.rs
-@@ -0,0 +1,67 @@
-+use std::panic::{self, PanicInfo};
-+use std::sync::atomic::*;
-+use std::sync::Once;
-+
-+static WORKS: AtomicUsize = AtomicUsize::new(0);
-+static INIT: Once = Once::new();
-+
-+pub(crate) fn inside_proc_macro() -> bool {
-+ match WORKS.load(Ordering::SeqCst) {
-+ 1 => return false,
-+ 2 => return true,
-+ _ => {}
-+ }
-+
-+ INIT.call_once(initialize);
-+ inside_proc_macro()
-+}
-+
-+pub(crate) fn force_fallback() {
-+ WORKS.store(1, Ordering::SeqCst);
-+}
-+
-+pub(crate) fn unforce_fallback() {
-+ initialize();
-+}
-+
-+// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
-+// then use catch_unwind to determine whether the compiler's proc_macro is
-+// working. When proc-macro2 is used from outside of a procedural macro all
-+// of the proc_macro crate's APIs currently panic.
-+//
-+// The Once is to prevent the possibility of this ordering:
-+//
-+// thread 1 calls take_hook, gets the user's original hook
-+// thread 1 calls set_hook with the null hook
-+// thread 2 calls take_hook, thinks null hook is the original hook
-+// thread 2 calls set_hook with the null hook
-+// thread 1 calls set_hook with the actual original hook
-+// thread 2 calls set_hook with what it thinks is the original hook
-+//
-+// in which the user's hook has been lost.
-+//
-+// There is still a race condition where a panic in a different thread can
-+// happen during the interval that the user's original panic hook is
-+// unregistered such that their hook is incorrectly not called. This is
-+// sufficiently unlikely and less bad than printing panic messages to stderr
-+// on correct use of this crate. Maybe there is a libstd feature request
-+// here. For now, if a user needs to guarantee that this failure mode does
-+// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
-+// the main thread before launching any other threads.
-+fn initialize() {
-+ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
-+
-+ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
-+ let sanity_check = &*null_hook as *const PanicHook;
-+ let original_hook = panic::take_hook();
-+ panic::set_hook(null_hook);
-+
-+ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
-+ WORKS.store(works as usize + 1, Ordering::SeqCst);
-+
-+ let hopefully_null_hook = panic::take_hook();
-+ panic::set_hook(original_hook);
-+ if sanity_check != &*hopefully_null_hook {
-+ panic!("observed race condition in proc_macro2::inside_proc_macro");
-+ }
-+}
-diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
---- a/third_party/rust/proc-macro2/src/fallback.rs
-+++ b/third_party/rust/proc-macro2/src/fallback.rs
-@@ -1,41 +1,121 @@
-+use crate::parse::{token_stream, Cursor};
-+use crate::{Delimiter, Spacing, TokenTree};
- #[cfg(span_locations)]
- use std::cell::RefCell;
- #[cfg(span_locations)]
- use std::cmp;
--use std::fmt;
--use std::iter;
-+use std::fmt::{self, Debug, Display};
-+use std::iter::FromIterator;
-+use std::mem;
- use std::ops::RangeBounds;
- #[cfg(procmacro2_semver_exempt)]
- use std::path::Path;
- use std::path::PathBuf;
- use std::str::FromStr;
- use std::vec;
--
--use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
--use crate::{Delimiter, Punct, Spacing, TokenTree};
- use unicode_xid::UnicodeXID;
-
-+/// Force use of proc-macro2's fallback implementation of the API for now, even
-+/// if the compiler's implementation is available.
-+pub fn force() {
-+ #[cfg(wrap_proc_macro)]
-+ crate::detection::force_fallback();
-+}
-+
-+/// Resume using the compiler's implementation of the proc macro API if it is
-+/// available.
-+pub fn unforce() {
-+ #[cfg(wrap_proc_macro)]
-+ crate::detection::unforce_fallback();
-+}
-+
- #[derive(Clone)]
--pub struct TokenStream {
-- inner: Vec<TokenTree>,
-+pub(crate) struct TokenStream {
-+ pub(crate) inner: Vec<TokenTree>,
- }
-
- #[derive(Debug)]
--pub struct LexError;
-+pub(crate) struct LexError;
-
- impl TokenStream {
- pub fn new() -> TokenStream {
- TokenStream { inner: Vec::new() }
- }
-
- pub fn is_empty(&self) -> bool {
- self.inner.len() == 0
- }
-+
-+ fn take_inner(&mut self) -> Vec<TokenTree> {
-+ mem::replace(&mut self.inner, Vec::new())
-+ }
-+
-+ fn push_token(&mut self, token: TokenTree) {
-+ // https://github.com/alexcrichton/proc-macro2/issues/235
-+ match token {
-+ #[cfg(not(no_bind_by_move_pattern_guard))]
-+ TokenTree::Literal(crate::Literal {
-+ #[cfg(wrap_proc_macro)]
-+ inner: crate::imp::Literal::Fallback(literal),
-+ #[cfg(not(wrap_proc_macro))]
-+ inner: literal,
-+ ..
-+ }) if literal.text.starts_with('-') => {
-+ push_negative_literal(self, literal);
-+ }
-+ #[cfg(no_bind_by_move_pattern_guard)]
-+ TokenTree::Literal(crate::Literal {
-+ #[cfg(wrap_proc_macro)]
-+ inner: crate::imp::Literal::Fallback(literal),
-+ #[cfg(not(wrap_proc_macro))]
-+ inner: literal,
-+ ..
-+ }) => {
-+ if literal.text.starts_with('-') {
-+ push_negative_literal(self, literal);
-+ } else {
-+ self.inner
-+ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
-+ }
-+ }
-+ _ => self.inner.push(token),
-+ }
-+
-+ #[cold]
-+ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
-+ literal.text.remove(0);
-+ let mut punct = crate::Punct::new('-', Spacing::Alone);
-+ punct.set_span(crate::Span::_new_stable(literal.span));
-+ stream.inner.push(TokenTree::Punct(punct));
-+ stream
-+ .inner
-+ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
-+ }
-+ }
-+}
-+
-+// Nonrecursive to prevent stack overflow.
-+impl Drop for TokenStream {
-+ fn drop(&mut self) {
-+ while let Some(token) = self.inner.pop() {
-+ let group = match token {
-+ TokenTree::Group(group) => group.inner,
-+ _ => continue,
-+ };
-+ #[cfg(wrap_proc_macro)]
-+ let group = match group {
-+ crate::imp::Group::Fallback(group) => group,
-+ _ => continue,
-+ };
-+ let mut group = group;
-+ self.inner.extend(group.stream.take_inner());
-+ }
-+ }
- }
-
- #[cfg(span_locations)]
- fn get_cursor(src: &str) -> Cursor {
- // Create a dummy file & add it to the source map
- SOURCE_MAP.with(|cm| {
- let mut cm = cm.borrow_mut();
- let name = format!("<parsed string {}>", cm.files.len());
-@@ -54,68 +134,49 @@ fn get_cursor(src: &str) -> Cursor {
-
- impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- // Create a dummy file & add it to the source map
- let cursor = get_cursor(src);
-
-- match token_stream(cursor) {
-- Ok((input, output)) => {
-- if skip_whitespace(input).len() != 0 {
-- Err(LexError)
-- } else {
-- Ok(output)
-- }
-- }
-- Err(LexError) => Err(LexError),
-+ let (rest, tokens) = token_stream(cursor)?;
-+ if rest.is_empty() {
-+ Ok(tokens)
-+ } else {
-+ Err(LexError)
- }
- }
- }
-
--impl fmt::Display for TokenStream {
-+impl Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut joint = false;
- for (i, tt) in self.inner.iter().enumerate() {
- if i != 0 && !joint {
- write!(f, " ")?;
- }
- joint = false;
-- match *tt {
-- TokenTree::Group(ref tt) => {
-- let (start, end) = match tt.delimiter() {
-- Delimiter::Parenthesis => ("(", ")"),
-- Delimiter::Brace => ("{", "}"),
-- Delimiter::Bracket => ("[", "]"),
-- Delimiter::None => ("", ""),
-- };
-- if tt.stream().into_iter().next().is_none() {
-- write!(f, "{} {}", start, end)?
-- } else {
-- write!(f, "{} {} {}", start, tt.stream(), end)?
-- }
-+ match tt {
-+ TokenTree::Group(tt) => Display::fmt(tt, f),
-+ TokenTree::Ident(tt) => Display::fmt(tt, f),
-+ TokenTree::Punct(tt) => {
-+ joint = tt.spacing() == Spacing::Joint;
-+ Display::fmt(tt, f)
- }
-- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
-- TokenTree::Punct(ref tt) => {
-- write!(f, "{}", tt.as_char())?;
-- match tt.spacing() {
-- Spacing::Alone => {}
-- Spacing::Joint => joint = true,
-- }
-- }
-- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
-- }
-+ TokenTree::Literal(tt) => Display::fmt(tt, f),
-+ }?
- }
-
- Ok(())
- }
- }
-
--impl fmt::Debug for TokenStream {
-+impl Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.write_str("TokenStream ")?;
- f.debug_list().entries(self.clone()).finish()
- }
- }
-
- #[cfg(use_proc_macro)]
- impl From<proc_macro::TokenStream> for TokenStream {
-@@ -134,122 +195,107 @@ impl From<TokenStream> for proc_macro::T
- .to_string()
- .parse()
- .expect("failed to parse to compiler tokens")
- }
- }
-
- impl From<TokenTree> for TokenStream {
- fn from(tree: TokenTree) -> TokenStream {
-- TokenStream { inner: vec![tree] }
-+ let mut stream = TokenStream::new();
-+ stream.push_token(tree);
-+ stream
- }
- }
-
--impl iter::FromIterator<TokenTree> for TokenStream {
-- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
-- let mut v = Vec::new();
--
-- for token in streams.into_iter() {
-- v.push(token);
-- }
--
-- TokenStream { inner: v }
-+impl FromIterator<TokenTree> for TokenStream {
-+ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
-+ let mut stream = TokenStream::new();
-+ stream.extend(tokens);
-+ stream
- }
- }
-
--impl iter::FromIterator<TokenStream> for TokenStream {
-+impl FromIterator<TokenStream> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
- let mut v = Vec::new();
-
-- for stream in streams.into_iter() {
-- v.extend(stream.inner);
-+ for mut stream in streams {
-+ v.extend(stream.take_inner());
- }
-
- TokenStream { inner: v }
- }
- }
-
- impl Extend<TokenTree> for TokenStream {
-- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
-- self.inner.extend(streams);
-+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
-+ tokens.into_iter().for_each(|token| self.push_token(token));
- }
- }
-
- impl Extend<TokenStream> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-- self.inner
-- .extend(streams.into_iter().flat_map(|stream| stream));
-+ self.inner.extend(streams.into_iter().flatten());
- }
- }
-
--pub type TokenTreeIter = vec::IntoIter<TokenTree>;
-+pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
-
- impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = TokenTreeIter;
-
-- fn into_iter(self) -> TokenTreeIter {
-- self.inner.into_iter()
-+ fn into_iter(mut self) -> TokenTreeIter {
-+ self.take_inner().into_iter()
- }
- }
-
- #[derive(Clone, PartialEq, Eq)]
--pub struct SourceFile {
-+pub(crate) struct SourceFile {
- path: PathBuf,
- }
-
- impl SourceFile {
- /// Get the path to this source file as a string.
- pub fn path(&self) -> PathBuf {
- self.path.clone()
- }
-
- pub fn is_real(&self) -> bool {
- // XXX(nika): Support real files in the future?
- false
- }
- }
-
--impl fmt::Debug for SourceFile {
-+impl Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("SourceFile")
- .field("path", &self.path())
- .field("is_real", &self.is_real())
- .finish()
- }
- }
-
- #[derive(Clone, Copy, Debug, PartialEq, Eq)]
--pub struct LineColumn {
-+pub(crate) struct LineColumn {
- pub line: usize,
- pub column: usize,
- }
-
- #[cfg(span_locations)]
- thread_local! {
- static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
- // NOTE: We start with a single dummy file which all call_site() and
- // def_site() spans reference.
-- files: vec![{
-+ files: vec![FileInfo {
- #[cfg(procmacro2_semver_exempt)]
-- {
-- FileInfo {
-- name: "<unspecified>".to_owned(),
-- span: Span { lo: 0, hi: 0 },
-- lines: vec![0],
-- }
-- }
--
-- #[cfg(not(procmacro2_semver_exempt))]
-- {
-- FileInfo {
-- span: Span { lo: 0, hi: 0 },
-- lines: vec![0],
-- }
-- }
-+ name: "<unspecified>".to_owned(),
-+ span: Span { lo: 0, hi: 0 },
-+ lines: vec![0],
- }],
- });
- }
-
- #[cfg(span_locations)]
- struct FileInfo {
- #[cfg(procmacro2_semver_exempt)]
- name: String,
-@@ -277,26 +323,31 @@ impl FileInfo {
- }
- }
-
- fn span_within(&self, span: Span) -> bool {
- span.lo >= self.span.lo && span.hi <= self.span.hi
- }
- }
-
--/// Computesthe offsets of each line in the given source string.
-+/// Computes the offsets of each line in the given source string
-+/// and the total number of characters
- #[cfg(span_locations)]
--fn lines_offsets(s: &str) -> Vec<usize> {
-+fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
- let mut lines = vec![0];
-- let mut prev = 0;
-- while let Some(len) = s[prev..].find('\n') {
-- prev += len + 1;
-- lines.push(prev);
-+ let mut total = 0;
-+
-+ for ch in s.chars() {
-+ total += 1;
-+ if ch == '\n' {
-+ lines.push(total);
-+ }
- }
-- lines
-+
-+ (total, lines)
- }
-
- #[cfg(span_locations)]
- struct SourceMap {
- files: Vec<FileInfo>,
- }
-
- #[cfg(span_locations)]
-@@ -305,81 +356,83 @@ impl SourceMap {
- // Add 1 so there's always space between files.
- //
- // We'll always have at least 1 file, as we initialize our files list
- // with a dummy file.
- self.files.last().unwrap().span.hi + 1
- }
-
- fn add_file(&mut self, name: &str, src: &str) -> Span {
-- let lines = lines_offsets(src);
-+ let (len, lines) = lines_offsets(src);
- let lo = self.next_start_pos();
- // XXX(nika): Shouild we bother doing a checked cast or checked add here?
- let span = Span {
- lo,
-- hi: lo + (src.len() as u32),
-+ hi: lo + (len as u32),
- };
-
-- #[cfg(procmacro2_semver_exempt)]
- self.files.push(FileInfo {
-+ #[cfg(procmacro2_semver_exempt)]
- name: name.to_owned(),
- span,
- lines,
- });
-
- #[cfg(not(procmacro2_semver_exempt))]
-- self.files.push(FileInfo { span, lines });
- let _ = name;
-
- span
- }
-
- fn fileinfo(&self, span: Span) -> &FileInfo {
- for file in &self.files {
- if file.span_within(span) {
- return file;
- }
- }
- panic!("Invalid span with no related FileInfo!");
- }
- }
-
- #[derive(Clone, Copy, PartialEq, Eq)]
--pub struct Span {
-+pub(crate) struct Span {
- #[cfg(span_locations)]
-- lo: u32,
-+ pub(crate) lo: u32,
- #[cfg(span_locations)]
-- hi: u32,
-+ pub(crate) hi: u32,
- }
-
- impl Span {
- #[cfg(not(span_locations))]
- pub fn call_site() -> Span {
- Span {}
- }
-
- #[cfg(span_locations)]
- pub fn call_site() -> Span {
- Span { lo: 0, hi: 0 }
- }
-
-+ #[cfg(hygiene)]
-+ pub fn mixed_site() -> Span {
-+ Span::call_site()
-+ }
-+
- #[cfg(procmacro2_semver_exempt)]
- pub fn def_site() -> Span {
- Span::call_site()
- }
-
-- #[cfg(procmacro2_semver_exempt)]
- pub fn resolved_at(&self, _other: Span) -> Span {
- // Stable spans consist only of line/column information, so
- // `resolved_at` and `located_at` only select which span the
- // caller wants line/column information from.
- *self
- }
-
-- #[cfg(procmacro2_semver_exempt)]
- pub fn located_at(&self, other: Span) -> Span {
- other
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn source_file(&self) -> SourceFile {
- SOURCE_MAP.with(|cm| {
- let cm = cm.borrow();
-@@ -422,36 +475,69 @@ impl Span {
- return None;
- }
- Some(Span {
- lo: cmp::min(self.lo, other.lo),
- hi: cmp::max(self.hi, other.hi),
- })
- })
- }
-+
-+ #[cfg(not(span_locations))]
-+ fn first_byte(self) -> Self {
-+ self
-+ }
-+
-+ #[cfg(span_locations)]
-+ fn first_byte(self) -> Self {
-+ Span {
-+ lo: self.lo,
-+ hi: cmp::min(self.lo.saturating_add(1), self.hi),
-+ }
-+ }
-+
-+ #[cfg(not(span_locations))]
-+ fn last_byte(self) -> Self {
-+ self
-+ }
-+
-+ #[cfg(span_locations)]
-+ fn last_byte(self) -> Self {
-+ Span {
-+ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
-+ hi: self.hi,
-+ }
-+ }
- }
-
--impl fmt::Debug for Span {
-+impl Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- #[cfg(procmacro2_semver_exempt)]
-+ #[cfg(span_locations)]
- return write!(f, "bytes({}..{})", self.lo, self.hi);
-
-- #[cfg(not(procmacro2_semver_exempt))]
-+ #[cfg(not(span_locations))]
- write!(f, "Span")
- }
- }
-
--pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
-- if cfg!(procmacro2_semver_exempt) {
-+pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
-+ #[cfg(span_locations)]
-+ {
-+ if span.lo == 0 && span.hi == 0 {
-+ return;
-+ }
-+ }
-+
-+ if cfg!(span_locations) {
- debug.field("span", &span);
- }
- }
-
- #[derive(Clone)]
--pub struct Group {
-+pub(crate) struct Group {
- delimiter: Delimiter,
- stream: TokenStream,
- span: Span,
- }
-
- impl Group {
- pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
- Group {
-@@ -469,58 +555,67 @@ impl Group {
- self.stream.clone()
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn span_open(&self) -> Span {
-- self.span
-+ self.span.first_byte()
- }
-
- pub fn span_close(&self) -> Span {
-- self.span
-+ self.span.last_byte()
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
- }
-
--impl fmt::Display for Group {
-+impl Display for Group {
-+ // We attempt to match libproc_macro's formatting.
-+ // Empty parens: ()
-+ // Nonempty parens: (...)
-+ // Empty brackets: []
-+ // Nonempty brackets: [...]
-+ // Empty braces: { }
-+ // Nonempty braces: { ... }
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- let (left, right) = match self.delimiter {
-+ let (open, close) = match self.delimiter {
- Delimiter::Parenthesis => ("(", ")"),
-- Delimiter::Brace => ("{", "}"),
-+ Delimiter::Brace => ("{ ", "}"),
- Delimiter::Bracket => ("[", "]"),
- Delimiter::None => ("", ""),
- };
-
-- f.write_str(left)?;
-- self.stream.fmt(f)?;
-- f.write_str(right)?;
-+ f.write_str(open)?;
-+ Display::fmt(&self.stream, f)?;
-+ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
-+ f.write_str(" ")?;
-+ }
-+ f.write_str(close)?;
-
- Ok(())
- }
- }
-
--impl fmt::Debug for Group {
-+impl Debug for Group {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = fmt.debug_struct("Group");
- debug.field("delimiter", &self.delimiter);
- debug.field("stream", &self.stream);
-- #[cfg(procmacro2_semver_exempt)]
-- debug.field("span", &self.span);
-+ debug_span_field_if_nontrivial(&mut debug, self.span);
- debug.finish()
- }
- }
-
- #[derive(Clone)]
--pub struct Ident {
-+pub(crate) struct Ident {
- sym: String,
- span: Span,
- raw: bool,
- }
-
- impl Ident {
- fn _new(string: &str, raw: bool, span: Span) -> Ident {
- validate_ident(string);
-@@ -544,26 +639,24 @@ impl Ident {
- self.span
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
- }
-
--#[inline]
--fn is_ident_start(c: char) -> bool {
-+pub(crate) fn is_ident_start(c: char) -> bool {
- ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z')
- || c == '_'
- || (c > '\x7f' && UnicodeXID::is_xid_start(c))
- }
-
--#[inline]
--fn is_ident_continue(c: char) -> bool {
-+pub(crate) fn is_ident_continue(c: char) -> bool {
- ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z')
- || c == '_'
- || ('0' <= c && c <= '9')
- || (c > '\x7f' && UnicodeXID::is_xid_continue(c))
- }
-
- fn validate_ident(string: &str) {
-@@ -610,49 +703,49 @@ where
- if self.raw {
- other.starts_with("r#") && self.sym == other[2..]
- } else {
- self.sym == other
- }
- }
- }
-
--impl fmt::Display for Ident {
-+impl Display for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- if self.raw {
-- "r#".fmt(f)?;
-+ f.write_str("r#")?;
- }
-- self.sym.fmt(f)
-+ Display::fmt(&self.sym, f)
- }
- }
-
--impl fmt::Debug for Ident {
-+impl Debug for Ident {
- // Ident(proc_macro), Ident(r#union)
-- #[cfg(not(procmacro2_semver_exempt))]
-+ #[cfg(not(span_locations))]
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = f.debug_tuple("Ident");
- debug.field(&format_args!("{}", self));
- debug.finish()
- }
-
- // Ident {
- // sym: proc_macro,
- // span: bytes(128..138)
- // }
-- #[cfg(procmacro2_semver_exempt)]
-+ #[cfg(span_locations)]
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = f.debug_struct("Ident");
- debug.field("sym", &format_args!("{}", self));
-- debug.field("span", &self.span);
-+ debug_span_field_if_nontrivial(&mut debug, self.span);
- debug.finish()
- }
- }
-
- #[derive(Clone)]
--pub struct Literal {
-+pub(crate) struct Literal {
- text: String,
- span: Span,
- }
-
- macro_rules! suffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(format!(concat!("{}", stringify!($kind)), n))
-@@ -664,17 +757,17 @@ macro_rules! unsuffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(n.to_string())
- }
- )*)
- }
-
- impl Literal {
-- fn _new(text: String) -> Literal {
-+ pub(crate) fn _new(text: String) -> Literal {
- Literal {
- text,
- span: Span::call_site(),
- }
- }
-
- suffixed_numbers! {
- u8_suffixed => u8,
-@@ -706,61 +799,62 @@ impl Literal {
- i32_unsuffixed => i32,
- i64_unsuffixed => i64,
- i128_unsuffixed => i128,
- isize_unsuffixed => isize,
- }
-
- pub fn f32_unsuffixed(f: f32) -> Literal {
- let mut s = f.to_string();
-- if !s.contains(".") {
-+ if !s.contains('.') {
- s.push_str(".0");
- }
- Literal::_new(s)
- }
-
- pub fn f64_unsuffixed(f: f64) -> Literal {
- let mut s = f.to_string();
-- if !s.contains(".") {
-+ if !s.contains('.') {
- s.push_str(".0");
- }
- Literal::_new(s)
- }
-
- pub fn string(t: &str) -> Literal {
- let mut text = String::with_capacity(t.len() + 2);
- text.push('"');
- for c in t.chars() {
- if c == '\'' {
-- // escape_default turns this into "\'" which is unnecessary.
-+ // escape_debug turns this into "\'" which is unnecessary.
- text.push(c);
- } else {
-- text.extend(c.escape_default());
-+ text.extend(c.escape_debug());
- }
- }
- text.push('"');
- Literal::_new(text)
- }
-
- pub fn character(t: char) -> Literal {
- let mut text = String::new();
- text.push('\'');
- if t == '"' {
-- // escape_default turns this into '\"' which is unnecessary.
-+ // escape_debug turns this into '\"' which is unnecessary.
- text.push(t);
- } else {
-- text.extend(t.escape_default());
-+ text.extend(t.escape_debug());
- }
- text.push('\'');
- Literal::_new(text)
- }
-
- pub fn byte_string(bytes: &[u8]) -> Literal {
- let mut escaped = "b\"".to_string();
- for b in bytes {
-+ #[allow(clippy::match_overlapping_arm)]
- match *b {
- b'\0' => escaped.push_str(r"\0"),
- b'\t' => escaped.push_str(r"\t"),
- b'\n' => escaped.push_str(r"\n"),
- b'\r' => escaped.push_str(r"\r"),
- b'"' => escaped.push_str("\\\""),
- b'\\' => escaped.push_str("\\\\"),
- b'\x20'..=b'\x7E' => escaped.push(*b as char),
-@@ -779,656 +873,22 @@ impl Literal {
- self.span = span;
- }
-
- pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
- None
- }
- }
-
--impl fmt::Display for Literal {
-+impl Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.text.fmt(f)
-- }
--}
--
--impl fmt::Debug for Literal {
-- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-- let mut debug = fmt.debug_struct("Literal");
-- debug.field("lit", &format_args!("{}", self.text));
-- #[cfg(procmacro2_semver_exempt)]
-- debug.field("span", &self.span);
-- debug.finish()
-- }
--}
--
--fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
-- let mut trees = Vec::new();
-- loop {
-- let input_no_ws = skip_whitespace(input);
-- if input_no_ws.rest.len() == 0 {
-- break;
-- }
-- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
-- input = a;
-- trees.extend(tokens);
-- continue;
-- }
--
-- let (a, tt) = match token_tree(input_no_ws) {
-- Ok(p) => p,
-- Err(_) => break,
-- };
-- trees.push(tt);
-- input = a;
-- }
-- Ok((input, TokenStream { inner: trees }))
--}
--
--#[cfg(not(span_locations))]
--fn spanned<'a, T>(
-- input: Cursor<'a>,
-- f: fn(Cursor<'a>) -> PResult<'a, T>,
--) -> PResult<'a, (T, crate::Span)> {
-- let (a, b) = f(skip_whitespace(input))?;
-- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
--}
--
--#[cfg(span_locations)]
--fn spanned<'a, T>(
-- input: Cursor<'a>,
-- f: fn(Cursor<'a>) -> PResult<'a, T>,
--) -> PResult<'a, (T, crate::Span)> {
-- let input = skip_whitespace(input);
-- let lo = input.off;
-- let (a, b) = f(input)?;
-- let hi = a.off;
-- let span = crate::Span::_new_stable(Span { lo, hi });
-- Ok((a, (b, span)))
--}
--
--fn token_tree(input: Cursor) -> PResult<TokenTree> {
-- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
-- tt.set_span(span);
-- Ok((rest, tt))
--}
--
--named!(token_kind -> TokenTree, alt!(
-- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
-- |
-- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
-- |
-- map!(op, TokenTree::Punct)
-- |
-- symbol_leading_ws
--));
--
--named!(group -> Group, alt!(
-- delimited!(
-- punct!("("),
-- token_stream,
-- punct!(")")
-- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
-- |
-- delimited!(
-- punct!("["),
-- token_stream,
-- punct!("]")
-- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
-- |
-- delimited!(
-- punct!("{"),
-- token_stream,
-- punct!("}")
-- ) => { |ts| Group::new(Delimiter::Brace, ts) }
--));
--
--fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
-- symbol(skip_whitespace(input))
--}
--
--fn symbol(input: Cursor) -> PResult<TokenTree> {
-- let raw = input.starts_with("r#");
-- let rest = input.advance((raw as usize) << 1);
--
-- let (rest, sym) = symbol_not_raw(rest)?;
--
-- if !raw {
-- let ident = crate::Ident::new(sym, crate::Span::call_site());
-- return Ok((rest, ident.into()));
-- }
--
-- if sym == "_" {
-- return Err(LexError);
-- }
--
-- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
-- Ok((rest, ident.into()))
--}
--
--fn symbol_not_raw(input: Cursor) -> PResult<&str> {
-- let mut chars = input.char_indices();
--
-- match chars.next() {
-- Some((_, ch)) if is_ident_start(ch) => {}
-- _ => return Err(LexError),
-- }
--
-- let mut end = input.len();
-- for (i, ch) in chars {
-- if !is_ident_continue(ch) {
-- end = i;
-- break;
-- }
-- }
--
-- Ok((input.advance(end), &input.rest[..end]))
--}
--
--fn literal(input: Cursor) -> PResult<Literal> {
-- let input_no_ws = skip_whitespace(input);
--
-- match literal_nocapture(input_no_ws) {
-- Ok((a, ())) => {
-- let start = input.len() - input_no_ws.len();
-- let len = input_no_ws.len() - a.len();
-- let end = start + len;
-- Ok((a, Literal::_new(input.rest[start..end].to_string())))
-- }
-- Err(LexError) => Err(LexError),
-+ Display::fmt(&self.text, f)
- }
- }
-
--named!(literal_nocapture -> (), alt!(
-- string
-- |
-- byte_string
-- |
-- byte
-- |
-- character
-- |
-- float
-- |
-- int
--));
--
--named!(string -> (), alt!(
-- quoted_string
-- |
-- preceded!(
-- punct!("r"),
-- raw_string
-- ) => { |_| () }
--));
--
--named!(quoted_string -> (), do_parse!(
-- punct!("\"") >>
-- cooked_string >>
-- tag!("\"") >>
-- option!(symbol_not_raw) >>
-- (())
--));
--
--fn cooked_string(input: Cursor) -> PResult<()> {
-- let mut chars = input.char_indices().peekable();
-- while let Some((byte_offset, ch)) = chars.next() {
-- match ch {
-- '"' => {
-- return Ok((input.advance(byte_offset), ()));
-- }
-- '\r' => {
-- if let Some((_, '\n')) = chars.next() {
-- // ...
-- } else {
-- break;
-- }
-- }
-- '\\' => match chars.next() {
-- Some((_, 'x')) => {
-- if !backslash_x_char(&mut chars) {
-- break;
-- }
-- }
-- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
-- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
-- Some((_, 'u')) => {
-- if !backslash_u(&mut chars) {
-- break;
-- }
-- }
-- Some((_, '\n')) | Some((_, '\r')) => {
-- while let Some(&(_, ch)) = chars.peek() {
-- if ch.is_whitespace() {
-- chars.next();
-- } else {
-- break;
-- }
-- }
-- }
-- _ => break,
-- },
-- _ch => {}
-- }
-- }
-- Err(LexError)
--}
--
--named!(byte_string -> (), alt!(
-- delimited!(
-- punct!("b\""),
-- cooked_byte_string,
-- tag!("\"")
-- ) => { |_| () }
-- |
-- preceded!(
-- punct!("br"),
-- raw_string
-- ) => { |_| () }
--));
--
--fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
-- let mut bytes = input.bytes().enumerate();
-- 'outer: while let Some((offset, b)) = bytes.next() {
-- match b {
-- b'"' => {
-- return Ok((input.advance(offset), ()));
-- }
-- b'\r' => {
-- if let Some((_, b'\n')) = bytes.next() {
-- // ...
-- } else {
-- break;
-- }
-- }
-- b'\\' => match bytes.next() {
-- Some((_, b'x')) => {
-- if !backslash_x_byte(&mut bytes) {
-- break;
-- }
-- }
-- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
-- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
-- Some((newline, b'\n')) | Some((newline, b'\r')) => {
-- let rest = input.advance(newline + 1);
-- for (offset, ch) in rest.char_indices() {
-- if !ch.is_whitespace() {
-- input = rest.advance(offset);
-- bytes = input.bytes().enumerate();
-- continue 'outer;
-- }
-- }
-- break;
-- }
-- _ => break,
-- },
-- b if b < 0x80 => {}
-- _ => break,
-- }
-- }
-- Err(LexError)
--}
--
--fn raw_string(input: Cursor) -> PResult<()> {
-- let mut chars = input.char_indices();
-- let mut n = 0;
-- while let Some((byte_offset, ch)) = chars.next() {
-- match ch {
-- '"' => {
-- n = byte_offset;
-- break;
-- }
-- '#' => {}
-- _ => return Err(LexError),
-- }
-- }
-- for (byte_offset, ch) in chars {
-- match ch {
-- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
-- let rest = input.advance(byte_offset + 1 + n);
-- return Ok((rest, ()));
-- }
-- '\r' => {}
-- _ => {}
-- }
-- }
-- Err(LexError)
--}
--
--named!(byte -> (), do_parse!(
-- punct!("b") >>
-- tag!("'") >>
-- cooked_byte >>
-- tag!("'") >>
-- (())
--));
--
--fn cooked_byte(input: Cursor) -> PResult<()> {
-- let mut bytes = input.bytes().enumerate();
-- let ok = match bytes.next().map(|(_, b)| b) {
-- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
-- Some(b'x') => backslash_x_byte(&mut bytes),
-- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
-- | Some(b'"') => true,
-- _ => false,
-- },
-- b => b.is_some(),
-- };
-- if ok {
-- match bytes.next() {
-- Some((offset, _)) => {
-- if input.chars().as_str().is_char_boundary(offset) {
-- Ok((input.advance(offset), ()))
-- } else {
-- Err(LexError)
-- }
-- }
-- None => Ok((input.advance(input.len()), ())),
-- }
-- } else {
-- Err(LexError)
-- }
--}
--
--named!(character -> (), do_parse!(
-- punct!("'") >>
-- cooked_char >>
-- tag!("'") >>
-- (())
--));
--
--fn cooked_char(input: Cursor) -> PResult<()> {
-- let mut chars = input.char_indices();
-- let ok = match chars.next().map(|(_, ch)| ch) {
-- Some('\\') => match chars.next().map(|(_, ch)| ch) {
-- Some('x') => backslash_x_char(&mut chars),
-- Some('u') => backslash_u(&mut chars),
-- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
-- true
-- }
-- _ => false,
-- },
-- ch => ch.is_some(),
-- };
-- if ok {
-- match chars.next() {
-- Some((idx, _)) => Ok((input.advance(idx), ())),
-- None => Ok((input.advance(input.len()), ())),
-- }
-- } else {
-- Err(LexError)
-+impl Debug for Literal {
-+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-+ let mut debug = fmt.debug_struct("Literal");
-+ debug.field("lit", &format_args!("{}", self.text));
-+ debug_span_field_if_nontrivial(&mut debug, self.span);
-+ debug.finish()
- }
- }
--
--macro_rules! next_ch {
-- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
-- match $chars.next() {
-- Some((_, ch)) => match ch {
-- $pat $(| $rest)* => ch,
-- _ => return false,
-- },
-- None => return false
-- }
-- };
--}
--
--fn backslash_x_char<I>(chars: &mut I) -> bool
--where
-- I: Iterator<Item = (usize, char)>,
--{
-- next_ch!(chars @ '0'..='7');
-- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
-- true
--}
--
--fn backslash_x_byte<I>(chars: &mut I) -> bool
--where
-- I: Iterator<Item = (usize, u8)>,
--{
-- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
-- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
-- true
--}
--
--fn backslash_u<I>(chars: &mut I) -> bool
--where
-- I: Iterator<Item = (usize, char)>,
--{
-- next_ch!(chars @ '{');
-- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
-- loop {
-- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
-- if c == '}' {
-- return true;
-- }
-- }
--}
--
--fn float(input: Cursor) -> PResult<()> {
-- let (mut rest, ()) = float_digits(input)?;
-- if let Some(ch) = rest.chars().next() {
-- if is_ident_start(ch) {
-- rest = symbol_not_raw(rest)?.0;
-- }
-- }
-- word_break(rest)
--}
--
--fn float_digits(input: Cursor) -> PResult<()> {
-- let mut chars = input.chars().peekable();
-- match chars.next() {
-- Some(ch) if ch >= '0' && ch <= '9' => {}
-- _ => return Err(LexError),
-- }
--
-- let mut len = 1;
-- let mut has_dot = false;
-- let mut has_exp = false;
-- while let Some(&ch) = chars.peek() {
-- match ch {
-- '0'..='9' | '_' => {
-- chars.next();
-- len += 1;
-- }
-- '.' => {
-- if has_dot {
-- break;
-- }
-- chars.next();
-- if chars
-- .peek()
-- .map(|&ch| ch == '.' || is_ident_start(ch))
-- .unwrap_or(false)
-- {
-- return Err(LexError);
-- }
-- len += 1;
-- has_dot = true;
-- }
-- 'e' | 'E' => {
-- chars.next();
-- len += 1;
-- has_exp = true;
-- break;
-- }
-- _ => break,
-- }
-- }
--
-- let rest = input.advance(len);
-- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
-- return Err(LexError);
-- }
--
-- if has_exp {
-- let mut has_exp_value = false;
-- while let Some(&ch) = chars.peek() {
-- match ch {
-- '+' | '-' => {
-- if has_exp_value {
-- break;
-- }
-- chars.next();
-- len += 1;
-- }
-- '0'..='9' => {
-- chars.next();
-- len += 1;
-- has_exp_value = true;
-- }
-- '_' => {
-- chars.next();
-- len += 1;
-- }
-- _ => break,
-- }
-- }
-- if !has_exp_value {
-- return Err(LexError);
-- }
-- }
--
-- Ok((input.advance(len), ()))
--}
--
--fn int(input: Cursor) -> PResult<()> {
-- let (mut rest, ()) = digits(input)?;
-- if let Some(ch) = rest.chars().next() {
-- if is_ident_start(ch) {
-- rest = symbol_not_raw(rest)?.0;
-- }
-- }
-- word_break(rest)
--}
--
--fn digits(mut input: Cursor) -> PResult<()> {
-- let base = if input.starts_with("0x") {
-- input = input.advance(2);
-- 16
-- } else if input.starts_with("0o") {
-- input = input.advance(2);
-- 8
-- } else if input.starts_with("0b") {
-- input = input.advance(2);
-- 2
-- } else {
-- 10
-- };
--
-- let mut len = 0;
-- let mut empty = true;
-- for b in input.bytes() {
-- let digit = match b {
-- b'0'..=b'9' => (b - b'0') as u64,
-- b'a'..=b'f' => 10 + (b - b'a') as u64,
-- b'A'..=b'F' => 10 + (b - b'A') as u64,
-- b'_' => {
-- if empty && base == 10 {
-- return Err(LexError);
-- }
-- len += 1;
-- continue;
-- }
-- _ => break,
-- };
-- if digit >= base {
-- return Err(LexError);
-- }
-- len += 1;
-- empty = false;
-- }
-- if empty {
-- Err(LexError)
-- } else {
-- Ok((input.advance(len), ()))
-- }
--}
--
--fn op(input: Cursor) -> PResult<Punct> {
-- let input = skip_whitespace(input);
-- match op_char(input) {
-- Ok((rest, '\'')) => {
-- symbol(rest)?;
-- Ok((rest, Punct::new('\'', Spacing::Joint)))
-- }
-- Ok((rest, ch)) => {
-- let kind = match op_char(rest) {
-- Ok(_) => Spacing::Joint,
-- Err(LexError) => Spacing::Alone,
-- };
-- Ok((rest, Punct::new(ch, kind)))
-- }
-- Err(LexError) => Err(LexError),
-- }
--}
--
--fn op_char(input: Cursor) -> PResult<char> {
-- if input.starts_with("//") || input.starts_with("/*") {
-- // Do not accept `/` of a comment as an op.
-- return Err(LexError);
-- }
--
-- let mut chars = input.chars();
-- let first = match chars.next() {
-- Some(ch) => ch,
-- None => {
-- return Err(LexError);
-- }
-- };
-- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
-- if recognized.contains(first) {
-- Ok((input.advance(first.len_utf8()), first))
-- } else {
-- Err(LexError)
-- }
--}
--
--fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
-- let mut trees = Vec::new();
-- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
-- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
-- if inner {
-- trees.push(Punct::new('!', Spacing::Alone).into());
-- }
-- let mut stream = vec![
-- TokenTree::Ident(crate::Ident::new("doc", span)),
-- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
-- TokenTree::Literal(crate::Literal::string(comment)),
-- ];
-- for tt in stream.iter_mut() {
-- tt.set_span(span);
-- }
-- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
-- trees.push(crate::Group::_new_stable(group).into());
-- for tt in trees.iter_mut() {
-- tt.set_span(span);
-- }
-- Ok((rest, trees))
--}
--
--named!(doc_comment_contents -> (&str, bool), alt!(
-- do_parse!(
-- punct!("//!") >>
-- s: take_until_newline_or_eof!() >>
-- ((s, true))
-- )
-- |
-- do_parse!(
-- option!(whitespace) >>
-- peek!(tag!("/*!")) >>
-- s: block_comment >>
-- ((s, true))
-- )
-- |
-- do_parse!(
-- punct!("///") >>
-- not!(tag!("/")) >>
-- s: take_until_newline_or_eof!() >>
-- ((s, false))
-- )
-- |
-- do_parse!(
-- option!(whitespace) >>
-- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
-- s: block_comment >>
-- ((s, false))
-- )
--));
-diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
---- a/third_party/rust/proc-macro2/src/lib.rs
-+++ b/third_party/rust/proc-macro2/src/lib.rs
-@@ -73,37 +73,44 @@
- //!
- //! # Thread-Safety
- //!
- //! Most types in this crate are `!Sync` because the underlying compiler
- //! types make use of thread-local memory, meaning they cannot be accessed from
- //! a different thread.
-
- // Proc-macro2 types in rustdoc of other crates get linked to here.
--#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
-+#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.20")]
- #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
- #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
-+#![allow(clippy::needless_doctest_main)]
-
- #[cfg(use_proc_macro)]
- extern crate proc_macro;
-
- use std::cmp::Ordering;
--use std::fmt;
-+use std::fmt::{self, Debug, Display};
- use std::hash::{Hash, Hasher};
- use std::iter::FromIterator;
- use std::marker;
- use std::ops::RangeBounds;
- #[cfg(procmacro2_semver_exempt)]
- use std::path::PathBuf;
- use std::rc::Rc;
- use std::str::FromStr;
-
--#[macro_use]
--mod strnom;
--mod fallback;
-+mod parse;
-+
-+#[cfg(wrap_proc_macro)]
-+mod detection;
-+
-+// Public for proc_macro2::fallback::force() and unforce(), but those are quite
-+// a niche use case so we omit it from rustdoc.
-+#[doc(hidden)]
-+pub mod fallback;
-
- #[cfg(not(wrap_proc_macro))]
- use crate::fallback as imp;
- #[path = "wrapper.rs"]
- #[cfg(wrap_proc_macro)]
- mod imp;
-
- /// An abstract stream of tokens, or more concretely a sequence of token trees.
-@@ -223,32 +230,32 @@ impl FromIterator<TokenStream> for Token
- TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
- }
- }
-
- /// Prints the token stream as a string that is supposed to be losslessly
- /// convertible back into the same token stream (modulo spans), except for
- /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
- /// numeric literals.
--impl fmt::Display for TokenStream {
-+impl Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Display::fmt(&self.inner, f)
- }
- }
-
- /// Prints token in a form convenient for debugging.
--impl fmt::Debug for TokenStream {
-+impl Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Debug::fmt(&self.inner, f)
- }
- }
-
--impl fmt::Debug for LexError {
-+impl Debug for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Debug::fmt(&self.inner, f)
- }
- }
-
- /// The source file of a given `Span`.
- ///
- /// This type is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- #[derive(Clone, PartialEq, Eq)]
-@@ -286,19 +293,19 @@ impl SourceFile {
- /// Returns `true` if this source file is a real source file, and not
- /// generated by an external macro's expansion.
- pub fn is_real(&self) -> bool {
- self.inner.is_real()
- }
- }
-
- #[cfg(procmacro2_semver_exempt)]
--impl fmt::Debug for SourceFile {
-+impl Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Debug::fmt(&self.inner, f)
- }
- }
-
- /// A line-column pair representing the start or end of a `Span`.
- ///
- /// This type is semver exempt and not exposed by default.
- #[cfg(span_locations)]
- #[derive(Copy, Clone, Debug, PartialEq, Eq)]
-@@ -306,16 +313,32 @@ pub struct LineColumn {
- /// The 1-indexed line in the source file on which the span starts or ends
- /// (inclusive).
- pub line: usize,
- /// The 0-indexed column (in UTF-8 characters) in the source file on which
- /// the span starts or ends (inclusive).
- pub column: usize,
- }
-
-+#[cfg(span_locations)]
-+impl Ord for LineColumn {
-+ fn cmp(&self, other: &Self) -> Ordering {
-+ self.line
-+ .cmp(&other.line)
-+ .then(self.column.cmp(&other.column))
-+ }
-+}
-+
-+#[cfg(span_locations)]
-+impl PartialOrd for LineColumn {
-+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-+ Some(self.cmp(other))
-+ }
-+}
-+
- /// A region of source code, along with macro expansion information.
- #[derive(Copy, Clone)]
- pub struct Span {
- inner: imp::Span,
- _marker: marker::PhantomData<Rc<()>>,
- }
-
- impl Span {
-@@ -337,38 +360,42 @@ impl Span {
- ///
- /// Identifiers created with this span will be resolved as if they were
- /// written directly at the macro call location (call-site hygiene) and
- /// other code at the macro call site will be able to refer to them as well.
- pub fn call_site() -> Span {
- Span::_new(imp::Span::call_site())
- }
-
-+ /// The span located at the invocation of the procedural macro, but with
-+ /// local variables, labels, and `$crate` resolved at the definition site
-+ /// of the macro. This is the same hygiene behavior as `macro_rules`.
-+ ///
-+ /// This function requires Rust 1.45 or later.
-+ #[cfg(hygiene)]
-+ pub fn mixed_site() -> Span {
-+ Span::_new(imp::Span::mixed_site())
-+ }
-+
- /// A span that resolves at the macro definition site.
- ///
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- pub fn def_site() -> Span {
- Span::_new(imp::Span::def_site())
- }
-
- /// Creates a new span with the same line/column information as `self` but
- /// that resolves symbols as though it were at `other`.
-- ///
-- /// This method is semver exempt and not exposed by default.
-- #[cfg(procmacro2_semver_exempt)]
- pub fn resolved_at(&self, other: Span) -> Span {
- Span::_new(self.inner.resolved_at(other.inner))
- }
-
- /// Creates a new span with the same name resolution behavior as `self` but
- /// with the line/column information of `other`.
-- ///
-- /// This method is semver exempt and not exposed by default.
-- #[cfg(procmacro2_semver_exempt)]
- pub fn located_at(&self, other: Span) -> Span {
- Span::_new(self.inner.located_at(other.inner))
- }
-
- /// Convert `proc_macro2::Span` to `proc_macro::Span`.
- ///
- /// This method is available when building with a nightly compiler, or when
- /// building with rustc 1.29+ *without* semver exempt features.
-@@ -434,19 +461,19 @@ impl Span {
- /// This method is semver exempt and not exposed by default.
- #[cfg(procmacro2_semver_exempt)]
- pub fn eq(&self, other: &Span) -> bool {
- self.inner.eq(&other.inner)
- }
- }
-
- /// Prints a span in a form convenient for debugging.
--impl fmt::Debug for Span {
-+impl Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Debug::fmt(&self.inner, f)
- }
- }
-
- /// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
- #[derive(Clone)]
- pub enum TokenTree {
- /// A token stream surrounded by bracket delimiters.
- Group(Group),
-@@ -457,35 +484,35 @@ pub enum TokenTree {
- /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
- Literal(Literal),
- }
-
- impl TokenTree {
- /// Returns the span of this tree, delegating to the `span` method of
- /// the contained token or a delimited stream.
- pub fn span(&self) -> Span {
-- match *self {
-- TokenTree::Group(ref t) => t.span(),
-- TokenTree::Ident(ref t) => t.span(),
-- TokenTree::Punct(ref t) => t.span(),
-- TokenTree::Literal(ref t) => t.span(),
-+ match self {
-+ TokenTree::Group(t) => t.span(),
-+ TokenTree::Ident(t) => t.span(),
-+ TokenTree::Punct(t) => t.span(),
-+ TokenTree::Literal(t) => t.span(),
- }
- }
-
- /// Configures the span for *only this token*.
- ///
- /// Note that if this token is a `Group` then this method will not configure
- /// the span of each of the internal tokens, this will simply delegate to
- /// the `set_span` method of each variant.
- pub fn set_span(&mut self, span: Span) {
-- match *self {
-- TokenTree::Group(ref mut t) => t.set_span(span),
-- TokenTree::Ident(ref mut t) => t.set_span(span),
-- TokenTree::Punct(ref mut t) => t.set_span(span),
-- TokenTree::Literal(ref mut t) => t.set_span(span),
-+ match self {
-+ TokenTree::Group(t) => t.set_span(span),
-+ TokenTree::Ident(t) => t.set_span(span),
-+ TokenTree::Punct(t) => t.set_span(span),
-+ TokenTree::Literal(t) => t.set_span(span),
- }
- }
- }
-
- impl From<Group> for TokenTree {
- fn from(g: Group) -> TokenTree {
- TokenTree::Group(g)
- }
-@@ -508,42 +535,42 @@ impl From<Literal> for TokenTree {
- TokenTree::Literal(g)
- }
- }
-
- /// Prints the token tree as a string that is supposed to be losslessly
- /// convertible back into the same token tree (modulo spans), except for
- /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
- /// numeric literals.
--impl fmt::Display for TokenTree {
-+impl Display for TokenTree {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- match *self {
-- TokenTree::Group(ref t) => t.fmt(f),
-- TokenTree::Ident(ref t) => t.fmt(f),
-- TokenTree::Punct(ref t) => t.fmt(f),
-- TokenTree::Literal(ref t) => t.fmt(f),
-+ match self {
-+ TokenTree::Group(t) => Display::fmt(t, f),
-+ TokenTree::Ident(t) => Display::fmt(t, f),
-+ TokenTree::Punct(t) => Display::fmt(t, f),
-+ TokenTree::Literal(t) => Display::fmt(t, f),
- }
- }
- }
-
- /// Prints token tree in a form convenient for debugging.
--impl fmt::Debug for TokenTree {
-+impl Debug for TokenTree {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- // Each of these has the name in the struct type in the derived debug,
- // so don't bother with an extra layer of indirection
-- match *self {
-- TokenTree::Group(ref t) => t.fmt(f),
-- TokenTree::Ident(ref t) => {
-+ match self {
-+ TokenTree::Group(t) => Debug::fmt(t, f),
-+ TokenTree::Ident(t) => {
- let mut debug = f.debug_struct("Ident");
- debug.field("sym", &format_args!("{}", t));
- imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
- debug.finish()
- }
-- TokenTree::Punct(ref t) => t.fmt(f),
-- TokenTree::Literal(ref t) => t.fmt(f),
-+ TokenTree::Punct(t) => Debug::fmt(t, f),
-+ TokenTree::Literal(t) => Debug::fmt(t, f),
- }
- }
- }
-
- /// A delimited token stream.
- ///
- /// A `Group` internally contains a `TokenStream` which is surrounded by
- /// `Delimiter`s.
-@@ -646,25 +673,25 @@ impl Group {
- pub fn set_span(&mut self, span: Span) {
- self.inner.set_span(span.inner)
- }
- }
-
- /// Prints the group as a string that should be losslessly convertible back
- /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
- /// with `Delimiter::None` delimiters.
--impl fmt::Display for Group {
-+impl Display for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-- fmt::Display::fmt(&self.inner, formatter)
-+ Display::fmt(&self.inner, formatter)
- }
- }
-
--impl fmt::Debug for Group {
-+impl Debug for Group {
- fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-- fmt::Debug::fmt(&self.inner, formatter)
-+ Debug::fmt(&self.inner, formatter)
- }
- }
-
- /// An `Punct` is an single punctuation character like `+`, `-` or `#`.
- ///
- /// Multicharacter operators like `+=` are represented as two instances of
- /// `Punct` with different forms of `Spacing` returned.
- #[derive(Clone)]
-@@ -725,23 +752,23 @@ impl Punct {
- /// Configure the span for this punctuation character.
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
- }
-
- /// Prints the punctuation character as a string that should be losslessly
- /// convertible back into the same character.
--impl fmt::Display for Punct {
-+impl Display for Punct {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.op.fmt(f)
-+ Display::fmt(&self.op, f)
- }
- }
-
--impl fmt::Debug for Punct {
-+impl Debug for Punct {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let mut debug = fmt.debug_struct("Punct");
- debug.field("op", &self.op);
- debug.field("spacing", &self.spacing);
- imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
- debug.finish()
- }
- }
-@@ -915,25 +942,25 @@ impl Ord for Ident {
- impl Hash for Ident {
- fn hash<H: Hasher>(&self, hasher: &mut H) {
- self.to_string().hash(hasher)
- }
- }
-
- /// Prints the identifier as a string that should be losslessly convertible back
- /// into the same identifier.
--impl fmt::Display for Ident {
-+impl Display for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Display::fmt(&self.inner, f)
- }
- }
-
--impl fmt::Debug for Ident {
-+impl Debug for Ident {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Debug::fmt(&self.inner, f)
- }
- }
-
- /// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
- /// byte character (`b'a'`), an integer or floating point number with or without
- /// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
- ///
- /// Boolean literals like `true` and `false` do not belong here, they are
-@@ -1135,36 +1162,36 @@ impl Literal {
- /// nightly compiler, this method will always return `None`.
- ///
- /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
- pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
- self.inner.subspan(range).map(Span::_new)
- }
- }
-
--impl fmt::Debug for Literal {
-+impl Debug for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Debug::fmt(&self.inner, f)
- }
- }
-
--impl fmt::Display for Literal {
-+impl Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Display::fmt(&self.inner, f)
- }
- }
-
- /// Public implementation details for the `TokenStream` type, such as iterators.
- pub mod token_stream {
-- use std::fmt;
-+ use crate::{imp, TokenTree};
-+ use std::fmt::{self, Debug};
- use std::marker;
- use std::rc::Rc;
-
- pub use crate::TokenStream;
-- use crate::{imp, TokenTree};
-
- /// An iterator over `TokenStream`'s `TokenTree`s.
- ///
- /// The iteration is "shallow", e.g. the iterator doesn't recurse into
- /// delimited groups, and returns whole groups as token trees.
- #[derive(Clone)]
- pub struct IntoIter {
- inner: imp::TokenTreeIter,
-@@ -1174,19 +1201,19 @@ pub mod token_stream {
- impl Iterator for IntoIter {
- type Item = TokenTree;
-
- fn next(&mut self) -> Option<TokenTree> {
- self.inner.next()
- }
- }
-
-- impl fmt::Debug for IntoIter {
-+ impl Debug for IntoIter {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-- self.inner.fmt(f)
-+ Debug::fmt(&self.inner, f)
- }
- }
-
- impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = IntoIter;
-
- fn into_iter(self) -> IntoIter {
-diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
-new file mode 100644
---- /dev/null
-+++ b/third_party/rust/proc-macro2/src/parse.rs
-@@ -0,0 +1,791 @@
-+use crate::fallback::{
-+ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
-+};
-+use crate::{Delimiter, Punct, Spacing, TokenTree};
-+use std::str::{Bytes, CharIndices, Chars};
-+use unicode_xid::UnicodeXID;
-+
-+#[derive(Copy, Clone, Eq, PartialEq)]
-+pub(crate) struct Cursor<'a> {
-+ pub rest: &'a str,
-+ #[cfg(span_locations)]
-+ pub off: u32,
-+}
-+
-+impl<'a> Cursor<'a> {
-+ fn advance(&self, bytes: usize) -> Cursor<'a> {
-+ let (_front, rest) = self.rest.split_at(bytes);
-+ Cursor {
-+ rest,
-+ #[cfg(span_locations)]
-+ off: self.off + _front.chars().count() as u32,
-+ }
-+ }
-+
-+ fn starts_with(&self, s: &str) -> bool {
-+ self.rest.starts_with(s)
-+ }
-+
-+ pub(crate) fn is_empty(&self) -> bool {
-+ self.rest.is_empty()
-+ }
-+
-+ fn len(&self) -> usize {
-+ self.rest.len()
-+ }
-+
-+ fn as_bytes(&self) -> &'a [u8] {
-+ self.rest.as_bytes()
-+ }
-+
-+ fn bytes(&self) -> Bytes<'a> {
-+ self.rest.bytes()
-+ }
-+
-+ fn chars(&self) -> Chars<'a> {
-+ self.rest.chars()
-+ }
-+
-+ fn char_indices(&self) -> CharIndices<'a> {
-+ self.rest.char_indices()
-+ }
-+
-+ fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
-+ if self.starts_with(tag) {
-+ Ok(self.advance(tag.len()))
-+ } else {
-+ Err(LexError)
-+ }
-+ }
-+}
-+
-+type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
-+
-+fn skip_whitespace(input: Cursor) -> Cursor {
-+ let mut s = input;
-+
-+ while !s.is_empty() {
-+ let byte = s.as_bytes()[0];
-+ if byte == b'/' {
-+ if s.starts_with("//")
-+ && (!s.starts_with("///") || s.starts_with("////"))
-+ && !s.starts_with("//!")
-+ {
-+ let (cursor, _) = take_until_newline_or_eof(s);
-+ s = cursor;
-+ continue;
-+ } else if s.starts_with("/**/") {
-+ s = s.advance(4);
-+ continue;
-+ } else if s.starts_with("/*")
-+ && (!s.starts_with("/**") || s.starts_with("/***"))
-+ && !s.starts_with("/*!")
-+ {
-+ match block_comment(s) {
-+ Ok((rest, _)) => {
-+ s = rest;
-+ continue;
-+ }
-+ Err(LexError) => return s,
-+ }
-+ }
-+ }
-+ match byte {
-+ b' ' | 0x09..=0x0d => {
-+ s = s.advance(1);
-+ continue;
-+ }
-+ b if b <= 0x7f => {}
-+ _ => {
-+ let ch = s.chars().next().unwrap();
-+ if is_whitespace(ch) {
-+ s = s.advance(ch.len_utf8());
-+ continue;
-+ }
-+ }
-+ }
-+ return s;
-+ }
-+ s
-+}
-+
-+fn block_comment(input: Cursor) -> PResult<&str> {
-+ if !input.starts_with("/*") {
-+ return Err(LexError);
-+ }
-+
-+ let mut depth = 0;
-+ let bytes = input.as_bytes();
-+ let mut i = 0;
-+ let upper = bytes.len() - 1;
-+
-+ while i < upper {
-+ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
-+ depth += 1;
-+ i += 1; // eat '*'
-+ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
-+ depth -= 1;
-+ if depth == 0 {
-+ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
-+ }
-+ i += 1; // eat '/'
-+ }
-+ i += 1;
-+ }
-+
-+ Err(LexError)
-+}
-+
-+fn is_whitespace(ch: char) -> bool {
-+ // Rust treats left-to-right mark and right-to-left mark as whitespace
-+ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
-+}
-+
-+fn word_break(input: Cursor) -> Result<Cursor, LexError> {
-+ match input.chars().next() {
-+ Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
-+ Some(_) | None => Ok(input),
-+ }
-+}
-+
-+pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
-+ let mut trees = Vec::new();
-+ let mut stack = Vec::new();
-+
-+ loop {
-+ input = skip_whitespace(input);
-+
-+ if let Ok((rest, tt)) = doc_comment(input) {
-+ trees.extend(tt);
-+ input = rest;
-+ continue;
-+ }
-+
-+ #[cfg(span_locations)]
-+ let lo = input.off;
-+
-+ let first = match input.bytes().next() {
-+ Some(first) => first,
-+ None => break,
-+ };
-+
-+ if let Some(open_delimiter) = match first {
-+ b'(' => Some(Delimiter::Parenthesis),
-+ b'[' => Some(Delimiter::Bracket),
-+ b'{' => Some(Delimiter::Brace),
-+ _ => None,
-+ } {
-+ input = input.advance(1);
-+ let frame = (open_delimiter, trees);
-+ #[cfg(span_locations)]
-+ let frame = (lo, frame);
-+ stack.push(frame);
-+ trees = Vec::new();
-+ } else if let Some(close_delimiter) = match first {
-+ b')' => Some(Delimiter::Parenthesis),
-+ b']' => Some(Delimiter::Bracket),
-+ b'}' => Some(Delimiter::Brace),
-+ _ => None,
-+ } {
-+ input = input.advance(1);
-+ let frame = stack.pop().ok_or(LexError)?;
-+ #[cfg(span_locations)]
-+ let (lo, frame) = frame;
-+ let (open_delimiter, outer) = frame;
-+ if open_delimiter != close_delimiter {
-+ return Err(LexError);
-+ }
-+ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
-+ g.set_span(Span {
-+ #[cfg(span_locations)]
-+ lo,
-+ #[cfg(span_locations)]
-+ hi: input.off,
-+ });
-+ trees = outer;
-+ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
-+ } else {
-+ let (rest, mut tt) = leaf_token(input)?;
-+ tt.set_span(crate::Span::_new_stable(Span {
-+ #[cfg(span_locations)]
-+ lo,
-+ #[cfg(span_locations)]
-+ hi: rest.off,
-+ }));
-+ trees.push(tt);
-+ input = rest;
-+ }
-+ }
-+
-+ if stack.is_empty() {
-+ Ok((input, TokenStream { inner: trees }))
-+ } else {
-+ Err(LexError)
-+ }
-+}
-+
-+fn leaf_token(input: Cursor) -> PResult<TokenTree> {
-+ if let Ok((input, l)) = literal(input) {
-+ // must be parsed before ident
-+ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
-+ } else if let Ok((input, p)) = op(input) {
-+ Ok((input, TokenTree::Punct(p)))
-+ } else if let Ok((input, i)) = ident(input) {
-+ Ok((input, TokenTree::Ident(i)))
-+ } else {
-+ Err(LexError)
-+ }
-+}
-+
-+fn ident(input: Cursor) -> PResult<crate::Ident> {
-+ let raw = input.starts_with("r#");
-+ let rest = input.advance((raw as usize) << 1);
-+
-+ let (rest, sym) = ident_not_raw(rest)?;
-+
-+ if !raw {
-+ let ident = crate::Ident::new(sym, crate::Span::call_site());
-+ return Ok((rest, ident));
-+ }
-+
-+ if sym == "_" {
-+ return Err(LexError);
-+ }
-+
-+ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
-+ Ok((rest, ident))
-+}
-+
-+fn ident_not_raw(input: Cursor) -> PResult<&str> {
-+ let mut chars = input.char_indices();
-+
-+ match chars.next() {
-+ Some((_, ch)) if is_ident_start(ch) => {}
-+ _ => return Err(LexError),
-+ }
-+
-+ let mut end = input.len();
-+ for (i, ch) in chars {
-+ if !is_ident_continue(ch) {
-+ end = i;
-+ break;
-+ }
-+ }
-+
-+ Ok((input.advance(end), &input.rest[..end]))
-+}
-+
-+fn literal(input: Cursor) -> PResult<Literal> {
-+ match literal_nocapture(input) {
-+ Ok(a) => {
-+ let end = input.len() - a.len();
-+ Ok((a, Literal::_new(input.rest[..end].to_string())))
-+ }
-+ Err(LexError) => Err(LexError),
-+ }
-+}
-+
-+fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
-+ if let Ok(ok) = string(input) {
-+ Ok(ok)
-+ } else if let Ok(ok) = byte_string(input) {
-+ Ok(ok)
-+ } else if let Ok(ok) = byte(input) {
-+ Ok(ok)
-+ } else if let Ok(ok) = character(input) {
-+ Ok(ok)
-+ } else if let Ok(ok) = float(input) {
-+ Ok(ok)
-+ } else if let Ok(ok) = int(input) {
-+ Ok(ok)
-+ } else {
-+ Err(LexError)
-+ }
-+}
-+
-+fn literal_suffix(input: Cursor) -> Cursor {
-+ match ident_not_raw(input) {
-+ Ok((input, _)) => input,
-+ Err(LexError) => input,
-+ }
-+}
-+
-+fn string(input: Cursor) -> Result<Cursor, LexError> {
-+ if let Ok(input) = input.parse("\"") {
-+ cooked_string(input)
-+ } else if let Ok(input) = input.parse("r") {
-+ raw_string(input)
-+ } else {
-+ Err(LexError)
-+ }
-+}
-+
-+fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
-+ let mut chars = input.char_indices().peekable();
-+
-+ while let Some((i, ch)) = chars.next() {
-+ match ch {
-+ '"' => {
-+ let input = input.advance(i + 1);
-+ return Ok(literal_suffix(input));
-+ }
-+ '\r' => {
-+ if let Some((_, '\n')) = chars.next() {
-+ // ...
-+ } else {
-+ break;
-+ }
-+ }
-+ '\\' => match chars.next() {
-+ Some((_, 'x')) => {
-+ if !backslash_x_char(&mut chars) {
-+ break;
-+ }
-+ }
-+ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
-+ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
-+ Some((_, 'u')) => {
-+ if !backslash_u(&mut chars) {
-+ break;
-+ }
-+ }
-+ Some((_, '\n')) | Some((_, '\r')) => {
-+ while let Some(&(_, ch)) = chars.peek() {
-+ if ch.is_whitespace() {
-+ chars.next();
-+ } else {
-+ break;
-+ }
-+ }
-+ }
-+ _ => break,
-+ },
-+ _ch => {}
-+ }
-+ }
-+ Err(LexError)
-+}
-+
-+fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
-+ if let Ok(input) = input.parse("b\"") {
-+ cooked_byte_string(input)
-+ } else if let Ok(input) = input.parse("br") {
-+ raw_string(input)
-+ } else {
-+ Err(LexError)
-+ }
-+}
-+
-+fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
-+ let mut bytes = input.bytes().enumerate();
-+ 'outer: while let Some((offset, b)) = bytes.next() {
-+ match b {
-+ b'"' => {
-+ let input = input.advance(offset + 1);
-+ return Ok(literal_suffix(input));
-+ }
-+ b'\r' => {
-+ if let Some((_, b'\n')) = bytes.next() {
-+ // ...
-+ } else {
-+ break;
-+ }
-+ }
-+ b'\\' => match bytes.next() {
-+ Some((_, b'x')) => {
-+ if !backslash_x_byte(&mut bytes) {
-+ break;
-+ }
-+ }
-+ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
-+ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
-+ Some((newline, b'\n')) | Some((newline, b'\r')) => {
-+ let rest = input.advance(newline + 1);
-+ for (offset, ch) in rest.char_indices() {
-+ if !ch.is_whitespace() {
-+ input = rest.advance(offset);
-+ bytes = input.bytes().enumerate();
-+ continue 'outer;
-+ }
-+ }
-+ break;
-+ }
-+ _ => break,
-+ },
-+ b if b < 0x80 => {}
-+ _ => break,
-+ }
-+ }
-+ Err(LexError)
-+}
-+
-+fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
-+ let mut chars = input.char_indices();
-+ let mut n = 0;
-+ while let Some((i, ch)) = chars.next() {
-+ match ch {
-+ '"' => {
-+ n = i;
-+ break;
-+ }
-+ '#' => {}
-+ _ => return Err(LexError),
-+ }
-+ }
-+ for (i, ch) in chars {
-+ match ch {
-+ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
-+ let rest = input.advance(i + 1 + n);
-+ return Ok(literal_suffix(rest));
-+ }
-+ '\r' => {}
-+ _ => {}
-+ }
-+ }
-+ Err(LexError)
-+}
-+
-+fn byte(input: Cursor) -> Result<Cursor, LexError> {
-+ let input = input.parse("b'")?;
-+ let mut bytes = input.bytes().enumerate();
-+ let ok = match bytes.next().map(|(_, b)| b) {
-+ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
-+ Some(b'x') => backslash_x_byte(&mut bytes),
-+ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
-+ | Some(b'"') => true,
-+ _ => false,
-+ },
-+ b => b.is_some(),
-+ };
-+ if !ok {
-+ return Err(LexError);
-+ }
-+ let (offset, _) = bytes.next().ok_or(LexError)?;
-+ if !input.chars().as_str().is_char_boundary(offset) {
-+ return Err(LexError);
-+ }
-+ let input = input.advance(offset).parse("'")?;
-+ Ok(literal_suffix(input))
-+}
-+
-+fn character(input: Cursor) -> Result<Cursor, LexError> {
-+ let input = input.parse("'")?;
-+ let mut chars = input.char_indices();
-+ let ok = match chars.next().map(|(_, ch)| ch) {
-+ Some('\\') => match chars.next().map(|(_, ch)| ch) {
-+ Some('x') => backslash_x_char(&mut chars),
-+ Some('u') => backslash_u(&mut chars),
-+ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
-+ true
-+ }
-+ _ => false,
-+ },
-+ ch => ch.is_some(),
-+ };
-+ if !ok {
-+ return Err(LexError);
-+ }
-+ let (idx, _) = chars.next().ok_or(LexError)?;
-+ let input = input.advance(idx).parse("'")?;
-+ Ok(literal_suffix(input))
-+}
-+
-+macro_rules! next_ch {
-+ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
-+ match $chars.next() {
-+ Some((_, ch)) => match ch {
-+ $pat $(| $rest)* => ch,
-+ _ => return false,
-+ },
-+ None => return false,
-+ }
-+ };
-+}
-+
-+fn backslash_x_char<I>(chars: &mut I) -> bool
-+where
-+ I: Iterator<Item = (usize, char)>,
-+{
-+ next_ch!(chars @ '0'..='7');
-+ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
-+ true
-+}
-+
-+fn backslash_x_byte<I>(chars: &mut I) -> bool
-+where
-+ I: Iterator<Item = (usize, u8)>,
-+{
-+ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
-+ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
-+ true
-+}
-+
-+fn backslash_u<I>(chars: &mut I) -> bool
-+where
-+ I: Iterator<Item = (usize, char)>,
-+{
-+ next_ch!(chars @ '{');
-+ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
-+ loop {
-+ let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
-+ if c == '}' {
-+ return true;
-+ }
-+ }
-+}
-+
-+fn float(input: Cursor) -> Result<Cursor, LexError> {
-+ let mut rest = float_digits(input)?;
-+ if let Some(ch) = rest.chars().next() {
-+ if is_ident_start(ch) {
-+ rest = ident_not_raw(rest)?.0;
-+ }
-+ }
-+ word_break(rest)
-+}
-+
-+fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
-+ let mut chars = input.chars().peekable();
-+ match chars.next() {
-+ Some(ch) if ch >= '0' && ch <= '9' => {}
-+ _ => return Err(LexError),
-+ }
-+
-+ let mut len = 1;
-+ let mut has_dot = false;
-+ let mut has_exp = false;
-+ while let Some(&ch) = chars.peek() {
-+ match ch {
-+ '0'..='9' | '_' => {
-+ chars.next();
-+ len += 1;
-+ }
-+ '.' => {
-+ if has_dot {
-+ break;
-+ }
-+ chars.next();
-+ if chars
-+ .peek()
-+ .map(|&ch| ch == '.' || is_ident_start(ch))
-+ .unwrap_or(false)
-+ {
-+ return Err(LexError);
-+ }
-+ len += 1;
-+ has_dot = true;
-+ }
-+ 'e' | 'E' => {
-+ chars.next();
-+ len += 1;
-+ has_exp = true;
-+ break;
-+ }
-+ _ => break,
-+ }
-+ }
-+
-+ let rest = input.advance(len);
-+ if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
-+ return Err(LexError);
-+ }
-+
-+ if has_exp {
-+ let mut has_exp_value = false;
-+ while let Some(&ch) = chars.peek() {
-+ match ch {
-+ '+' | '-' => {
-+ if has_exp_value {
-+ break;
-+ }
-+ chars.next();
-+ len += 1;
-+ }
-+ '0'..='9' => {
-+ chars.next();
-+ len += 1;
-+ has_exp_value = true;
-+ }
-+ '_' => {
-+ chars.next();
-+ len += 1;
-+ }
-+ _ => break,
-+ }
-+ }
-+ if !has_exp_value {
-+ return Err(LexError);
-+ }
-+ }
-+
-+ Ok(input.advance(len))
-+}
-+
-+fn int(input: Cursor) -> Result<Cursor, LexError> {
-+ let mut rest = digits(input)?;
-+ if let Some(ch) = rest.chars().next() {
-+ if is_ident_start(ch) {
-+ rest = ident_not_raw(rest)?.0;
-+ }
-+ }
-+ word_break(rest)
-+}
-+
-+fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
-+ let base = if input.starts_with("0x") {
-+ input = input.advance(2);
-+ 16
-+ } else if input.starts_with("0o") {
-+ input = input.advance(2);
-+ 8
-+ } else if input.starts_with("0b") {
-+ input = input.advance(2);
-+ 2
-+ } else {
-+ 10
-+ };
-+
-+ let mut len = 0;
-+ let mut empty = true;
-+ for b in input.bytes() {
-+ let digit = match b {
-+ b'0'..=b'9' => (b - b'0') as u64,
-+ b'a'..=b'f' => 10 + (b - b'a') as u64,
-+ b'A'..=b'F' => 10 + (b - b'A') as u64,
-+ b'_' => {
-+ if empty && base == 10 {
-+ return Err(LexError);
-+ }
-+ len += 1;
-+ continue;
-+ }
-+ _ => break,
-+ };
-+ if digit >= base {
-+ return Err(LexError);
-+ }
-+ len += 1;
-+ empty = false;
-+ }
-+ if empty {
-+ Err(LexError)
-+ } else {
-+ Ok(input.advance(len))
-+ }
-+}
-+
-+fn op(input: Cursor) -> PResult<Punct> {
-+ match op_char(input) {
-+ Ok((rest, '\'')) => {
-+ ident(rest)?;
-+ Ok((rest, Punct::new('\'', Spacing::Joint)))
-+ }
-+ Ok((rest, ch)) => {
-+ let kind = match op_char(rest) {
-+ Ok(_) => Spacing::Joint,
-+ Err(LexError) => Spacing::Alone,
-+ };
-+ Ok((rest, Punct::new(ch, kind)))
-+ }
-+ Err(LexError) => Err(LexError),
-+ }
-+}
-+
-+fn op_char(input: Cursor) -> PResult<char> {
-+ if input.starts_with("//") || input.starts_with("/*") {
-+ // Do not accept `/` of a comment as an op.
-+ return Err(LexError);
-+ }
-+
-+ let mut chars = input.chars();
-+ let first = match chars.next() {
-+ Some(ch) => ch,
-+ None => {
-+ return Err(LexError);
-+ }
-+ };
-+ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
-+ if recognized.contains(first) {
-+ Ok((input.advance(first.len_utf8()), first))
-+ } else {
-+ Err(LexError)
-+ }
-+}
-+
-+fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
-+ #[cfg(span_locations)]
-+ let lo = input.off;
-+ let (rest, (comment, inner)) = doc_comment_contents(input)?;
-+ let span = crate::Span::_new_stable(Span {
-+ #[cfg(span_locations)]
-+ lo,
-+ #[cfg(span_locations)]
-+ hi: rest.off,
-+ });
-+
-+ let mut scan_for_bare_cr = comment;
-+ while let Some(cr) = scan_for_bare_cr.find('\r') {
-+ let rest = &scan_for_bare_cr[cr + 1..];
-+ if !rest.starts_with('\n') {
-+ return Err(LexError);
-+ }
-+ scan_for_bare_cr = rest;
-+ }
-+
-+ let mut trees = Vec::new();
-+ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
-+ if inner {
-+ trees.push(Punct::new('!', Spacing::Alone).into());
-+ }
-+ let mut stream = vec![
-+ TokenTree::Ident(crate::Ident::new("doc", span)),
-+ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
-+ TokenTree::Literal(crate::Literal::string(comment)),
-+ ];
-+ for tt in stream.iter_mut() {
-+ tt.set_span(span);
-+ }
-+ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
-+ trees.push(crate::Group::_new_stable(group).into());
-+ for tt in trees.iter_mut() {
-+ tt.set_span(span);
-+ }
-+ Ok((rest, trees))
-+}
-+
-+fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
-+ if input.starts_with("//!") {
-+ let input = input.advance(3);
-+ let (input, s) = take_until_newline_or_eof(input);
-+ Ok((input, (s, true)))
-+ } else if input.starts_with("/*!") {
-+ let (input, s) = block_comment(input)?;
-+ Ok((input, (&s[3..s.len() - 2], true)))
-+ } else if input.starts_with("///") {
-+ let input = input.advance(3);
-+ if input.starts_with("/") {
-+ return Err(LexError);
-+ }
-+ let (input, s) = take_until_newline_or_eof(input);
-+ Ok((input, (s, false)))
-+ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
-+ let (input, s) = block_comment(input)?;
-+ Ok((input, (&s[3..s.len() - 2], false)))
-+ } else {
-+ Err(LexError)
-+ }
-+}
-+
-+fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
-+ let chars = input.char_indices();
-+
-+ for (i, ch) in chars {
-+ if ch == '\n' {
-+ return (input.advance(i), &input.rest[..i]);
-+ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
-+ return (input.advance(i + 1), &input.rest[..i]);
-+ }
-+ }
-+
-+ (input.advance(input.len()), input.rest)
-+}
-diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
-deleted file mode 100644
---- a/third_party/rust/proc-macro2/src/strnom.rs
-+++ /dev/null
-@@ -1,391 +0,0 @@
--//! Adapted from [`nom`](https://github.com/Geal/nom).
--
--use crate::fallback::LexError;
--use std::str::{Bytes, CharIndices, Chars};
--use unicode_xid::UnicodeXID;
--
--#[derive(Copy, Clone, Eq, PartialEq)]
--pub struct Cursor<'a> {
-- pub rest: &'a str,
-- #[cfg(span_locations)]
-- pub off: u32,
--}
--
--impl<'a> Cursor<'a> {
-- #[cfg(not(span_locations))]
-- pub fn advance(&self, amt: usize) -> Cursor<'a> {
-- Cursor {
-- rest: &self.rest[amt..],
-- }
-- }
-- #[cfg(span_locations)]
-- pub fn advance(&self, amt: usize) -> Cursor<'a> {
-- Cursor {
-- rest: &self.rest[amt..],
-- off: self.off + (amt as u32),
-- }
-- }
--
-- pub fn find(&self, p: char) -> Option<usize> {
-- self.rest.find(p)
-- }
--
-- pub fn starts_with(&self, s: &str) -> bool {
-- self.rest.starts_with(s)
-- }
--
-- pub fn is_empty(&self) -> bool {
-- self.rest.is_empty()
-- }
--
-- pub fn len(&self) -> usize {
-- self.rest.len()
-- }
--
-- pub fn as_bytes(&self) -> &'a [u8] {
-- self.rest.as_bytes()
-- }
--
-- pub fn bytes(&self) -> Bytes<'a> {
-- self.rest.bytes()
-- }
--
-- pub fn chars(&self) -> Chars<'a> {
-- self.rest.chars()
-- }
--
-- pub fn char_indices(&self) -> CharIndices<'a> {
-- self.rest.char_indices()
-- }
--}
--
--pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
--
--pub fn whitespace(input: Cursor) -> PResult<()> {
-- if input.is_empty() {
-- return Err(LexError);
-- }
--
-- let bytes = input.as_bytes();
-- let mut i = 0;
-- while i < bytes.len() {
-- let s = input.advance(i);
-- if bytes[i] == b'/' {
-- if s.starts_with("//")
-- && (!s.starts_with("///") || s.starts_with("////"))
-- && !s.starts_with("//!")
-- {
-- if let Some(len) = s.find('\n') {
-- i += len + 1;
-- continue;
-- }
-- break;
-- } else if s.starts_with("/**/") {
-- i += 4;
-- continue;
-- } else if s.starts_with("/*")
-- && (!s.starts_with("/**") || s.starts_with("/***"))
-- && !s.starts_with("/*!")
-- {
-- let (_, com) = block_comment(s)?;
-- i += com.len();
-- continue;
-- }
-- }
-- match bytes[i] {
-- b' ' | 0x09..=0x0d => {
-- i += 1;
-- continue;
-- }
-- b if b <= 0x7f => {}
-- _ => {
-- let ch = s.chars().next().unwrap();
-- if is_whitespace(ch) {
-- i += ch.len_utf8();
-- continue;
-- }
-- }
-- }
-- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
-- }
-- Ok((input.advance(input.len()), ()))
--}
--
--pub fn block_comment(input: Cursor) -> PResult<&str> {
-- if !input.starts_with("/*") {
-- return Err(LexError);
-- }
--
-- let mut depth = 0;
-- let bytes = input.as_bytes();
-- let mut i = 0;
-- let upper = bytes.len() - 1;
-- while i < upper {
-- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
-- depth += 1;
-- i += 1; // eat '*'
-- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
-- depth -= 1;
-- if depth == 0 {
-- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
-- }
-- i += 1; // eat '/'
-- }
-- i += 1;
-- }
-- Err(LexError)
--}
--
--pub fn skip_whitespace(input: Cursor) -> Cursor {
-- match whitespace(input) {
-- Ok((rest, _)) => rest,
-- Err(LexError) => input,
-- }
--}
--
--fn is_whitespace(ch: char) -> bool {
-- // Rust treats left-to-right mark and right-to-left mark as whitespace
-- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
--}
--
--pub fn word_break(input: Cursor) -> PResult<()> {
-- match input.chars().next() {
-- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
-- Some(_) | None => Ok((input, ())),
-- }
--}
--
--macro_rules! named {
-- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
-- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
-- $submac!(i, $($args)*)
-- }
-- };
--}
--
--macro_rules! alt {
-- ($i:expr, $e:ident | $($rest:tt)*) => {
-- alt!($i, call!($e) | $($rest)*)
-- };
--
-- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
-- match $subrule!($i, $($args)*) {
-- res @ Ok(_) => res,
-- _ => alt!($i, $($rest)*)
-- }
-- };
--
-- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
-- match $subrule!($i, $($args)*) {
-- Ok((i, o)) => Ok((i, $gen(o))),
-- Err(LexError) => alt!($i, $($rest)*)
-- }
-- };
--
-- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
-- alt!($i, call!($e) => { $gen } | $($rest)*)
-- };
--
-- ($i:expr, $e:ident => { $gen:expr }) => {
-- alt!($i, call!($e) => { $gen })
-- };
--
-- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
-- match $subrule!($i, $($args)*) {
-- Ok((i, o)) => Ok((i, $gen(o))),
-- Err(LexError) => Err(LexError),
-- }
-- };
--
-- ($i:expr, $e:ident) => {
-- alt!($i, call!($e))
-- };
--
-- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
-- $subrule!($i, $($args)*)
-- };
--}
--
--macro_rules! do_parse {
-- ($i:expr, ( $($rest:expr),* )) => {
-- Ok(($i, ( $($rest),* )))
-- };
--
-- ($i:expr, $e:ident >> $($rest:tt)*) => {
-- do_parse!($i, call!($e) >> $($rest)*)
-- };
--
-- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
-- match $submac!($i, $($args)*) {
-- Err(LexError) => Err(LexError),
-- Ok((i, _)) => do_parse!(i, $($rest)*),
-- }
-- };
--
-- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
-- do_parse!($i, $field: call!($e) >> $($rest)*)
-- };
--
-- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
-- match $submac!($i, $($args)*) {
-- Err(LexError) => Err(LexError),
-- Ok((i, o)) => {
-- let $field = o;
-- do_parse!(i, $($rest)*)
-- },
-- }
-- };
--}
--
--macro_rules! peek {
-- ($i:expr, $submac:ident!( $($args:tt)* )) => {
-- match $submac!($i, $($args)*) {
-- Ok((_, o)) => Ok(($i, o)),
-- Err(LexError) => Err(LexError),
-- }
-- };
--}
--
--macro_rules! call {
-- ($i:expr, $fun:expr $(, $args:expr)*) => {
-- $fun($i $(, $args)*)
-- };
--}
--
--macro_rules! option {
-- ($i:expr, $f:expr) => {
-- match $f($i) {
-- Ok((i, o)) => Ok((i, Some(o))),
-- Err(LexError) => Ok(($i, None)),
-- }
-- };
--}
--
--macro_rules! take_until_newline_or_eof {
-- ($i:expr,) => {{
-- if $i.len() == 0 {
-- Ok(($i, ""))
-- } else {
-- match $i.find('\n') {
-- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
-- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
-- }
-- }
-- }};
--}
--
--macro_rules! tuple {
-- ($i:expr, $($rest:tt)*) => {
-- tuple_parser!($i, (), $($rest)*)
-- };
--}
--
--/// Do not use directly. Use `tuple!`.
--macro_rules! tuple_parser {
-- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
-- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
-- };
--
-- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
-- match $submac!($i, $($args)*) {
-- Err(LexError) => Err(LexError),
-- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
-- }
-- };
--
-- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
-- match $submac!($i, $($args)*) {
-- Err(LexError) => Err(LexError),
-- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
-- }
-- };
--
-- ($i:expr, ($($parsed:tt),*), $e:ident) => {
-- tuple_parser!($i, ($($parsed),*), call!($e))
-- };
--
-- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
-- $submac!($i, $($args)*)
-- };
--
-- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
-- match $submac!($i, $($args)*) {
-- Err(LexError) => Err(LexError),
-- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
-- }
-- };
--
-- ($i:expr, ($($parsed:expr),*)) => {
-- Ok(($i, ($($parsed),*)))
-- };
--}
--
--macro_rules! not {
-- ($i:expr, $submac:ident!( $($args:tt)* )) => {
-- match $submac!($i, $($args)*) {
-- Ok((_, _)) => Err(LexError),
-- Err(LexError) => Ok(($i, ())),
-- }
-- };
--}
--
--macro_rules! tag {
-- ($i:expr, $tag:expr) => {
-- if $i.starts_with($tag) {
-- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
-- } else {
-- Err(LexError)
-- }
-- };
--}
--
--macro_rules! punct {
-- ($i:expr, $punct:expr) => {
-- $crate::strnom::punct($i, $punct)
-- };
--}
--
--/// Do not use directly. Use `punct!`.
--pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
-- let input = skip_whitespace(input);
-- if input.starts_with(token) {
-- Ok((input.advance(token.len()), token))
-- } else {
-- Err(LexError)
-- }
--}
--
--macro_rules! preceded {
-- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
-- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
-- Ok((remaining, (_, o))) => Ok((remaining, o)),
-- Err(LexError) => Err(LexError),
-- }
-- };
--
-- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
-- preceded!($i, $submac!($($args)*), call!($g))
-- };
--}
--
--macro_rules! delimited {
-- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
-- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
-- Err(LexError) => Err(LexError),
-- Ok((i1, (_, o, _))) => Ok((i1, o))
-- }
-- };
--}
--
--macro_rules! map {
-- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
-- match $submac!($i, $($args)*) {
-- Err(LexError) => Err(LexError),
-- Ok((i, o)) => Ok((i, call!(o, $g)))
-- }
-- };
--
-- ($i:expr, $f:expr, $g:expr) => {
-- map!($i, call!($f), $g)
-- };
--}
-diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
---- a/third_party/rust/proc-macro2/src/wrapper.rs
-+++ b/third_party/rust/proc-macro2/src/wrapper.rs
-@@ -1,96 +1,39 @@
--use std::fmt;
--use std::iter;
-+use crate::detection::inside_proc_macro;
-+use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
-+use std::fmt::{self, Debug, Display};
-+use std::iter::FromIterator;
- use std::ops::RangeBounds;
--use std::panic::{self, PanicInfo};
-+use std::panic;
- #[cfg(super_unstable)]
- use std::path::PathBuf;
- use std::str::FromStr;
-
--use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
--
- #[derive(Clone)]
--pub enum TokenStream {
-+pub(crate) enum TokenStream {
- Compiler(DeferredTokenStream),
- Fallback(fallback::TokenStream),
- }
-
- // Work around https://github.com/rust-lang/rust/issues/65080.
- // In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
- // we hold on to the appended tokens and do proc_macro::TokenStream::extend as
- // late as possible to batch together consecutive uses of the Extend impl.
- #[derive(Clone)]
--pub struct DeferredTokenStream {
-+pub(crate) struct DeferredTokenStream {
- stream: proc_macro::TokenStream,
- extra: Vec<proc_macro::TokenTree>,
- }
-
--pub enum LexError {
-+pub(crate) enum LexError {
- Compiler(proc_macro::LexError),
- Fallback(fallback::LexError),
- }
-
--fn nightly_works() -> bool {
-- use std::sync::atomic::*;
-- use std::sync::Once;
--
-- static WORKS: AtomicUsize = AtomicUsize::new(0);
-- static INIT: Once = Once::new();
--
-- match WORKS.load(Ordering::SeqCst) {
-- 1 => return false,
-- 2 => return true,
-- _ => {}
-- }
--
-- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
-- // then use catch_unwind to determine whether the compiler's proc_macro is
-- // working. When proc-macro2 is used from outside of a procedural macro all
-- // of the proc_macro crate's APIs currently panic.
-- //
-- // The Once is to prevent the possibility of this ordering:
-- //
-- // thread 1 calls take_hook, gets the user's original hook
-- // thread 1 calls set_hook with the null hook
-- // thread 2 calls take_hook, thinks null hook is the original hook
-- // thread 2 calls set_hook with the null hook
-- // thread 1 calls set_hook with the actual original hook
-- // thread 2 calls set_hook with what it thinks is the original hook
-- //
-- // in which the user's hook has been lost.
-- //
-- // There is still a race condition where a panic in a different thread can
-- // happen during the interval that the user's original panic hook is
-- // unregistered such that their hook is incorrectly not called. This is
-- // sufficiently unlikely and less bad than printing panic messages to stderr
-- // on correct use of this crate. Maybe there is a libstd feature request
-- // here. For now, if a user needs to guarantee that this failure mode does
-- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
-- // the main thread before launching any other threads.
-- INIT.call_once(|| {
-- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
--
-- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
-- let sanity_check = &*null_hook as *const PanicHook;
-- let original_hook = panic::take_hook();
-- panic::set_hook(null_hook);
--
-- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
-- WORKS.store(works as usize + 1, Ordering::SeqCst);
--
-- let hopefully_null_hook = panic::take_hook();
-- panic::set_hook(original_hook);
-- if sanity_check != &*hopefully_null_hook {
-- panic!("observed race condition in proc_macro2::nightly_works");
-- }
-- });
-- nightly_works()
--}
--
- fn mismatch() -> ! {
- panic!("stable/nightly mismatch")
- }
-
- impl DeferredTokenStream {
- fn new(stream: proc_macro::TokenStream) -> Self {
- DeferredTokenStream {
- stream,
-@@ -98,28 +41,33 @@ impl DeferredTokenStream {
- }
- }
-
- fn is_empty(&self) -> bool {
- self.stream.is_empty() && self.extra.is_empty()
- }
-
- fn evaluate_now(&mut self) {
-- self.stream.extend(self.extra.drain(..));
-+ // If-check provides a fast short circuit for the common case of `extra`
-+ // being empty, which saves a round trip over the proc macro bridge.
-+ // Improves macro expansion time in winrt by 6% in debug mode.
-+ if !self.extra.is_empty() {
-+ self.stream.extend(self.extra.drain(..));
-+ }
- }
-
- fn into_token_stream(mut self) -> proc_macro::TokenStream {
- self.evaluate_now();
- self.stream
- }
- }
-
- impl TokenStream {
- pub fn new() -> TokenStream {
-- if nightly_works() {
-+ if inside_proc_macro() {
- TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
- } else {
- TokenStream::Fallback(fallback::TokenStream::new())
- }
- }
-
- pub fn is_empty(&self) -> bool {
- match self {
-@@ -142,31 +90,37 @@ impl TokenStream {
- }
- }
- }
-
- impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
-- if nightly_works() {
-+ if inside_proc_macro() {
- Ok(TokenStream::Compiler(DeferredTokenStream::new(
-- src.parse()?,
-+ proc_macro_parse(src)?,
- )))
- } else {
- Ok(TokenStream::Fallback(src.parse()?))
- }
- }
- }
-
--impl fmt::Display for TokenStream {
-+// Work around https://github.com/rust-lang/rust/issues/58736.
-+fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
-+ panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
-+ .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
-+}
-+
-+impl Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
-- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
-- TokenStream::Fallback(tts) => tts.fmt(f),
-+ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
-+ TokenStream::Fallback(tts) => Display::fmt(tts, f),
- }
- }
- }
-
- impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> TokenStream {
- TokenStream::Compiler(DeferredTokenStream::new(inner))
- }
-@@ -182,17 +136,17 @@ impl From<TokenStream> for proc_macro::T
- }
-
- impl From<fallback::TokenStream> for TokenStream {
- fn from(inner: fallback::TokenStream) -> TokenStream {
- TokenStream::Fallback(inner)
- }
- }
-
--// Assumes nightly_works().
-+// Assumes inside_proc_macro().
- fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
- match token {
- TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
- TokenTree::Punct(tt) => {
- let spacing = match tt.spacing() {
- Spacing::Joint => proc_macro::Spacing::Joint,
- Spacing::Alone => proc_macro::Spacing::Alone,
- };
-@@ -202,37 +156,37 @@ fn into_compiler_token(token: TokenTree)
- }
- TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
- TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
- }
- }
-
- impl From<TokenTree> for TokenStream {
- fn from(token: TokenTree) -> TokenStream {
-- if nightly_works() {
-+ if inside_proc_macro() {
- TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
- } else {
- TokenStream::Fallback(token.into())
- }
- }
- }
-
--impl iter::FromIterator<TokenTree> for TokenStream {
-+impl FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-- if nightly_works() {
-+ if inside_proc_macro() {
- TokenStream::Compiler(DeferredTokenStream::new(
- trees.into_iter().map(into_compiler_token).collect(),
- ))
- } else {
- TokenStream::Fallback(trees.into_iter().collect())
- }
- }
- }
-
--impl iter::FromIterator<TokenStream> for TokenStream {
-+impl FromIterator<TokenStream> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
- let mut streams = streams.into_iter();
- match streams.next() {
- Some(TokenStream::Compiler(mut first)) => {
- first.evaluate_now();
- first.stream.extend(streams.map(|s| match s {
- TokenStream::Compiler(s) => s.into_token_stream(),
- TokenStream::Fallback(_) => mismatch(),
-@@ -247,75 +201,76 @@ impl iter::FromIterator<TokenStream> for
- TokenStream::Fallback(first)
- }
- None => TokenStream::new(),
- }
- }
- }
-
- impl Extend<TokenTree> for TokenStream {
-- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
-+ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
- match self {
- TokenStream::Compiler(tts) => {
- // Here is the reason for DeferredTokenStream.
-- tts.extra
-- .extend(streams.into_iter().map(into_compiler_token));
-+ for token in stream {
-+ tts.extra.push(into_compiler_token(token));
-+ }
- }
-- TokenStream::Fallback(tts) => tts.extend(streams),
-+ TokenStream::Fallback(tts) => tts.extend(stream),
- }
- }
- }
-
- impl Extend<TokenStream> for TokenStream {
- fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
- match self {
- TokenStream::Compiler(tts) => {
- tts.evaluate_now();
- tts.stream
-- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
-+ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
- }
- TokenStream::Fallback(tts) => {
-- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
-+ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
- }
- }
- }
- }
-
--impl fmt::Debug for TokenStream {
-+impl Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
-- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
-- TokenStream::Fallback(tts) => tts.fmt(f),
-+ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
-+ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
- }
- }
- }
-
- impl From<proc_macro::LexError> for LexError {
- fn from(e: proc_macro::LexError) -> LexError {
- LexError::Compiler(e)
- }
- }
-
- impl From<fallback::LexError> for LexError {
- fn from(e: fallback::LexError) -> LexError {
- LexError::Fallback(e)
- }
- }
-
--impl fmt::Debug for LexError {
-+impl Debug for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match self {
-- LexError::Compiler(e) => e.fmt(f),
-- LexError::Fallback(e) => e.fmt(f),
-+ LexError::Compiler(e) => Debug::fmt(e, f),
-+ LexError::Fallback(e) => Debug::fmt(e, f),
- }
<Skipped 31538 lines>
================================================================
---- gitweb:
http://git.pld-linux.org/gitweb.cgi/packages/thunderbird.git/commitdiff/c82906ead315d93de1cb04763de92e5e0b033fdb