[packages/thunderbird] fix build with rust 1.47

atler atler at pld-linux.org
Fri Nov 13 02:14:23 CET 2020


commit 97d60ae07f077de181cb8da727760b268847f5c0
Author: Jan Palus <atler at pld-linux.org>
Date:   Fri Nov 13 02:10:51 2020 +0100

    fix build with rust 1.47

 rust-1.47.patch  | 35276 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 thunderbird.spec |     2 +
 2 files changed, 35278 insertions(+)
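
For reference, rust-1.47.patch carries the upstream Mozilla changeset for
bug 1663715 (see the embedded hg header below), which bumps the vendored
syn and proc-macro2 crates. Should the patch ever need to be regenerated
against a newer source tree, the upstream commit records that it was
produced with the commands below (a sketch of the upstream procedure,
assumed to be run from the top of the Gecko source directory; not
PLD-specific tooling):

  cargo update -p syn --precise 1.0.40
  ./mach vendor rust
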
---
diff --git a/thunderbird.spec b/thunderbird.spec
index 4ce644e..7c691f5 100644
--- a/thunderbird.spec
+++ b/thunderbird.spec
@@ -185,6 +185,7 @@ Patch0:		prefs.patch
 Patch1:		no-subshell.patch
 Patch2:		enable-addons.patch
 Patch3:		%{name}-system-virtualenv.patch
+Patch4:		rust-1.47.patch
 URL:		http://www.mozilla.org/projects/thunderbird/
 BuildRequires:	alsa-lib-devel
 BuildRequires:	autoconf2_13 >= 2.13
@@ -1364,6 +1365,7 @@ unpack() {
 %patch1 -p1
 %patch2 -p0
 %patch3 -p2
+%patch4 -p1
 
 %build
 cp -p %{_datadir}/automake/config.* build/autoconf
diff --git a/rust-1.47.patch b/rust-1.47.patch
new file mode 100644
index 0000000..65441a1
--- /dev/null
+++ b/rust-1.47.patch
@@ -0,0 +1,35276 @@
+
+# HG changeset patch
+# User Emilio Cobos Álvarez <emilio at crisal.io>
+# Date 1599584448 0
+# Node ID 85c38ea4d34969797eb5d24265cd90cc6841e6ae
+# Parent  5aa243a2fe9d77578dd95ce3ab3a2aa6c1e92604
+Bug 1663715 - Update syn and proc-macro2 so that Firefox can build on Rust nightly again. r=froydnj, a=RyanVM
+
+Generated with:
+
+  cargo update -p syn --precise 1.0.40
+  ./mach vendor rust
+
+Rust issue: https://github.com/rust-lang/rust/issues/76482
+
+Differential Revision: https://phabricator.services.mozilla.com/D89473
+
+diff --git a/Cargo.lock b/Cargo.lock
+--- a/Cargo.lock
++++ b/Cargo.lock
+@@ -3712,19 +3712,19 @@ checksum = "ecd45702f76d6d3c75a80564378a
+ dependencies = [
+  "proc-macro2",
+  "quote",
+  "syn",
+ ]
+ 
+ [[package]]
+ name = "proc-macro2"
+-version = "1.0.5"
+-source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
++version = "1.0.20"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"
+ dependencies = [
+  "unicode-xid",
+ ]
+ 
+ [[package]]
+ name = "procedural-masquerade"
+ version = "0.1.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+@@ -4642,19 +4642,19 @@ dependencies = [
+  "cc",
+  "gleam",
+  "glsl-to-cxx",
+  "webrender_build",
+ ]
+ 
+ [[package]]
+ name = "syn"
+-version = "1.0.5"
+-source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
++version = "1.0.40"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
+ dependencies = [
+  "proc-macro2",
+  "quote",
+  "unicode-xid",
+ ]
+ 
+ [[package]]
+ name = "sync-guid"
+diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
+--- a/third_party/rust/proc-macro2/.cargo-checksum.json
++++ b/third_party/rust/proc-macro2/.cargo-checksum.json
+@@ -1,1 +1,1 @@
+-{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"c20c4c52342e65ea11ad8382edc636e628e8f8c5ab7cffddc32426b2fe8fe4cd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"332185d7ad4c859210f5edd7a76bc95146c8277726a2f81417f34927c4424d68","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"239f9a25c0f2ab57592288d944c7f1a0f887536b6d4dc2428a17640af8d10a41","src/lib.rs":"2b1d98424c9b23b547dabf85554120e5e65472026a0f3f711b3a097bca7c32fe","src/parse.rs":"500edee9773132e27e44d0fdaa042b1cb9451e29e65124493986f51710c0664c","src/wrapper.rs":"d36c0dced7ec0e7585c1f935cda836080bcae6de1de3d7851d962e9e11a3ac48","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"310c856e27ff61c9ec7f0a5cd96031aac02971557b1621f5e17b089d58e79bcd","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
+--- a/third_party/rust/proc-macro2/Cargo.toml
++++ b/third_party/rust/proc-macro2/Cargo.toml
+@@ -8,36 +8,35 @@
+ # If you believe there's an error in this file please file an
+ # issue against the rust-lang/cargo repository. If you're
+ # editing this file be aware that the upstream Cargo.toml
+ # will likely look very different (and much more reasonable)
+ 
+ [package]
+ edition = "2018"
+ name = "proc-macro2"
+-version = "1.0.5"
+-authors = ["Alex Crichton <alex at alexcrichton.com>"]
+-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
+-homepage = "https://github.com/alexcrichton/proc-macro2"
++version = "1.0.20"
++authors = ["Alex Crichton <alex at alexcrichton.com>", "David Tolnay <dtolnay at gmail.com>"]
++description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
+ documentation = "https://docs.rs/proc-macro2"
+ readme = "README.md"
+ keywords = ["macros"]
++categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/alexcrichton/proc-macro2"
+ [package.metadata.docs.rs]
+ rustc-args = ["--cfg", "procmacro2_semver_exempt"]
+ rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
++targets = ["x86_64-unknown-linux-gnu"]
+ 
+-[lib]
+-name = "proc_macro2"
++[package.metadata.playground]
++features = ["span-locations"]
+ [dependencies.unicode-xid]
+ version = "0.2"
+ [dev-dependencies.quote]
+ version = "1.0"
+ default_features = false
+ 
+ [features]
+ default = ["proc-macro"]
+ nightly = []
+ proc-macro = []
+ span-locations = []
+-[badges.travis-ci]
+-repository = "alexcrichton/proc-macro2"
+diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
+--- a/third_party/rust/proc-macro2/README.md
++++ b/third_party/rust/proc-macro2/README.md
+@@ -1,11 +1,11 @@
+ # proc-macro2
+ 
+-[![Build Status](https://api.travis-ci.com/alexcrichton/proc-macro2.svg?branch=master)](https://travis-ci.com/alexcrichton/proc-macro2)
++[![Build Status](https://img.shields.io/github/workflow/status/alexcrichton/proc-macro2/build%20and%20test)](https://github.com/alexcrichton/proc-macro2/actions)
+ [![Latest Version](https://img.shields.io/crates/v/proc-macro2.svg)](https://crates.io/crates/proc-macro2)
+ [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/proc-macro2)
+ 
+ A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
+ This library serves two purposes:
+ 
+ - **Bring proc-macro-like functionality to other contexts like build.rs and
+   main.rs.** Types from `proc_macro` are entirely specific to procedural macros
+diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
+--- a/third_party/rust/proc-macro2/build.rs
++++ b/third_party/rust/proc-macro2/build.rs
+@@ -9,16 +9,20 @@
+ // "wrap_proc_macro"
+ //     Wrap types from libproc_macro rather than polyfilling the whole API.
+ //     Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
+ //     because we can't emulate the unstable API without emulating everything
+ //     else. Also enabled unconditionally on nightly, in which case the
+ //     procmacro2_semver_exempt surface area is implemented by using the
+ //     nightly-only proc_macro API.
+ //
++//  "hygiene"
++//    Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
++//    and Span::located_at. Enabled on Rust 1.45+.
++//
+ // "proc_macro_span"
+ //     Enable non-dummy behavior of Span::start and Span::end methods which
+ //     requires an unstable compiler feature. Enabled when building with
+ //     nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
+ //     features.
+ //
+ // "super_unstable"
+ //     Implement the semver exempt API in terms of the nightly-only proc_macro
+@@ -52,16 +56,24 @@ fn main() {
+         // https://github.com/alexcrichton/proc-macro2/issues/147
+         println!("cargo:rustc-cfg=procmacro2_semver_exempt");
+     }
+ 
+     if semver_exempt || cfg!(feature = "span-locations") {
+         println!("cargo:rustc-cfg=span_locations");
+     }
+ 
++    if version.minor < 39 {
++        println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
++    }
++
++    if version.minor >= 45 {
++        println!("cargo:rustc-cfg=hygiene");
++    }
++
+     let target = env::var("TARGET").unwrap();
+     if !enable_use_proc_macro(&target) {
+         return;
+     }
+ 
+     println!("cargo:rustc-cfg=use_proc_macro");
+ 
+     if version.nightly || !semver_exempt {
+diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
+new file mode 100644
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/detection.rs
+@@ -0,0 +1,67 @@
++use std::panic::{self, PanicInfo};
++use std::sync::atomic::*;
++use std::sync::Once;
++
++static WORKS: AtomicUsize = AtomicUsize::new(0);
++static INIT: Once = Once::new();
++
++pub(crate) fn inside_proc_macro() -> bool {
++    match WORKS.load(Ordering::SeqCst) {
++        1 => return false,
++        2 => return true,
++        _ => {}
++    }
++
++    INIT.call_once(initialize);
++    inside_proc_macro()
++}
++
++pub(crate) fn force_fallback() {
++    WORKS.store(1, Ordering::SeqCst);
++}
++
++pub(crate) fn unforce_fallback() {
++    initialize();
++}
++
++// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
++// then use catch_unwind to determine whether the compiler's proc_macro is
++// working. When proc-macro2 is used from outside of a procedural macro all
++// of the proc_macro crate's APIs currently panic.
++//
++// The Once is to prevent the possibility of this ordering:
++//
++//     thread 1 calls take_hook, gets the user's original hook
++//     thread 1 calls set_hook with the null hook
++//     thread 2 calls take_hook, thinks null hook is the original hook
++//     thread 2 calls set_hook with the null hook
++//     thread 1 calls set_hook with the actual original hook
++//     thread 2 calls set_hook with what it thinks is the original hook
++//
++// in which the user's hook has been lost.
++//
++// There is still a race condition where a panic in a different thread can
++// happen during the interval that the user's original panic hook is
++// unregistered such that their hook is incorrectly not called. This is
++// sufficiently unlikely and less bad than printing panic messages to stderr
++// on correct use of this crate. Maybe there is a libstd feature request
++// here. For now, if a user needs to guarantee that this failure mode does
++// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
++// the main thread before launching any other threads.
++fn initialize() {
++    type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
++
++    let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
++    let sanity_check = &*null_hook as *const PanicHook;
++    let original_hook = panic::take_hook();
++    panic::set_hook(null_hook);
++
++    let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
++    WORKS.store(works as usize + 1, Ordering::SeqCst);
++
++    let hopefully_null_hook = panic::take_hook();
++    panic::set_hook(original_hook);
++    if sanity_check != &*hopefully_null_hook {
++        panic!("observed race condition in proc_macro2::inside_proc_macro");
++    }
++}
+diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
+--- a/third_party/rust/proc-macro2/src/fallback.rs
++++ b/third_party/rust/proc-macro2/src/fallback.rs
+@@ -1,41 +1,121 @@
++use crate::parse::{token_stream, Cursor};
++use crate::{Delimiter, Spacing, TokenTree};
+ #[cfg(span_locations)]
+ use std::cell::RefCell;
+ #[cfg(span_locations)]
+ use std::cmp;
+-use std::fmt;
+-use std::iter;
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
++use std::mem;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::Path;
+ use std::path::PathBuf;
+ use std::str::FromStr;
+ use std::vec;
+-
+-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
+-use crate::{Delimiter, Punct, Spacing, TokenTree};
+ use unicode_xid::UnicodeXID;
+ 
++/// Force use of proc-macro2's fallback implementation of the API for now, even
++/// if the compiler's implementation is available.
++pub fn force() {
++    #[cfg(wrap_proc_macro)]
++    crate::detection::force_fallback();
++}
++
++/// Resume using the compiler's implementation of the proc macro API if it is
++/// available.
++pub fn unforce() {
++    #[cfg(wrap_proc_macro)]
++    crate::detection::unforce_fallback();
++}
++
+ #[derive(Clone)]
+-pub struct TokenStream {
+-    inner: Vec<TokenTree>,
++pub(crate) struct TokenStream {
++    pub(crate) inner: Vec<TokenTree>,
+ }
+ 
+ #[derive(Debug)]
+-pub struct LexError;
++pub(crate) struct LexError;
+ 
+ impl TokenStream {
+     pub fn new() -> TokenStream {
+         TokenStream { inner: Vec::new() }
+     }
+ 
+     pub fn is_empty(&self) -> bool {
+         self.inner.len() == 0
+     }
++
++    fn take_inner(&mut self) -> Vec<TokenTree> {
++        mem::replace(&mut self.inner, Vec::new())
++    }
++
++    fn push_token(&mut self, token: TokenTree) {
++        // https://github.com/alexcrichton/proc-macro2/issues/235
++        match token {
++            #[cfg(not(no_bind_by_move_pattern_guard))]
++            TokenTree::Literal(crate::Literal {
++                #[cfg(wrap_proc_macro)]
++                    inner: crate::imp::Literal::Fallback(literal),
++                #[cfg(not(wrap_proc_macro))]
++                    inner: literal,
++                ..
++            }) if literal.text.starts_with('-') => {
++                push_negative_literal(self, literal);
++            }
++            #[cfg(no_bind_by_move_pattern_guard)]
++            TokenTree::Literal(crate::Literal {
++                #[cfg(wrap_proc_macro)]
++                    inner: crate::imp::Literal::Fallback(literal),
++                #[cfg(not(wrap_proc_macro))]
++                    inner: literal,
++                ..
++            }) => {
++                if literal.text.starts_with('-') {
++                    push_negative_literal(self, literal);
++                } else {
++                    self.inner
++                        .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++                }
++            }
++            _ => self.inner.push(token),
++        }
++
++        #[cold]
++        fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
++            literal.text.remove(0);
++            let mut punct = crate::Punct::new('-', Spacing::Alone);
++            punct.set_span(crate::Span::_new_stable(literal.span));
++            stream.inner.push(TokenTree::Punct(punct));
++            stream
++                .inner
++                .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++        }
++    }
++}
++
++// Nonrecursive to prevent stack overflow.
++impl Drop for TokenStream {
++    fn drop(&mut self) {
++        while let Some(token) = self.inner.pop() {
++            let group = match token {
++                TokenTree::Group(group) => group.inner,
++                _ => continue,
++            };
++            #[cfg(wrap_proc_macro)]
++            let group = match group {
++                crate::imp::Group::Fallback(group) => group,
++                _ => continue,
++            };
++            let mut group = group;
++            self.inner.extend(group.stream.take_inner());
++        }
++    }
+ }
+ 
+ #[cfg(span_locations)]
+ fn get_cursor(src: &str) -> Cursor {
+     // Create a dummy file & add it to the source map
+     SOURCE_MAP.with(|cm| {
+         let mut cm = cm.borrow_mut();
+         let name = format!("<parsed string {}>", cm.files.len());
+@@ -54,68 +134,49 @@ fn get_cursor(src: &str) -> Cursor {
+ 
+ impl FromStr for TokenStream {
+     type Err = LexError;
+ 
+     fn from_str(src: &str) -> Result<TokenStream, LexError> {
+         // Create a dummy file & add it to the source map
+         let cursor = get_cursor(src);
+ 
+-        match token_stream(cursor) {
+-            Ok((input, output)) => {
+-                if skip_whitespace(input).len() != 0 {
+-                    Err(LexError)
+-                } else {
+-                    Ok(output)
+-                }
+-            }
+-            Err(LexError) => Err(LexError),
++        let (rest, tokens) = token_stream(cursor)?;
++        if rest.is_empty() {
++            Ok(tokens)
++        } else {
++            Err(LexError)
+         }
+     }
+ }
+ 
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         let mut joint = false;
+         for (i, tt) in self.inner.iter().enumerate() {
+             if i != 0 && !joint {
+                 write!(f, " ")?;
+             }
+             joint = false;
+-            match *tt {
+-                TokenTree::Group(ref tt) => {
+-                    let (start, end) = match tt.delimiter() {
+-                        Delimiter::Parenthesis => ("(", ")"),
+-                        Delimiter::Brace => ("{", "}"),
+-                        Delimiter::Bracket => ("[", "]"),
+-                        Delimiter::None => ("", ""),
+-                    };
+-                    if tt.stream().into_iter().next().is_none() {
+-                        write!(f, "{} {}", start, end)?
+-                    } else {
+-                        write!(f, "{} {} {}", start, tt.stream(), end)?
+-                    }
++            match tt {
++                TokenTree::Group(tt) => Display::fmt(tt, f),
++                TokenTree::Ident(tt) => Display::fmt(tt, f),
++                TokenTree::Punct(tt) => {
++                    joint = tt.spacing() == Spacing::Joint;
++                    Display::fmt(tt, f)
+                 }
+-                TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+-                TokenTree::Punct(ref tt) => {
+-                    write!(f, "{}", tt.as_char())?;
+-                    match tt.spacing() {
+-                        Spacing::Alone => {}
+-                        Spacing::Joint => joint = true,
+-                    }
+-                }
+-                TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
+-            }
++                TokenTree::Literal(tt) => Display::fmt(tt, f),
++            }?
+         }
+ 
+         Ok(())
+     }
+ }
+ 
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         f.write_str("TokenStream ")?;
+         f.debug_list().entries(self.clone()).finish()
+     }
+ }
+ 
+ #[cfg(use_proc_macro)]
+ impl From<proc_macro::TokenStream> for TokenStream {
+@@ -134,122 +195,107 @@ impl From<TokenStream> for proc_macro::T
+             .to_string()
+             .parse()
+             .expect("failed to parse to compiler tokens")
+     }
+ }
+ 
+ impl From<TokenTree> for TokenStream {
+     fn from(tree: TokenTree) -> TokenStream {
+-        TokenStream { inner: vec![tree] }
++        let mut stream = TokenStream::new();
++        stream.push_token(tree);
++        stream
+     }
+ }
+ 
+-impl iter::FromIterator<TokenTree> for TokenStream {
+-    fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+-        let mut v = Vec::new();
+-
+-        for token in streams.into_iter() {
+-            v.push(token);
+-        }
+-
+-        TokenStream { inner: v }
++impl FromIterator<TokenTree> for TokenStream {
++    fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
++        let mut stream = TokenStream::new();
++        stream.extend(tokens);
++        stream
+     }
+ }
+ 
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+         let mut v = Vec::new();
+ 
+-        for stream in streams.into_iter() {
+-            v.extend(stream.inner);
++        for mut stream in streams {
++            v.extend(stream.take_inner());
+         }
+ 
+         TokenStream { inner: v }
+     }
+ }
+ 
+ impl Extend<TokenTree> for TokenStream {
+-    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+-        self.inner.extend(streams);
++    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
++        tokens.into_iter().for_each(|token| self.push_token(token));
+     }
+ }
+ 
+ impl Extend<TokenStream> for TokenStream {
+     fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+-        self.inner
+-            .extend(streams.into_iter().flat_map(|stream| stream));
++        self.inner.extend(streams.into_iter().flatten());
+     }
+ }
+ 
+-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
++pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+ 
+ impl IntoIterator for TokenStream {
+     type Item = TokenTree;
+     type IntoIter = TokenTreeIter;
+ 
+-    fn into_iter(self) -> TokenTreeIter {
+-        self.inner.into_iter()
++    fn into_iter(mut self) -> TokenTreeIter {
++        self.take_inner().into_iter()
+     }
+ }
+ 
+ #[derive(Clone, PartialEq, Eq)]
+-pub struct SourceFile {
++pub(crate) struct SourceFile {
+     path: PathBuf,
+ }
+ 
+ impl SourceFile {
+     /// Get the path to this source file as a string.
+     pub fn path(&self) -> PathBuf {
+         self.path.clone()
+     }
+ 
+     pub fn is_real(&self) -> bool {
+         // XXX(nika): Support real files in the future?
+         false
+     }
+ }
+ 
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         f.debug_struct("SourceFile")
+             .field("path", &self.path())
+             .field("is_real", &self.is_real())
+             .finish()
+     }
+ }
+ 
+ #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+     pub line: usize,
+     pub column: usize,
+ }
+ 
+ #[cfg(span_locations)]
+ thread_local! {
+     static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
+         // NOTE: We start with a single dummy file which all call_site() and
+         // def_site() spans reference.
+-        files: vec![{
++        files: vec![FileInfo {
+             #[cfg(procmacro2_semver_exempt)]
+-            {
+-                FileInfo {
+-                    name: "<unspecified>".to_owned(),
+-                    span: Span { lo: 0, hi: 0 },
+-                    lines: vec![0],
+-                }
+-            }
+-
+-            #[cfg(not(procmacro2_semver_exempt))]
+-            {
+-                FileInfo {
+-                    span: Span { lo: 0, hi: 0 },
+-                    lines: vec![0],
+-                }
+-            }
++            name: "<unspecified>".to_owned(),
++            span: Span { lo: 0, hi: 0 },
++            lines: vec![0],
+         }],
+     });
+ }
+ 
+ #[cfg(span_locations)]
+ struct FileInfo {
+     #[cfg(procmacro2_semver_exempt)]
+     name: String,
+@@ -277,26 +323,31 @@ impl FileInfo {
+         }
+     }
+ 
+     fn span_within(&self, span: Span) -> bool {
+         span.lo >= self.span.lo && span.hi <= self.span.hi
+     }
+ }
+ 
+-/// Computesthe offsets of each line in the given source string.
++/// Computes the offsets of each line in the given source string
++/// and the total number of characters
+ #[cfg(span_locations)]
+-fn lines_offsets(s: &str) -> Vec<usize> {
++fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
+     let mut lines = vec![0];
+-    let mut prev = 0;
+-    while let Some(len) = s[prev..].find('\n') {
+-        prev += len + 1;
+-        lines.push(prev);
++    let mut total = 0;
++
++    for ch in s.chars() {
++        total += 1;
++        if ch == '\n' {
++            lines.push(total);
++        }
+     }
+-    lines
++
++    (total, lines)
+ }
+ 
+ #[cfg(span_locations)]
+ struct SourceMap {
+     files: Vec<FileInfo>,
+ }
+ 
+ #[cfg(span_locations)]
+@@ -305,81 +356,83 @@ impl SourceMap {
+         // Add 1 so there's always space between files.
+         //
+         // We'll always have at least 1 file, as we initialize our files list
+         // with a dummy file.
+         self.files.last().unwrap().span.hi + 1
+     }
+ 
+     fn add_file(&mut self, name: &str, src: &str) -> Span {
+-        let lines = lines_offsets(src);
++        let (len, lines) = lines_offsets(src);
+         let lo = self.next_start_pos();
+         // XXX(nika): Shouild we bother doing a checked cast or checked add here?
+         let span = Span {
+             lo,
+-            hi: lo + (src.len() as u32),
++            hi: lo + (len as u32),
+         };
+ 
+-        #[cfg(procmacro2_semver_exempt)]
+         self.files.push(FileInfo {
++            #[cfg(procmacro2_semver_exempt)]
+             name: name.to_owned(),
+             span,
+             lines,
+         });
+ 
+         #[cfg(not(procmacro2_semver_exempt))]
+-        self.files.push(FileInfo { span, lines });
+         let _ = name;
+ 
+         span
+     }
+ 
+     fn fileinfo(&self, span: Span) -> &FileInfo {
+         for file in &self.files {
+             if file.span_within(span) {
+                 return file;
+             }
+         }
+         panic!("Invalid span with no related FileInfo!");
+     }
+ }
+ 
+ #[derive(Clone, Copy, PartialEq, Eq)]
+-pub struct Span {
++pub(crate) struct Span {
+     #[cfg(span_locations)]
+-    lo: u32,
++    pub(crate) lo: u32,
+     #[cfg(span_locations)]
+-    hi: u32,
++    pub(crate) hi: u32,
+ }
+ 
+ impl Span {
+     #[cfg(not(span_locations))]
+     pub fn call_site() -> Span {
+         Span {}
+     }
+ 
+     #[cfg(span_locations)]
+     pub fn call_site() -> Span {
+         Span { lo: 0, hi: 0 }
+     }
+ 
++    #[cfg(hygiene)]
++    pub fn mixed_site() -> Span {
++        Span::call_site()
++    }
++
+     #[cfg(procmacro2_semver_exempt)]
+     pub fn def_site() -> Span {
+         Span::call_site()
+     }
+ 
+-    #[cfg(procmacro2_semver_exempt)]
+     pub fn resolved_at(&self, _other: Span) -> Span {
+         // Stable spans consist only of line/column information, so
+         // `resolved_at` and `located_at` only select which span the
+         // caller wants line/column information from.
+         *self
+     }
+ 
+-    #[cfg(procmacro2_semver_exempt)]
+     pub fn located_at(&self, other: Span) -> Span {
+         other
+     }
+ 
+     #[cfg(procmacro2_semver_exempt)]
+     pub fn source_file(&self) -> SourceFile {
+         SOURCE_MAP.with(|cm| {
+             let cm = cm.borrow();
+@@ -422,36 +475,69 @@ impl Span {
+                 return None;
+             }
+             Some(Span {
+                 lo: cmp::min(self.lo, other.lo),
+                 hi: cmp::max(self.hi, other.hi),
+             })
+         })
+     }
++
++    #[cfg(not(span_locations))]
++    fn first_byte(self) -> Self {
++        self
++    }
++
++    #[cfg(span_locations)]
++    fn first_byte(self) -> Self {
++        Span {
++            lo: self.lo,
++            hi: cmp::min(self.lo.saturating_add(1), self.hi),
++        }
++    }
++
++    #[cfg(not(span_locations))]
++    fn last_byte(self) -> Self {
++        self
++    }
++
++    #[cfg(span_locations)]
++    fn last_byte(self) -> Self {
++        Span {
++            lo: cmp::max(self.hi.saturating_sub(1), self.lo),
++            hi: self.hi,
++        }
++    }
+ }
+ 
+-impl fmt::Debug for Span {
++impl Debug for Span {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        #[cfg(procmacro2_semver_exempt)]
++        #[cfg(span_locations)]
+         return write!(f, "bytes({}..{})", self.lo, self.hi);
+ 
+-        #[cfg(not(procmacro2_semver_exempt))]
++        #[cfg(not(span_locations))]
+         write!(f, "Span")
+     }
+ }
+ 
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+-    if cfg!(procmacro2_semver_exempt) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++    #[cfg(span_locations)]
++    {
++        if span.lo == 0 && span.hi == 0 {
++            return;
++        }
++    }
++
++    if cfg!(span_locations) {
+         debug.field("span", &span);
+     }
+ }
+ 
+ #[derive(Clone)]
+-pub struct Group {
++pub(crate) struct Group {
+     delimiter: Delimiter,
+     stream: TokenStream,
+     span: Span,
+ }
+ 
+ impl Group {
+     pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+         Group {
+@@ -469,58 +555,67 @@ impl Group {
+         self.stream.clone()
+     }
+ 
+     pub fn span(&self) -> Span {
+         self.span
+     }
+ 
+     pub fn span_open(&self) -> Span {
+-        self.span
++        self.span.first_byte()
+     }
+ 
+     pub fn span_close(&self) -> Span {
+-        self.span
++        self.span.last_byte()
+     }
+ 
+     pub fn set_span(&mut self, span: Span) {
+         self.span = span;
+     }
+ }
+ 
+-impl fmt::Display for Group {
++impl Display for Group {
++    // We attempt to match libproc_macro's formatting.
++    // Empty parens: ()
++    // Nonempty parens: (...)
++    // Empty brackets: []
++    // Nonempty brackets: [...]
++    // Empty braces: { }
++    // Nonempty braces: { ... }
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        let (left, right) = match self.delimiter {
++        let (open, close) = match self.delimiter {
+             Delimiter::Parenthesis => ("(", ")"),
+-            Delimiter::Brace => ("{", "}"),
++            Delimiter::Brace => ("{ ", "}"),
+             Delimiter::Bracket => ("[", "]"),
+             Delimiter::None => ("", ""),
+         };
+ 
+-        f.write_str(left)?;
+-        self.stream.fmt(f)?;
+-        f.write_str(right)?;
++        f.write_str(open)?;
++        Display::fmt(&self.stream, f)?;
++        if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
++            f.write_str(" ")?;
++        }
++        f.write_str(close)?;
+ 
+         Ok(())
+     }
+ }
+ 
+-impl fmt::Debug for Group {
++impl Debug for Group {
+     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+         let mut debug = fmt.debug_struct("Group");
+         debug.field("delimiter", &self.delimiter);
+         debug.field("stream", &self.stream);
+-        #[cfg(procmacro2_semver_exempt)]
+-        debug.field("span", &self.span);
++        debug_span_field_if_nontrivial(&mut debug, self.span);
+         debug.finish()
+     }
+ }
+ 
+ #[derive(Clone)]
+-pub struct Ident {
++pub(crate) struct Ident {
+     sym: String,
+     span: Span,
+     raw: bool,
+ }
+ 
+ impl Ident {
+     fn _new(string: &str, raw: bool, span: Span) -> Ident {
+         validate_ident(string);
+@@ -544,26 +639,24 @@ impl Ident {
+         self.span
+     }
+ 
+     pub fn set_span(&mut self, span: Span) {
+         self.span = span;
+     }
+ }
+ 
+-#[inline]
+-fn is_ident_start(c: char) -> bool {
++pub(crate) fn is_ident_start(c: char) -> bool {
+     ('a' <= c && c <= 'z')
+         || ('A' <= c && c <= 'Z')
+         || c == '_'
+         || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+ }
+ 
+-#[inline]
+-fn is_ident_continue(c: char) -> bool {
++pub(crate) fn is_ident_continue(c: char) -> bool {
+     ('a' <= c && c <= 'z')
+         || ('A' <= c && c <= 'Z')
+         || c == '_'
+         || ('0' <= c && c <= '9')
+         || (c > '\x7f' && UnicodeXID::is_xid_continue(c))
+ }
+ 
+ fn validate_ident(string: &str) {
+@@ -610,49 +703,49 @@ where
+         if self.raw {
+             other.starts_with("r#") && self.sym == other[2..]
+         } else {
+             self.sym == other
+         }
+     }
+ }
+ 
+-impl fmt::Display for Ident {
++impl Display for Ident {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         if self.raw {
+-            "r#".fmt(f)?;
++            f.write_str("r#")?;
+         }
+-        self.sym.fmt(f)
++        Display::fmt(&self.sym, f)
+     }
+ }
+ 
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+     // Ident(proc_macro), Ident(r#union)
+-    #[cfg(not(procmacro2_semver_exempt))]
++    #[cfg(not(span_locations))]
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         let mut debug = f.debug_tuple("Ident");
+         debug.field(&format_args!("{}", self));
+         debug.finish()
+     }
+ 
+     // Ident {
+     //     sym: proc_macro,
+     //     span: bytes(128..138)
+     // }
+-    #[cfg(procmacro2_semver_exempt)]
++    #[cfg(span_locations)]
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         let mut debug = f.debug_struct("Ident");
+         debug.field("sym", &format_args!("{}", self));
+-        debug.field("span", &self.span);
++        debug_span_field_if_nontrivial(&mut debug, self.span);
+         debug.finish()
+     }
+ }
+ 
+ #[derive(Clone)]
+-pub struct Literal {
++pub(crate) struct Literal {
+     text: String,
+     span: Span,
+ }
+ 
+ macro_rules! suffixed_numbers {
+     ($($name:ident => $kind:ident,)*) => ($(
+         pub fn $name(n: $kind) -> Literal {
+             Literal::_new(format!(concat!("{}", stringify!($kind)), n))
+@@ -664,17 +757,17 @@ macro_rules! unsuffixed_numbers {
+     ($($name:ident => $kind:ident,)*) => ($(
+         pub fn $name(n: $kind) -> Literal {
+             Literal::_new(n.to_string())
+         }
+     )*)
+ }
+ 
+ impl Literal {
+-    fn _new(text: String) -> Literal {
++    pub(crate) fn _new(text: String) -> Literal {
+         Literal {
+             text,
+             span: Span::call_site(),
+         }
+     }
+ 
+     suffixed_numbers! {
+         u8_suffixed => u8,
+@@ -706,61 +799,62 @@ impl Literal {
+         i32_unsuffixed => i32,
+         i64_unsuffixed => i64,
+         i128_unsuffixed => i128,
+         isize_unsuffixed => isize,
+     }
+ 
+     pub fn f32_unsuffixed(f: f32) -> Literal {
+         let mut s = f.to_string();
+-        if !s.contains(".") {
++        if !s.contains('.') {
+             s.push_str(".0");
+         }
+         Literal::_new(s)
+     }
+ 
+     pub fn f64_unsuffixed(f: f64) -> Literal {
+         let mut s = f.to_string();
+-        if !s.contains(".") {
++        if !s.contains('.') {
+             s.push_str(".0");
+         }
+         Literal::_new(s)
+     }
+ 
+     pub fn string(t: &str) -> Literal {
+         let mut text = String::with_capacity(t.len() + 2);
+         text.push('"');
+         for c in t.chars() {
+             if c == '\'' {
+-                // escape_default turns this into "\'" which is unnecessary.
++                // escape_debug turns this into "\'" which is unnecessary.
+                 text.push(c);
+             } else {
+-                text.extend(c.escape_default());
++                text.extend(c.escape_debug());
+             }
+         }
+         text.push('"');
+         Literal::_new(text)
+     }
+ 
+     pub fn character(t: char) -> Literal {
+         let mut text = String::new();
+         text.push('\'');
+         if t == '"' {
+-            // escape_default turns this into '\"' which is unnecessary.
++            // escape_debug turns this into '\"' which is unnecessary.
+             text.push(t);
+         } else {
+-            text.extend(t.escape_default());
++            text.extend(t.escape_debug());
+         }
+         text.push('\'');
+         Literal::_new(text)
+     }
+ 
+     pub fn byte_string(bytes: &[u8]) -> Literal {
+         let mut escaped = "b\"".to_string();
+         for b in bytes {
++            #[allow(clippy::match_overlapping_arm)]
+             match *b {
+                 b'\0' => escaped.push_str(r"\0"),
+                 b'\t' => escaped.push_str(r"\t"),
+                 b'\n' => escaped.push_str(r"\n"),
+                 b'\r' => escaped.push_str(r"\r"),
+                 b'"' => escaped.push_str("\\\""),
+                 b'\\' => escaped.push_str("\\\\"),
+                 b'\x20'..=b'\x7E' => escaped.push(*b as char),
+@@ -779,656 +873,22 @@ impl Literal {
+         self.span = span;
+     }
+ 
+     pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
+         None
+     }
+ }
+ 
+-impl fmt::Display for Literal {
++impl Display for Literal {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.text.fmt(f)
+-    }
+-}
+-
+-impl fmt::Debug for Literal {
+-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+-        let mut debug = fmt.debug_struct("Literal");
+-        debug.field("lit", &format_args!("{}", self.text));
+-        #[cfg(procmacro2_semver_exempt)]
+-        debug.field("span", &self.span);
+-        debug.finish()
+-    }
+-}
+-
+-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
+-    let mut trees = Vec::new();
+-    loop {
+-        let input_no_ws = skip_whitespace(input);
+-        if input_no_ws.rest.len() == 0 {
+-            break;
+-        }
+-        if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+-            input = a;
+-            trees.extend(tokens);
+-            continue;
+-        }
+-
+-        let (a, tt) = match token_tree(input_no_ws) {
+-            Ok(p) => p,
+-            Err(_) => break,
+-        };
+-        trees.push(tt);
+-        input = a;
+-    }
+-    Ok((input, TokenStream { inner: trees }))
+-}
+-
+-#[cfg(not(span_locations))]
+-fn spanned<'a, T>(
+-    input: Cursor<'a>,
+-    f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+-    let (a, b) = f(skip_whitespace(input))?;
+-    Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
+-}
+-
+-#[cfg(span_locations)]
+-fn spanned<'a, T>(
+-    input: Cursor<'a>,
+-    f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+-    let input = skip_whitespace(input);
+-    let lo = input.off;
+-    let (a, b) = f(input)?;
+-    let hi = a.off;
+-    let span = crate::Span::_new_stable(Span { lo, hi });
+-    Ok((a, (b, span)))
+-}
+-
+-fn token_tree(input: Cursor) -> PResult<TokenTree> {
+-    let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+-    tt.set_span(span);
+-    Ok((rest, tt))
+-}
+-
+-named!(token_kind -> TokenTree, alt!(
+-    map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
+-    |
+-    map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
+-    |
+-    map!(op, TokenTree::Punct)
+-    |
+-    symbol_leading_ws
+-));
+-
+-named!(group -> Group, alt!(
+-    delimited!(
+-        punct!("("),
+-        token_stream,
+-        punct!(")")
+-    ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+-    |
+-    delimited!(
+-        punct!("["),
+-        token_stream,
+-        punct!("]")
+-    ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+-    |
+-    delimited!(
+-        punct!("{"),
+-        token_stream,
+-        punct!("}")
+-    ) => { |ts| Group::new(Delimiter::Brace, ts) }
+-));
+-
+-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+-    symbol(skip_whitespace(input))
+-}
+-
+-fn symbol(input: Cursor) -> PResult<TokenTree> {
+-    let raw = input.starts_with("r#");
+-    let rest = input.advance((raw as usize) << 1);
+-
+-    let (rest, sym) = symbol_not_raw(rest)?;
+-
+-    if !raw {
+-        let ident = crate::Ident::new(sym, crate::Span::call_site());
+-        return Ok((rest, ident.into()));
+-    }
+-
+-    if sym == "_" {
+-        return Err(LexError);
+-    }
+-
+-    let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+-    Ok((rest, ident.into()))
+-}
+-
+-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
+-    let mut chars = input.char_indices();
+-
+-    match chars.next() {
+-        Some((_, ch)) if is_ident_start(ch) => {}
+-        _ => return Err(LexError),
+-    }
+-
+-    let mut end = input.len();
+-    for (i, ch) in chars {
+-        if !is_ident_continue(ch) {
+-            end = i;
+-            break;
+-        }
+-    }
+-
+-    Ok((input.advance(end), &input.rest[..end]))
+-}
+-
+-fn literal(input: Cursor) -> PResult<Literal> {
+-    let input_no_ws = skip_whitespace(input);
+-
+-    match literal_nocapture(input_no_ws) {
+-        Ok((a, ())) => {
+-            let start = input.len() - input_no_ws.len();
+-            let len = input_no_ws.len() - a.len();
+-            let end = start + len;
+-            Ok((a, Literal::_new(input.rest[start..end].to_string())))
+-        }
+-        Err(LexError) => Err(LexError),
++        Display::fmt(&self.text, f)
+     }
+ }
+ 
+-named!(literal_nocapture -> (), alt!(
+-    string
+-    |
+-    byte_string
+-    |
+-    byte
+-    |
+-    character
+-    |
+-    float
+-    |
+-    int
+-));
+-
+-named!(string -> (), alt!(
+-    quoted_string
+-    |
+-    preceded!(
+-        punct!("r"),
+-        raw_string
+-    ) => { |_| () }
+-));
+-
+-named!(quoted_string -> (), do_parse!(
+-    punct!("\"") >>
+-    cooked_string >>
+-    tag!("\"") >>
+-    option!(symbol_not_raw) >>
+-    (())
+-));
+-
+-fn cooked_string(input: Cursor) -> PResult<()> {
+-    let mut chars = input.char_indices().peekable();
+-    while let Some((byte_offset, ch)) = chars.next() {
+-        match ch {
+-            '"' => {
+-                return Ok((input.advance(byte_offset), ()));
+-            }
+-            '\r' => {
+-                if let Some((_, '\n')) = chars.next() {
+-                    // ...
+-                } else {
+-                    break;
+-                }
+-            }
+-            '\\' => match chars.next() {
+-                Some((_, 'x')) => {
+-                    if !backslash_x_char(&mut chars) {
+-                        break;
+-                    }
+-                }
+-                Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+-                | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+-                Some((_, 'u')) => {
+-                    if !backslash_u(&mut chars) {
+-                        break;
+-                    }
+-                }
+-                Some((_, '\n')) | Some((_, '\r')) => {
+-                    while let Some(&(_, ch)) = chars.peek() {
+-                        if ch.is_whitespace() {
+-                            chars.next();
+-                        } else {
+-                            break;
+-                        }
+-                    }
+-                }
+-                _ => break,
+-            },
+-            _ch => {}
+-        }
+-    }
+-    Err(LexError)
+-}
+-
+-named!(byte_string -> (), alt!(
+-    delimited!(
+-        punct!("b\""),
+-        cooked_byte_string,
+-        tag!("\"")
+-    ) => { |_| () }
+-    |
+-    preceded!(
+-        punct!("br"),
+-        raw_string
+-    ) => { |_| () }
+-));
+-
+-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
+-    let mut bytes = input.bytes().enumerate();
+-    'outer: while let Some((offset, b)) = bytes.next() {
+-        match b {
+-            b'"' => {
+-                return Ok((input.advance(offset), ()));
+-            }
+-            b'\r' => {
+-                if let Some((_, b'\n')) = bytes.next() {
+-                    // ...
+-                } else {
+-                    break;
+-                }
+-            }
+-            b'\\' => match bytes.next() {
+-                Some((_, b'x')) => {
+-                    if !backslash_x_byte(&mut bytes) {
+-                        break;
+-                    }
+-                }
+-                Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+-                | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+-                Some((newline, b'\n')) | Some((newline, b'\r')) => {
+-                    let rest = input.advance(newline + 1);
+-                    for (offset, ch) in rest.char_indices() {
+-                        if !ch.is_whitespace() {
+-                            input = rest.advance(offset);
+-                            bytes = input.bytes().enumerate();
+-                            continue 'outer;
+-                        }
+-                    }
+-                    break;
+-                }
+-                _ => break,
+-            },
+-            b if b < 0x80 => {}
+-            _ => break,
+-        }
+-    }
+-    Err(LexError)
+-}
+-
+-fn raw_string(input: Cursor) -> PResult<()> {
+-    let mut chars = input.char_indices();
+-    let mut n = 0;
+-    while let Some((byte_offset, ch)) = chars.next() {
+-        match ch {
+-            '"' => {
+-                n = byte_offset;
+-                break;
+-            }
+-            '#' => {}
+-            _ => return Err(LexError),
+-        }
+-    }
+-    for (byte_offset, ch) in chars {
+-        match ch {
+-            '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
+-                let rest = input.advance(byte_offset + 1 + n);
+-                return Ok((rest, ()));
+-            }
+-            '\r' => {}
+-            _ => {}
+-        }
+-    }
+-    Err(LexError)
+-}
+-
+-named!(byte -> (), do_parse!(
+-    punct!("b") >>
+-    tag!("'") >>
+-    cooked_byte >>
+-    tag!("'") >>
+-    (())
+-));
+-
+-fn cooked_byte(input: Cursor) -> PResult<()> {
+-    let mut bytes = input.bytes().enumerate();
+-    let ok = match bytes.next().map(|(_, b)| b) {
+-        Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+-            Some(b'x') => backslash_x_byte(&mut bytes),
+-            Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+-            | Some(b'"') => true,
+-            _ => false,
+-        },
+-        b => b.is_some(),
+-    };
+-    if ok {
+-        match bytes.next() {
+-            Some((offset, _)) => {
+-                if input.chars().as_str().is_char_boundary(offset) {
+-                    Ok((input.advance(offset), ()))
+-                } else {
+-                    Err(LexError)
+-                }
+-            }
+-            None => Ok((input.advance(input.len()), ())),
+-        }
+-    } else {
+-        Err(LexError)
+-    }
+-}
+-
+-named!(character -> (), do_parse!(
+-    punct!("'") >>
+-    cooked_char >>
+-    tag!("'") >>
+-    (())
+-));
+-
+-fn cooked_char(input: Cursor) -> PResult<()> {
+-    let mut chars = input.char_indices();
+-    let ok = match chars.next().map(|(_, ch)| ch) {
+-        Some('\\') => match chars.next().map(|(_, ch)| ch) {
+-            Some('x') => backslash_x_char(&mut chars),
+-            Some('u') => backslash_u(&mut chars),
+-            Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+-                true
+-            }
+-            _ => false,
+-        },
+-        ch => ch.is_some(),
+-    };
+-    if ok {
+-        match chars.next() {
+-            Some((idx, _)) => Ok((input.advance(idx), ())),
+-            None => Ok((input.advance(input.len()), ())),
+-        }
+-    } else {
+-        Err(LexError)
++impl Debug for Literal {
++    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
++        let mut debug = fmt.debug_struct("Literal");
++        debug.field("lit", &format_args!("{}", self.text));
++        debug_span_field_if_nontrivial(&mut debug, self.span);
++        debug.finish()
+     }
+ }
+-
+-macro_rules! next_ch {
+-    ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+-        match $chars.next() {
+-            Some((_, ch)) => match ch {
+-                $pat $(| $rest)*  => ch,
+-                _ => return false,
+-            },
+-            None => return false
+-        }
+-    };
+-}
+-
+-fn backslash_x_char<I>(chars: &mut I) -> bool
+-where
+-    I: Iterator<Item = (usize, char)>,
+-{
+-    next_ch!(chars @ '0'..='7');
+-    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+-    true
+-}
+-
+-fn backslash_x_byte<I>(chars: &mut I) -> bool
+-where
+-    I: Iterator<Item = (usize, u8)>,
+-{
+-    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+-    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+-    true
+-}
+-
+-fn backslash_u<I>(chars: &mut I) -> bool
+-where
+-    I: Iterator<Item = (usize, char)>,
+-{
+-    next_ch!(chars @ '{');
+-    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+-    loop {
+-        let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
+-        if c == '}' {
+-            return true;
+-        }
+-    }
+-}
+-
+-fn float(input: Cursor) -> PResult<()> {
+-    let (mut rest, ()) = float_digits(input)?;
+-    if let Some(ch) = rest.chars().next() {
+-        if is_ident_start(ch) {
+-            rest = symbol_not_raw(rest)?.0;
+-        }
+-    }
+-    word_break(rest)
+-}
+-
+-fn float_digits(input: Cursor) -> PResult<()> {
+-    let mut chars = input.chars().peekable();
+-    match chars.next() {
+-        Some(ch) if ch >= '0' && ch <= '9' => {}
+-        _ => return Err(LexError),
+-    }
+-
+-    let mut len = 1;
+-    let mut has_dot = false;
+-    let mut has_exp = false;
+-    while let Some(&ch) = chars.peek() {
+-        match ch {
+-            '0'..='9' | '_' => {
+-                chars.next();
+-                len += 1;
+-            }
+-            '.' => {
+-                if has_dot {
+-                    break;
+-                }
+-                chars.next();
+-                if chars
+-                    .peek()
+-                    .map(|&ch| ch == '.' || is_ident_start(ch))
+-                    .unwrap_or(false)
+-                {
+-                    return Err(LexError);
+-                }
+-                len += 1;
+-                has_dot = true;
+-            }
+-            'e' | 'E' => {
+-                chars.next();
+-                len += 1;
+-                has_exp = true;
+-                break;
+-            }
+-            _ => break,
+-        }
+-    }
+-
+-    let rest = input.advance(len);
+-    if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
+-        return Err(LexError);
+-    }
+-
+-    if has_exp {
+-        let mut has_exp_value = false;
+-        while let Some(&ch) = chars.peek() {
+-            match ch {
+-                '+' | '-' => {
+-                    if has_exp_value {
+-                        break;
+-                    }
+-                    chars.next();
+-                    len += 1;
+-                }
+-                '0'..='9' => {
+-                    chars.next();
+-                    len += 1;
+-                    has_exp_value = true;
+-                }
+-                '_' => {
+-                    chars.next();
+-                    len += 1;
+-                }
+-                _ => break,
+-            }
+-        }
+-        if !has_exp_value {
+-            return Err(LexError);
+-        }
+-    }
+-
+-    Ok((input.advance(len), ()))
+-}
+-
+-fn int(input: Cursor) -> PResult<()> {
+-    let (mut rest, ()) = digits(input)?;
+-    if let Some(ch) = rest.chars().next() {
+-        if is_ident_start(ch) {
+-            rest = symbol_not_raw(rest)?.0;
+-        }
+-    }
+-    word_break(rest)
+-}
+-
+-fn digits(mut input: Cursor) -> PResult<()> {
+-    let base = if input.starts_with("0x") {
+-        input = input.advance(2);
+-        16
+-    } else if input.starts_with("0o") {
+-        input = input.advance(2);
+-        8
+-    } else if input.starts_with("0b") {
+-        input = input.advance(2);
+-        2
+-    } else {
+-        10
+-    };
+-
+-    let mut len = 0;
+-    let mut empty = true;
+-    for b in input.bytes() {
+-        let digit = match b {
+-            b'0'..=b'9' => (b - b'0') as u64,
+-            b'a'..=b'f' => 10 + (b - b'a') as u64,
+-            b'A'..=b'F' => 10 + (b - b'A') as u64,
+-            b'_' => {
+-                if empty && base == 10 {
+-                    return Err(LexError);
+-                }
+-                len += 1;
+-                continue;
+-            }
+-            _ => break,
+-        };
+-        if digit >= base {
+-            return Err(LexError);
+-        }
+-        len += 1;
+-        empty = false;
+-    }
+-    if empty {
+-        Err(LexError)
+-    } else {
+-        Ok((input.advance(len), ()))
+-    }
+-}
+-
+-fn op(input: Cursor) -> PResult<Punct> {
+-    let input = skip_whitespace(input);
+-    match op_char(input) {
+-        Ok((rest, '\'')) => {
+-            symbol(rest)?;
+-            Ok((rest, Punct::new('\'', Spacing::Joint)))
+-        }
+-        Ok((rest, ch)) => {
+-            let kind = match op_char(rest) {
+-                Ok(_) => Spacing::Joint,
+-                Err(LexError) => Spacing::Alone,
+-            };
+-            Ok((rest, Punct::new(ch, kind)))
+-        }
+-        Err(LexError) => Err(LexError),
+-    }
+-}
+-
+-fn op_char(input: Cursor) -> PResult<char> {
+-    if input.starts_with("//") || input.starts_with("/*") {
+-        // Do not accept `/` of a comment as an op.
+-        return Err(LexError);
+-    }
+-
+-    let mut chars = input.chars();
+-    let first = match chars.next() {
+-        Some(ch) => ch,
+-        None => {
+-            return Err(LexError);
+-        }
+-    };
+-    let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+-    if recognized.contains(first) {
+-        Ok((input.advance(first.len_utf8()), first))
+-    } else {
+-        Err(LexError)
+-    }
+-}
+-
+-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+-    let mut trees = Vec::new();
+-    let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+-    trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+-    if inner {
+-        trees.push(Punct::new('!', Spacing::Alone).into());
+-    }
+-    let mut stream = vec![
+-        TokenTree::Ident(crate::Ident::new("doc", span)),
+-        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+-        TokenTree::Literal(crate::Literal::string(comment)),
+-    ];
+-    for tt in stream.iter_mut() {
+-        tt.set_span(span);
+-    }
+-    let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+-    trees.push(crate::Group::_new_stable(group).into());
+-    for tt in trees.iter_mut() {
+-        tt.set_span(span);
+-    }
+-    Ok((rest, trees))
+-}
+-
+-named!(doc_comment_contents -> (&str, bool), alt!(
+-    do_parse!(
+-        punct!("//!") >>
+-        s: take_until_newline_or_eof!() >>
+-        ((s, true))
+-    )
+-    |
+-    do_parse!(
+-        option!(whitespace) >>
+-        peek!(tag!("/*!")) >>
+-        s: block_comment >>
+-        ((s, true))
+-    )
+-    |
+-    do_parse!(
+-        punct!("///") >>
+-        not!(tag!("/")) >>
+-        s: take_until_newline_or_eof!() >>
+-        ((s, false))
+-    )
+-    |
+-    do_parse!(
+-        option!(whitespace) >>
+-        peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
+-        s: block_comment >>
+-        ((s, false))
+-    )
+-));
+diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
+--- a/third_party/rust/proc-macro2/src/lib.rs
++++ b/third_party/rust/proc-macro2/src/lib.rs
+@@ -73,37 +73,44 @@
+ //!
+ //! # Thread-Safety
+ //!
+ //! Most types in this crate are `!Sync` because the underlying compiler
+ //! types make use of thread-local memory, meaning they cannot be accessed from
+ //! a different thread.
+ 
+ // Proc-macro2 types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.20")]
+ #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+ #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
++#![allow(clippy::needless_doctest_main)]
+ 
+ #[cfg(use_proc_macro)]
+ extern crate proc_macro;
+ 
+ use std::cmp::Ordering;
+-use std::fmt;
++use std::fmt::{self, Debug, Display};
+ use std::hash::{Hash, Hasher};
+ use std::iter::FromIterator;
+ use std::marker;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::PathBuf;
+ use std::rc::Rc;
+ use std::str::FromStr;
+ 
+-#[macro_use]
+-mod strnom;
+-mod fallback;
++mod parse;
++
++#[cfg(wrap_proc_macro)]
++mod detection;
++
++// Public for proc_macro2::fallback::force() and unforce(), but those are quite
++// a niche use case so we omit it from rustdoc.
++#[doc(hidden)]
++pub mod fallback;
+ 
+ #[cfg(not(wrap_proc_macro))]
+ use crate::fallback as imp;
+ #[path = "wrapper.rs"]
+ #[cfg(wrap_proc_macro)]
+ mod imp;
+ 
+ /// An abstract stream of tokens, or more concretely a sequence of token trees.
+@@ -223,32 +230,32 @@ impl FromIterator<TokenStream> for Token
+         TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
+     }
+ }
+ 
+ /// Prints the token stream as a string that is supposed to be losslessly
+ /// convertible back into the same token stream (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.inner.fmt(f)
++        Display::fmt(&self.inner, f)
+     }
+ }
+ 
+ /// Prints token in a form convenient for debugging.
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.inner.fmt(f)
++        Debug::fmt(&self.inner, f)
+     }
+ }
+ 
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.inner.fmt(f)
++        Debug::fmt(&self.inner, f)
+     }
+ }
+ 
+ /// The source file of a given `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+ #[cfg(procmacro2_semver_exempt)]
+ #[derive(Clone, PartialEq, Eq)]
+@@ -286,19 +293,19 @@ impl SourceFile {
+     /// Returns `true` if this source file is a real source file, and not
+     /// generated by an external macro's expansion.
+     pub fn is_real(&self) -> bool {
+         self.inner.is_real()
+     }
+ }
+ 
+ #[cfg(procmacro2_semver_exempt)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.inner.fmt(f)
++        Debug::fmt(&self.inner, f)
+     }
+ }
+ 
+ /// A line-column pair representing the start or end of a `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+ #[cfg(span_locations)]
+ #[derive(Copy, Clone, Debug, PartialEq, Eq)]
+@@ -306,16 +313,32 @@ pub struct LineColumn {
+     /// The 1-indexed line in the source file on which the span starts or ends
+     /// (inclusive).
+     pub line: usize,
+     /// The 0-indexed column (in UTF-8 characters) in the source file on which
+     /// the span starts or ends (inclusive).
+     pub column: usize,
+ }
+ 
++#[cfg(span_locations)]
++impl Ord for LineColumn {
++    fn cmp(&self, other: &Self) -> Ordering {
++        self.line
++            .cmp(&other.line)
++            .then(self.column.cmp(&other.column))
++    }
++}
++
++#[cfg(span_locations)]
++impl PartialOrd for LineColumn {
++    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++        Some(self.cmp(other))
++    }
++}
++
+ /// A region of source code, along with macro expansion information.
+ #[derive(Copy, Clone)]
+ pub struct Span {
+     inner: imp::Span,
+     _marker: marker::PhantomData<Rc<()>>,
+ }
+ 
+ impl Span {
+@@ -337,38 +360,42 @@ impl Span {
+     ///
+     /// Identifiers created with this span will be resolved as if they were
+     /// written directly at the macro call location (call-site hygiene) and
+     /// other code at the macro call site will be able to refer to them as well.
+     pub fn call_site() -> Span {
+         Span::_new(imp::Span::call_site())
+     }
+ 
++    /// The span located at the invocation of the procedural macro, but with
++    /// local variables, labels, and `$crate` resolved at the definition site
++    /// of the macro. This is the same hygiene behavior as `macro_rules`.
++    ///
++    /// This function requires Rust 1.45 or later.
++    #[cfg(hygiene)]
++    pub fn mixed_site() -> Span {
++        Span::_new(imp::Span::mixed_site())
++    }
++
+     /// A span that resolves at the macro definition site.
+     ///
+     /// This method is semver exempt and not exposed by default.
+     #[cfg(procmacro2_semver_exempt)]
+     pub fn def_site() -> Span {
+         Span::_new(imp::Span::def_site())
+     }
+ 
+     /// Creates a new span with the same line/column information as `self` but
+     /// that resolves symbols as though it were at `other`.
+-    ///
+-    /// This method is semver exempt and not exposed by default.
+-    #[cfg(procmacro2_semver_exempt)]
+     pub fn resolved_at(&self, other: Span) -> Span {
+         Span::_new(self.inner.resolved_at(other.inner))
+     }
+ 
+     /// Creates a new span with the same name resolution behavior as `self` but
+     /// with the line/column information of `other`.
+-    ///
+-    /// This method is semver exempt and not exposed by default.
+-    #[cfg(procmacro2_semver_exempt)]
+     pub fn located_at(&self, other: Span) -> Span {
+         Span::_new(self.inner.located_at(other.inner))
+     }
+ 
+     /// Convert `proc_macro2::Span` to `proc_macro::Span`.
+     ///
+     /// This method is available when building with a nightly compiler, or when
+     /// building with rustc 1.29+ *without* semver exempt features.
+@@ -434,19 +461,19 @@ impl Span {
+     /// This method is semver exempt and not exposed by default.
+     #[cfg(procmacro2_semver_exempt)]
+     pub fn eq(&self, other: &Span) -> bool {
+         self.inner.eq(&other.inner)
+     }
+ }
+ 
+ /// Prints a span in a form convenient for debugging.
+-impl fmt::Debug for Span {
++impl Debug for Span {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.inner.fmt(f)
++        Debug::fmt(&self.inner, f)
+     }
+ }
+ 
+ /// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
+ #[derive(Clone)]
+ pub enum TokenTree {
+     /// A token stream surrounded by bracket delimiters.
+     Group(Group),
+@@ -457,35 +484,35 @@ pub enum TokenTree {
+     /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+     Literal(Literal),
+ }
+ 
+ impl TokenTree {
+     /// Returns the span of this tree, delegating to the `span` method of
+     /// the contained token or a delimited stream.
+     pub fn span(&self) -> Span {
+-        match *self {
+-            TokenTree::Group(ref t) => t.span(),
+-            TokenTree::Ident(ref t) => t.span(),
+-            TokenTree::Punct(ref t) => t.span(),
+-            TokenTree::Literal(ref t) => t.span(),
++        match self {
++            TokenTree::Group(t) => t.span(),
++            TokenTree::Ident(t) => t.span(),
++            TokenTree::Punct(t) => t.span(),
++            TokenTree::Literal(t) => t.span(),
+         }
+     }
+ 
+     /// Configures the span for *only this token*.
+     ///
+     /// Note that if this token is a `Group` then this method will not configure
+     /// the span of each of the internal tokens, this will simply delegate to
+     /// the `set_span` method of each variant.
+     pub fn set_span(&mut self, span: Span) {
+-        match *self {
+-            TokenTree::Group(ref mut t) => t.set_span(span),
+-            TokenTree::Ident(ref mut t) => t.set_span(span),
+-            TokenTree::Punct(ref mut t) => t.set_span(span),
+-            TokenTree::Literal(ref mut t) => t.set_span(span),
++        match self {
++            TokenTree::Group(t) => t.set_span(span),
++            TokenTree::Ident(t) => t.set_span(span),
++            TokenTree::Punct(t) => t.set_span(span),
++            TokenTree::Literal(t) => t.set_span(span),
+         }
+     }
+ }
+ 
+ impl From<Group> for TokenTree {
+     fn from(g: Group) -> TokenTree {
+         TokenTree::Group(g)
+     }
+@@ -508,42 +535,42 @@ impl From<Literal> for TokenTree {
+         TokenTree::Literal(g)
+     }
+ }
+ 
+ /// Prints the token tree as a string that is supposed to be losslessly
+ /// convertible back into the same token tree (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenTree {
++impl Display for TokenTree {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        match *self {
+-            TokenTree::Group(ref t) => t.fmt(f),
+-            TokenTree::Ident(ref t) => t.fmt(f),
+-            TokenTree::Punct(ref t) => t.fmt(f),
+-            TokenTree::Literal(ref t) => t.fmt(f),
++        match self {
++            TokenTree::Group(t) => Display::fmt(t, f),
++            TokenTree::Ident(t) => Display::fmt(t, f),
++            TokenTree::Punct(t) => Display::fmt(t, f),
++            TokenTree::Literal(t) => Display::fmt(t, f),
+         }
+     }
+ }
+ 
+ /// Prints token tree in a form convenient for debugging.
+-impl fmt::Debug for TokenTree {
++impl Debug for TokenTree {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         // Each of these has the name in the struct type in the derived debug,
+         // so don't bother with an extra layer of indirection
+-        match *self {
+-            TokenTree::Group(ref t) => t.fmt(f),
+-            TokenTree::Ident(ref t) => {
++        match self {
++            TokenTree::Group(t) => Debug::fmt(t, f),
++            TokenTree::Ident(t) => {
+                 let mut debug = f.debug_struct("Ident");
+                 debug.field("sym", &format_args!("{}", t));
+                 imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
+                 debug.finish()
+             }
+-            TokenTree::Punct(ref t) => t.fmt(f),
+-            TokenTree::Literal(ref t) => t.fmt(f),
++            TokenTree::Punct(t) => Debug::fmt(t, f),
++            TokenTree::Literal(t) => Debug::fmt(t, f),
+         }
+     }
+ }
+ 
+ /// A delimited token stream.
+ ///
+ /// A `Group` internally contains a `TokenStream` which is surrounded by
+ /// `Delimiter`s.
+@@ -646,25 +673,25 @@ impl Group {
+     pub fn set_span(&mut self, span: Span) {
+         self.inner.set_span(span.inner)
+     }
+ }
+ 
+ /// Prints the group as a string that should be losslessly convertible back
+ /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+ /// with `Delimiter::None` delimiters.
+-impl fmt::Display for Group {
++impl Display for Group {
+     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+-        fmt::Display::fmt(&self.inner, formatter)
++        Display::fmt(&self.inner, formatter)
+     }
+ }
+ 
+-impl fmt::Debug for Group {
++impl Debug for Group {
+     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+-        fmt::Debug::fmt(&self.inner, formatter)
++        Debug::fmt(&self.inner, formatter)
+     }
+ }
+ 
+ /// An `Punct` is an single punctuation character like `+`, `-` or `#`.
+ ///
+ /// Multicharacter operators like `+=` are represented as two instances of
+ /// `Punct` with different forms of `Spacing` returned.
+ #[derive(Clone)]
+@@ -725,23 +752,23 @@ impl Punct {
+     /// Configure the span for this punctuation character.
+     pub fn set_span(&mut self, span: Span) {
+         self.span = span;
+     }
+ }
+ 
+ /// Prints the punctuation character as a string that should be losslessly
+ /// convertible back into the same character.
+-impl fmt::Display for Punct {
++impl Display for Punct {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.op.fmt(f)
++        Display::fmt(&self.op, f)
+     }
+ }
+ 
+-impl fmt::Debug for Punct {
++impl Debug for Punct {
+     fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+         let mut debug = fmt.debug_struct("Punct");
+         debug.field("op", &self.op);
+         debug.field("spacing", &self.spacing);
+         imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
+         debug.finish()
+     }
+ }
+@@ -915,25 +942,25 @@ impl Ord for Ident {
+ impl Hash for Ident {
+     fn hash<H: Hasher>(&self, hasher: &mut H) {
+         self.to_string().hash(hasher)
+     }
+ }
+ 
+ /// Prints the identifier as a string that should be losslessly convertible back
+ /// into the same identifier.
+-impl fmt::Display for Ident {
++impl Display for Ident {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.inner.fmt(f)
++        Display::fmt(&self.inner, f)
+     }
+ }
+ 
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.inner.fmt(f)
++        Debug::fmt(&self.inner, f)
+     }
+ }
+ 
+ /// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
+ /// byte character (`b'a'`), an integer or floating point number with or without
+ /// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+ ///
+ /// Boolean literals like `true` and `false` do not belong here, they are
+@@ -1135,36 +1162,36 @@ impl Literal {
+     /// nightly compiler, this method will always return `None`.
+     ///
+     /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
+     pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+         self.inner.subspan(range).map(Span::_new)
+     }
+ }
+ 
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.inner.fmt(f)
++        Debug::fmt(&self.inner, f)
+     }
+ }
+ 
+-impl fmt::Display for Literal {
++impl Display for Literal {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-        self.inner.fmt(f)
++        Display::fmt(&self.inner, f)
+     }
+ }
+ 
+ /// Public implementation details for the `TokenStream` type, such as iterators.
+ pub mod token_stream {
+-    use std::fmt;
++    use crate::{imp, TokenTree};
++    use std::fmt::{self, Debug};
+     use std::marker;
+     use std::rc::Rc;
+ 
+     pub use crate::TokenStream;
+-    use crate::{imp, TokenTree};
+ 
+     /// An iterator over `TokenStream`'s `TokenTree`s.
+     ///
+     /// The iteration is "shallow", e.g. the iterator doesn't recurse into
+     /// delimited groups, and returns whole groups as token trees.
+     #[derive(Clone)]
+     pub struct IntoIter {
+         inner: imp::TokenTreeIter,
+@@ -1174,19 +1201,19 @@ pub mod token_stream {
+     impl Iterator for IntoIter {
+         type Item = TokenTree;
+ 
+         fn next(&mut self) -> Option<TokenTree> {
+             self.inner.next()
+         }
+     }
+ 
+-    impl fmt::Debug for IntoIter {
++    impl Debug for IntoIter {
+         fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+-            self.inner.fmt(f)
++            Debug::fmt(&self.inner, f)
+         }
+     }
+ 
+     impl IntoIterator for TokenStream {
+         type Item = TokenTree;
+         type IntoIter = IntoIter;
+ 
+         fn into_iter(self) -> IntoIter {
+diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
+new file mode 100644
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/parse.rs
+@@ -0,0 +1,791 @@
++use crate::fallback::{
++    is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
++};
++use crate::{Delimiter, Punct, Spacing, TokenTree};
++use std::str::{Bytes, CharIndices, Chars};
++use unicode_xid::UnicodeXID;
++
++#[derive(Copy, Clone, Eq, PartialEq)]
++pub(crate) struct Cursor<'a> {
++    pub rest: &'a str,
++    #[cfg(span_locations)]
++    pub off: u32,
++}
++
++impl<'a> Cursor<'a> {
++    fn advance(&self, bytes: usize) -> Cursor<'a> {
++        let (_front, rest) = self.rest.split_at(bytes);
++        Cursor {
++            rest,
++            #[cfg(span_locations)]
++            off: self.off + _front.chars().count() as u32,
++        }
++    }
++
++    fn starts_with(&self, s: &str) -> bool {
++        self.rest.starts_with(s)
++    }
++
++    pub(crate) fn is_empty(&self) -> bool {
++        self.rest.is_empty()
++    }
++
++    fn len(&self) -> usize {
++        self.rest.len()
++    }
++
++    fn as_bytes(&self) -> &'a [u8] {
++        self.rest.as_bytes()
++    }
++
++    fn bytes(&self) -> Bytes<'a> {
++        self.rest.bytes()
++    }
++
++    fn chars(&self) -> Chars<'a> {
++        self.rest.chars()
++    }
++
++    fn char_indices(&self) -> CharIndices<'a> {
++        self.rest.char_indices()
++    }
++
++    fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
++        if self.starts_with(tag) {
++            Ok(self.advance(tag.len()))
++        } else {
++            Err(LexError)
++        }
++    }
++}
++
++type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
++
++fn skip_whitespace(input: Cursor) -> Cursor {
++    let mut s = input;
++
++    while !s.is_empty() {
++        let byte = s.as_bytes()[0];
++        if byte == b'/' {
++            if s.starts_with("//")
++                && (!s.starts_with("///") || s.starts_with("////"))
++                && !s.starts_with("//!")
++            {
++                let (cursor, _) = take_until_newline_or_eof(s);
++                s = cursor;
++                continue;
++            } else if s.starts_with("/**/") {
++                s = s.advance(4);
++                continue;
++            } else if s.starts_with("/*")
++                && (!s.starts_with("/**") || s.starts_with("/***"))
++                && !s.starts_with("/*!")
++            {
++                match block_comment(s) {
++                    Ok((rest, _)) => {
++                        s = rest;
++                        continue;
++                    }
++                    Err(LexError) => return s,
++                }
++            }
++        }
++        match byte {
++            b' ' | 0x09..=0x0d => {
++                s = s.advance(1);
++                continue;
++            }
++            b if b <= 0x7f => {}
++            _ => {
++                let ch = s.chars().next().unwrap();
++                if is_whitespace(ch) {
++                    s = s.advance(ch.len_utf8());
++                    continue;
++                }
++            }
++        }
++        return s;
++    }
++    s
++}
++
++fn block_comment(input: Cursor) -> PResult<&str> {
++    if !input.starts_with("/*") {
++        return Err(LexError);
++    }
++
++    let mut depth = 0;
++    let bytes = input.as_bytes();
++    let mut i = 0;
++    let upper = bytes.len() - 1;
++
++    while i < upper {
++        if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++            depth += 1;
++            i += 1; // eat '*'
++        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++            depth -= 1;
++            if depth == 0 {
++                return Ok((input.advance(i + 2), &input.rest[..i + 2]));
++            }
++            i += 1; // eat '/'
++        }
++        i += 1;
++    }
++
++    Err(LexError)
++}
++
++fn is_whitespace(ch: char) -> bool {
++    // Rust treats left-to-right mark and right-to-left mark as whitespace
++    ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
++
++fn word_break(input: Cursor) -> Result<Cursor, LexError> {
++    match input.chars().next() {
++        Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
++        Some(_) | None => Ok(input),
++    }
++}
++
++pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
++    let mut trees = Vec::new();
++    let mut stack = Vec::new();
++
++    loop {
++        input = skip_whitespace(input);
++
++        if let Ok((rest, tt)) = doc_comment(input) {
++            trees.extend(tt);
++            input = rest;
++            continue;
++        }
++
++        #[cfg(span_locations)]
++        let lo = input.off;
++
++        let first = match input.bytes().next() {
++            Some(first) => first,
++            None => break,
++        };
++
++        if let Some(open_delimiter) = match first {
++            b'(' => Some(Delimiter::Parenthesis),
++            b'[' => Some(Delimiter::Bracket),
++            b'{' => Some(Delimiter::Brace),
++            _ => None,
++        } {
++            input = input.advance(1);
++            let frame = (open_delimiter, trees);
++            #[cfg(span_locations)]
++            let frame = (lo, frame);
++            stack.push(frame);
++            trees = Vec::new();
++        } else if let Some(close_delimiter) = match first {
++            b')' => Some(Delimiter::Parenthesis),
++            b']' => Some(Delimiter::Bracket),
++            b'}' => Some(Delimiter::Brace),
++            _ => None,
++        } {
++            input = input.advance(1);
++            let frame = stack.pop().ok_or(LexError)?;
++            #[cfg(span_locations)]
++            let (lo, frame) = frame;
++            let (open_delimiter, outer) = frame;
++            if open_delimiter != close_delimiter {
++                return Err(LexError);
++            }
++            let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
++            g.set_span(Span {
++                #[cfg(span_locations)]
++                lo,
++                #[cfg(span_locations)]
++                hi: input.off,
++            });
++            trees = outer;
++            trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
++        } else {
++            let (rest, mut tt) = leaf_token(input)?;
++            tt.set_span(crate::Span::_new_stable(Span {
++                #[cfg(span_locations)]
++                lo,
++                #[cfg(span_locations)]
++                hi: rest.off,
++            }));
++            trees.push(tt);
++            input = rest;
++        }
++    }
++
++    if stack.is_empty() {
++        Ok((input, TokenStream { inner: trees }))
++    } else {
++        Err(LexError)
++    }
++}
++
++fn leaf_token(input: Cursor) -> PResult<TokenTree> {
++    if let Ok((input, l)) = literal(input) {
++        // must be parsed before ident
++        Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
++    } else if let Ok((input, p)) = op(input) {
++        Ok((input, TokenTree::Punct(p)))
++    } else if let Ok((input, i)) = ident(input) {
++        Ok((input, TokenTree::Ident(i)))
++    } else {
++        Err(LexError)
++    }
++}
++
++fn ident(input: Cursor) -> PResult<crate::Ident> {
++    let raw = input.starts_with("r#");
++    let rest = input.advance((raw as usize) << 1);
++
++    let (rest, sym) = ident_not_raw(rest)?;
++
++    if !raw {
++        let ident = crate::Ident::new(sym, crate::Span::call_site());
++        return Ok((rest, ident));
++    }
++
++    if sym == "_" {
++        return Err(LexError);
++    }
++
++    let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
++    Ok((rest, ident))
++}
++
++fn ident_not_raw(input: Cursor) -> PResult<&str> {
++    let mut chars = input.char_indices();
++
++    match chars.next() {
++        Some((_, ch)) if is_ident_start(ch) => {}
++        _ => return Err(LexError),
++    }
++
++    let mut end = input.len();
++    for (i, ch) in chars {
++        if !is_ident_continue(ch) {
++            end = i;
++            break;
++        }
++    }
++
++    Ok((input.advance(end), &input.rest[..end]))
++}
++
++fn literal(input: Cursor) -> PResult<Literal> {
++    match literal_nocapture(input) {
++        Ok(a) => {
++            let end = input.len() - a.len();
++            Ok((a, Literal::_new(input.rest[..end].to_string())))
++        }
++        Err(LexError) => Err(LexError),
++    }
++}
++
++fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
++    if let Ok(ok) = string(input) {
++        Ok(ok)
++    } else if let Ok(ok) = byte_string(input) {
++        Ok(ok)
++    } else if let Ok(ok) = byte(input) {
++        Ok(ok)
++    } else if let Ok(ok) = character(input) {
++        Ok(ok)
++    } else if let Ok(ok) = float(input) {
++        Ok(ok)
++    } else if let Ok(ok) = int(input) {
++        Ok(ok)
++    } else {
++        Err(LexError)
++    }
++}
++
++fn literal_suffix(input: Cursor) -> Cursor {
++    match ident_not_raw(input) {
++        Ok((input, _)) => input,
++        Err(LexError) => input,
++    }
++}
++
++fn string(input: Cursor) -> Result<Cursor, LexError> {
++    if let Ok(input) = input.parse("\"") {
++        cooked_string(input)
++    } else if let Ok(input) = input.parse("r") {
++        raw_string(input)
++    } else {
++        Err(LexError)
++    }
++}
++
++fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
++    let mut chars = input.char_indices().peekable();
++
++    while let Some((i, ch)) = chars.next() {
++        match ch {
++            '"' => {
++                let input = input.advance(i + 1);
++                return Ok(literal_suffix(input));
++            }
++            '\r' => {
++                if let Some((_, '\n')) = chars.next() {
++                    // ...
++                } else {
++                    break;
++                }
++            }
++            '\\' => match chars.next() {
++                Some((_, 'x')) => {
++                    if !backslash_x_char(&mut chars) {
++                        break;
++                    }
++                }
++                Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
++                | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
++                Some((_, 'u')) => {
++                    if !backslash_u(&mut chars) {
++                        break;
++                    }
++                }
++                Some((_, '\n')) | Some((_, '\r')) => {
++                    while let Some(&(_, ch)) = chars.peek() {
++                        if ch.is_whitespace() {
++                            chars.next();
++                        } else {
++                            break;
++                        }
++                    }
++                }
++                _ => break,
++            },
++            _ch => {}
++        }
++    }
++    Err(LexError)
++}
++
++fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
++    if let Ok(input) = input.parse("b\"") {
++        cooked_byte_string(input)
++    } else if let Ok(input) = input.parse("br") {
++        raw_string(input)
++    } else {
++        Err(LexError)
++    }
++}
++
++fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
++    let mut bytes = input.bytes().enumerate();
++    'outer: while let Some((offset, b)) = bytes.next() {
++        match b {
++            b'"' => {
++                let input = input.advance(offset + 1);
++                return Ok(literal_suffix(input));
++            }
++            b'\r' => {
++                if let Some((_, b'\n')) = bytes.next() {
++                    // ...
++                } else {
++                    break;
++                }
++            }
++            b'\\' => match bytes.next() {
++                Some((_, b'x')) => {
++                    if !backslash_x_byte(&mut bytes) {
++                        break;
++                    }
++                }
++                Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
++                | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
++                Some((newline, b'\n')) | Some((newline, b'\r')) => {
++                    let rest = input.advance(newline + 1);
++                    for (offset, ch) in rest.char_indices() {
++                        if !ch.is_whitespace() {
++                            input = rest.advance(offset);
++                            bytes = input.bytes().enumerate();
++                            continue 'outer;
++                        }
++                    }
++                    break;
++                }
++                _ => break,
++            },
++            b if b < 0x80 => {}
++            _ => break,
++        }
++    }
++    Err(LexError)
++}
++
++fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
++    let mut chars = input.char_indices();
++    let mut n = 0;
++    while let Some((i, ch)) = chars.next() {
++        match ch {
++            '"' => {
++                n = i;
++                break;
++            }
++            '#' => {}
++            _ => return Err(LexError),
++        }
++    }
++    for (i, ch) in chars {
++        match ch {
++            '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
++                let rest = input.advance(i + 1 + n);
++                return Ok(literal_suffix(rest));
++            }
++            '\r' => {}
++            _ => {}
++        }
++    }
++    Err(LexError)
++}
++
++fn byte(input: Cursor) -> Result<Cursor, LexError> {
++    let input = input.parse("b'")?;
++    let mut bytes = input.bytes().enumerate();
++    let ok = match bytes.next().map(|(_, b)| b) {
++        Some(b'\\') => match bytes.next().map(|(_, b)| b) {
++            Some(b'x') => backslash_x_byte(&mut bytes),
++            Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
++            | Some(b'"') => true,
++            _ => false,
++        },
++        b => b.is_some(),
++    };
++    if !ok {
++        return Err(LexError);
++    }
++    let (offset, _) = bytes.next().ok_or(LexError)?;
++    if !input.chars().as_str().is_char_boundary(offset) {
++        return Err(LexError);
++    }
++    let input = input.advance(offset).parse("'")?;
++    Ok(literal_suffix(input))
++}
++
++fn character(input: Cursor) -> Result<Cursor, LexError> {
++    let input = input.parse("'")?;
++    let mut chars = input.char_indices();
++    let ok = match chars.next().map(|(_, ch)| ch) {
++        Some('\\') => match chars.next().map(|(_, ch)| ch) {
++            Some('x') => backslash_x_char(&mut chars),
++            Some('u') => backslash_u(&mut chars),
++            Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
++                true
++            }
++            _ => false,
++        },
++        ch => ch.is_some(),
++    };
++    if !ok {
++        return Err(LexError);
++    }
++    let (idx, _) = chars.next().ok_or(LexError)?;
++    let input = input.advance(idx).parse("'")?;
++    Ok(literal_suffix(input))
++}
++
++macro_rules! next_ch {
++    ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
++        match $chars.next() {
++            Some((_, ch)) => match ch {
++                $pat $(| $rest)* => ch,
++                _ => return false,
++            },
++            None => return false,
++        }
++    };
++}
++
++fn backslash_x_char<I>(chars: &mut I) -> bool
++where
++    I: Iterator<Item = (usize, char)>,
++{
++    next_ch!(chars @ '0'..='7');
++    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++    true
++}
++
++fn backslash_x_byte<I>(chars: &mut I) -> bool
++where
++    I: Iterator<Item = (usize, u8)>,
++{
++    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++    next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++    true
++}
++
++fn backslash_u<I>(chars: &mut I) -> bool
++where
++    I: Iterator<Item = (usize, char)>,
++{
++    next_ch!(chars @ '{');
++    next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++    loop {
++        let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
++        if c == '}' {
++            return true;
++        }
++    }
++}
++
++fn float(input: Cursor) -> Result<Cursor, LexError> {
++    let mut rest = float_digits(input)?;
++    if let Some(ch) = rest.chars().next() {
++        if is_ident_start(ch) {
++            rest = ident_not_raw(rest)?.0;
++        }
++    }
++    word_break(rest)
++}
++
++fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
++    let mut chars = input.chars().peekable();
++    match chars.next() {
++        Some(ch) if ch >= '0' && ch <= '9' => {}
++        _ => return Err(LexError),
++    }
++
++    let mut len = 1;
++    let mut has_dot = false;
++    let mut has_exp = false;
++    while let Some(&ch) = chars.peek() {
++        match ch {
++            '0'..='9' | '_' => {
++                chars.next();
++                len += 1;
++            }
++            '.' => {
++                if has_dot {
++                    break;
++                }
++                chars.next();
++                if chars
++                    .peek()
++                    .map(|&ch| ch == '.' || is_ident_start(ch))
++                    .unwrap_or(false)
++                {
++                    return Err(LexError);
++                }
++                len += 1;
++                has_dot = true;
++            }
++            'e' | 'E' => {
++                chars.next();
++                len += 1;
++                has_exp = true;
++                break;
++            }
++            _ => break,
++        }
++    }
++
++    let rest = input.advance(len);
++    if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
++        return Err(LexError);
++    }
++
++    if has_exp {
++        let mut has_exp_value = false;
++        while let Some(&ch) = chars.peek() {
++            match ch {
++                '+' | '-' => {
++                    if has_exp_value {
++                        break;
++                    }
++                    chars.next();
++                    len += 1;
++                }
++                '0'..='9' => {
++                    chars.next();
++                    len += 1;
++                    has_exp_value = true;
++                }
++                '_' => {
++                    chars.next();
++                    len += 1;
++                }
++                _ => break,
++            }
++        }
++        if !has_exp_value {
++            return Err(LexError);
++        }
++    }
++
++    Ok(input.advance(len))
++}
++
++fn int(input: Cursor) -> Result<Cursor, LexError> {
++    let mut rest = digits(input)?;
++    if let Some(ch) = rest.chars().next() {
++        if is_ident_start(ch) {
++            rest = ident_not_raw(rest)?.0;
++        }
++    }
++    word_break(rest)
++}
++
++fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
++    let base = if input.starts_with("0x") {
++        input = input.advance(2);
++        16
++    } else if input.starts_with("0o") {
++        input = input.advance(2);
++        8
++    } else if input.starts_with("0b") {
++        input = input.advance(2);
++        2
++    } else {
++        10
++    };
++
++    let mut len = 0;
++    let mut empty = true;
++    for b in input.bytes() {
++        let digit = match b {
++            b'0'..=b'9' => (b - b'0') as u64,
++            b'a'..=b'f' => 10 + (b - b'a') as u64,
++            b'A'..=b'F' => 10 + (b - b'A') as u64,
++            b'_' => {
++                if empty && base == 10 {
++                    return Err(LexError);
++                }
++                len += 1;
++                continue;
++            }
++            _ => break,
++        };
++        if digit >= base {
++            return Err(LexError);
++        }
++        len += 1;
++        empty = false;
++    }
++    if empty {
++        Err(LexError)
++    } else {
++        Ok(input.advance(len))
++    }
++}
++
++fn op(input: Cursor) -> PResult<Punct> {
++    match op_char(input) {
++        Ok((rest, '\'')) => {
++            ident(rest)?;
++            Ok((rest, Punct::new('\'', Spacing::Joint)))
++        }
++        Ok((rest, ch)) => {
++            let kind = match op_char(rest) {
++                Ok(_) => Spacing::Joint,
++                Err(LexError) => Spacing::Alone,
++            };
++            Ok((rest, Punct::new(ch, kind)))
++        }
++        Err(LexError) => Err(LexError),
++    }
++}
++
++fn op_char(input: Cursor) -> PResult<char> {
++    if input.starts_with("//") || input.starts_with("/*") {
++        // Do not accept `/` of a comment as an op.
++        return Err(LexError);
++    }
++
++    let mut chars = input.chars();
++    let first = match chars.next() {
++        Some(ch) => ch,
++        None => {
++            return Err(LexError);
++        }
++    };
++    let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
++    if recognized.contains(first) {
++        Ok((input.advance(first.len_utf8()), first))
++    } else {
++        Err(LexError)
++    }
++}
++
++fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
++    #[cfg(span_locations)]
++    let lo = input.off;
++    let (rest, (comment, inner)) = doc_comment_contents(input)?;
++    let span = crate::Span::_new_stable(Span {
++        #[cfg(span_locations)]
++        lo,
++        #[cfg(span_locations)]
++        hi: rest.off,
++    });
++
++    let mut scan_for_bare_cr = comment;
++    while let Some(cr) = scan_for_bare_cr.find('\r') {
++        let rest = &scan_for_bare_cr[cr + 1..];
++        if !rest.starts_with('\n') {
++            return Err(LexError);
++        }
++        scan_for_bare_cr = rest;
++    }
++
++    let mut trees = Vec::new();
++    trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
++    if inner {
++        trees.push(Punct::new('!', Spacing::Alone).into());
++    }
++    let mut stream = vec![
++        TokenTree::Ident(crate::Ident::new("doc", span)),
++        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
++        TokenTree::Literal(crate::Literal::string(comment)),
++    ];
++    for tt in stream.iter_mut() {
++        tt.set_span(span);
++    }
++    let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
++    trees.push(crate::Group::_new_stable(group).into());
++    for tt in trees.iter_mut() {
++        tt.set_span(span);
++    }
++    Ok((rest, trees))
++}
++
++fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
++    if input.starts_with("//!") {
++        let input = input.advance(3);
++        let (input, s) = take_until_newline_or_eof(input);
++        Ok((input, (s, true)))
++    } else if input.starts_with("/*!") {
++        let (input, s) = block_comment(input)?;
++        Ok((input, (&s[3..s.len() - 2], true)))
++    } else if input.starts_with("///") {
++        let input = input.advance(3);
++        if input.starts_with("/") {
++            return Err(LexError);
++        }
++        let (input, s) = take_until_newline_or_eof(input);
++        Ok((input, (s, false)))
++    } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
++        let (input, s) = block_comment(input)?;
++        Ok((input, (&s[3..s.len() - 2], false)))
++    } else {
++        Err(LexError)
++    }
++}
++
++fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
++    let chars = input.char_indices();
++
++    for (i, ch) in chars {
++        if ch == '\n' {
++            return (input.advance(i), &input.rest[..i]);
++        } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
++            return (input.advance(i + 1), &input.rest[..i]);
++        }
++    }
++
++    (input.advance(input.len()), input.rest)
++}
+diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
+deleted file mode 100644
+--- a/third_party/rust/proc-macro2/src/strnom.rs
++++ /dev/null
+@@ -1,391 +0,0 @@
+-//! Adapted from [`nom`](https://github.com/Geal/nom).
+-
+-use crate::fallback::LexError;
+-use std::str::{Bytes, CharIndices, Chars};
+-use unicode_xid::UnicodeXID;
+-
+-#[derive(Copy, Clone, Eq, PartialEq)]
+-pub struct Cursor<'a> {
+-    pub rest: &'a str,
+-    #[cfg(span_locations)]
+-    pub off: u32,
+-}
+-
+-impl<'a> Cursor<'a> {
+-    #[cfg(not(span_locations))]
+-    pub fn advance(&self, amt: usize) -> Cursor<'a> {
+-        Cursor {
+-            rest: &self.rest[amt..],
+-        }
+-    }
+-    #[cfg(span_locations)]
+-    pub fn advance(&self, amt: usize) -> Cursor<'a> {
+-        Cursor {
+-            rest: &self.rest[amt..],
+-            off: self.off + (amt as u32),
+-        }
+-    }
+-
+-    pub fn find(&self, p: char) -> Option<usize> {
+-        self.rest.find(p)
+-    }
+-
+-    pub fn starts_with(&self, s: &str) -> bool {
+-        self.rest.starts_with(s)
+-    }
+-
+-    pub fn is_empty(&self) -> bool {
+-        self.rest.is_empty()
+-    }
+-
+-    pub fn len(&self) -> usize {
+-        self.rest.len()
+-    }
+-
+-    pub fn as_bytes(&self) -> &'a [u8] {
+-        self.rest.as_bytes()
+-    }
+-
+-    pub fn bytes(&self) -> Bytes<'a> {
+-        self.rest.bytes()
+-    }
+-
+-    pub fn chars(&self) -> Chars<'a> {
+-        self.rest.chars()
+-    }
+-
+-    pub fn char_indices(&self) -> CharIndices<'a> {
+-        self.rest.char_indices()
+-    }
+-}
+-
+-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
+-
+-pub fn whitespace(input: Cursor) -> PResult<()> {
+-    if input.is_empty() {
+-        return Err(LexError);
+-    }
+-
+-    let bytes = input.as_bytes();
+-    let mut i = 0;
+-    while i < bytes.len() {
+-        let s = input.advance(i);
+-        if bytes[i] == b'/' {
+-            if s.starts_with("//")
+-                && (!s.starts_with("///") || s.starts_with("////"))
+-                && !s.starts_with("//!")
+-            {
+-                if let Some(len) = s.find('\n') {
+-                    i += len + 1;
+-                    continue;
+-                }
+-                break;
+-            } else if s.starts_with("/**/") {
+-                i += 4;
+-                continue;
+-            } else if s.starts_with("/*")
+-                && (!s.starts_with("/**") || s.starts_with("/***"))
+-                && !s.starts_with("/*!")
+-            {
+-                let (_, com) = block_comment(s)?;
+-                i += com.len();
+-                continue;
+-            }
+-        }
+-        match bytes[i] {
+-            b' ' | 0x09..=0x0d => {
+-                i += 1;
+-                continue;
+-            }
+-            b if b <= 0x7f => {}
+-            _ => {
+-                let ch = s.chars().next().unwrap();
+-                if is_whitespace(ch) {
+-                    i += ch.len_utf8();
+-                    continue;
+-                }
+-            }
+-        }
+-        return if i > 0 { Ok((s, ())) } else { Err(LexError) };
+-    }
+-    Ok((input.advance(input.len()), ()))
+-}
+-
+-pub fn block_comment(input: Cursor) -> PResult<&str> {
+-    if !input.starts_with("/*") {
+-        return Err(LexError);
+-    }
+-
+-    let mut depth = 0;
+-    let bytes = input.as_bytes();
+-    let mut i = 0;
+-    let upper = bytes.len() - 1;
+-    while i < upper {
+-        if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+-            depth += 1;
+-            i += 1; // eat '*'
+-        } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+-            depth -= 1;
+-            if depth == 0 {
+-                return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+-            }
+-            i += 1; // eat '/'
+-        }
+-        i += 1;
+-    }
+-    Err(LexError)
+-}
+-
+-pub fn skip_whitespace(input: Cursor) -> Cursor {
+-    match whitespace(input) {
+-        Ok((rest, _)) => rest,
+-        Err(LexError) => input,
+-    }
+-}
+-
+-fn is_whitespace(ch: char) -> bool {
+-    // Rust treats left-to-right mark and right-to-left mark as whitespace
+-    ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+-}
+-
+-pub fn word_break(input: Cursor) -> PResult<()> {
+-    match input.chars().next() {
+-        Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
+-        Some(_) | None => Ok((input, ())),
+-    }
+-}
+-
+-macro_rules! named {
+-    ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
+-        fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
+-            $submac!(i, $($args)*)
+-        }
+-    };
+-}
+-
+-macro_rules! alt {
+-    ($i:expr, $e:ident | $($rest:tt)*) => {
+-        alt!($i, call!($e) | $($rest)*)
+-    };
+-
+-    ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
+-        match $subrule!($i, $($args)*) {
+-            res @ Ok(_) => res,
+-            _ => alt!($i, $($rest)*)
+-        }
+-    };
+-
+-    ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
+-        match $subrule!($i, $($args)*) {
+-            Ok((i, o)) => Ok((i, $gen(o))),
+-            Err(LexError) => alt!($i, $($rest)*)
+-        }
+-    };
+-
+-    ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
+-        alt!($i, call!($e) => { $gen } | $($rest)*)
+-    };
+-
+-    ($i:expr, $e:ident => { $gen:expr }) => {
+-        alt!($i, call!($e) => { $gen })
+-    };
+-
+-    ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
+-        match $subrule!($i, $($args)*) {
+-            Ok((i, o)) => Ok((i, $gen(o))),
+-            Err(LexError) => Err(LexError),
+-        }
+-    };
+-
+-    ($i:expr, $e:ident) => {
+-        alt!($i, call!($e))
+-    };
+-
+-    ($i:expr, $subrule:ident!( $($args:tt)*)) => {
+-        $subrule!($i, $($args)*)
+-    };
+-}
+-
+-macro_rules! do_parse {
+-    ($i:expr, ( $($rest:expr),* )) => {
+-        Ok(($i, ( $($rest),* )))
+-    };
+-
+-    ($i:expr, $e:ident >> $($rest:tt)*) => {
+-        do_parse!($i, call!($e) >> $($rest)*)
+-    };
+-
+-    ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+-        match $submac!($i, $($args)*) {
+-            Err(LexError) => Err(LexError),
+-            Ok((i, _)) => do_parse!(i, $($rest)*),
+-        }
+-    };
+-
+-    ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
+-        do_parse!($i, $field: call!($e) >> $($rest)*)
+-    };
+-
+-    ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+-        match $submac!($i, $($args)*) {
+-            Err(LexError) => Err(LexError),
+-            Ok((i, o)) => {
+-                let $field = o;
+-                do_parse!(i, $($rest)*)
+-            },
+-        }
+-    };
+-}
+-
+-macro_rules! peek {
+-    ($i:expr, $submac:ident!( $($args:tt)* )) => {
+-        match $submac!($i, $($args)*) {
+-            Ok((_, o)) => Ok(($i, o)),
+-            Err(LexError) => Err(LexError),
+-        }
+-    };
+-}
+-
+-macro_rules! call {
+-    ($i:expr, $fun:expr $(, $args:expr)*) => {
+-        $fun($i $(, $args)*)
+-    };
+-}
+-
+-macro_rules! option {
+-    ($i:expr, $f:expr) => {
+-        match $f($i) {
+-            Ok((i, o)) => Ok((i, Some(o))),
+-            Err(LexError) => Ok(($i, None)),
+-        }
+-    };
+-}
+-
+-macro_rules! take_until_newline_or_eof {
+-    ($i:expr,) => {{
+-        if $i.len() == 0 {
+-            Ok(($i, ""))
+-        } else {
+-            match $i.find('\n') {
+-                Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
+-                None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
+-            }
+-        }
+-    }};
+-}
+-
+-macro_rules! tuple {
+-    ($i:expr, $($rest:tt)*) => {
+-        tuple_parser!($i, (), $($rest)*)
+-    };
+-}
+-
+-/// Do not use directly. Use `tuple!`.
+-macro_rules! tuple_parser {
+-    ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
+-        tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
+-    };
+-
+-    ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+-        match $submac!($i, $($args)*) {
+-            Err(LexError) => Err(LexError),
+-            Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
+-        }
+-    };
+-
+-    ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+-        match $submac!($i, $($args)*) {
+-            Err(LexError) => Err(LexError),
+-            Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
+-        }
+-    };
+-
+-    ($i:expr, ($($parsed:tt),*), $e:ident) => {
+-        tuple_parser!($i, ($($parsed),*), call!($e))
+-    };
+-
+-    ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
+-        $submac!($i, $($args)*)
+-    };
+-
+-    ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
+-        match $submac!($i, $($args)*) {
+-            Err(LexError) => Err(LexError),
+-            Ok((i, o)) => Ok((i, ($($parsed),*, o)))
+-        }
+-    };
+-
+-    ($i:expr, ($($parsed:expr),*)) => {
+-        Ok(($i, ($($parsed),*)))
+-    };
+-}
+-
+-macro_rules! not {
+-    ($i:expr, $submac:ident!( $($args:tt)* )) => {
+-        match $submac!($i, $($args)*) {
+-            Ok((_, _)) => Err(LexError),
+-            Err(LexError) => Ok(($i, ())),
+-        }
+-    };
+-}
+-
+-macro_rules! tag {
+-    ($i:expr, $tag:expr) => {
+-        if $i.starts_with($tag) {
+-            Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
+-        } else {
+-            Err(LexError)
+-        }
+-    };
+-}
+-
+-macro_rules! punct {
+-    ($i:expr, $punct:expr) => {
+-        $crate::strnom::punct($i, $punct)
+-    };
+-}
+-
+-/// Do not use directly. Use `punct!`.
+-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
+-    let input = skip_whitespace(input);
+-    if input.starts_with(token) {
+-        Ok((input.advance(token.len()), token))
+-    } else {
+-        Err(LexError)
+-    }
+-}
+-
+-macro_rules! preceded {
+-    ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
+-        match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
+-            Ok((remaining, (_, o))) => Ok((remaining, o)),
+-            Err(LexError) => Err(LexError),
+-        }
+-    };
+-
+-    ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+-        preceded!($i, $submac!($($args)*), call!($g))
+-    };
+-}
+-
+-macro_rules! delimited {
+-    ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
+-        match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
+-            Err(LexError) => Err(LexError),
+-            Ok((i1, (_, o, _))) => Ok((i1, o))
+-        }
+-    };
+-}
+-
+-macro_rules! map {
+-    ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+-        match $submac!($i, $($args)*) {
+-            Err(LexError) => Err(LexError),
+-            Ok((i, o)) => Ok((i, call!(o, $g)))
+-        }
+-    };
+-
+-    ($i:expr, $f:expr, $g:expr) => {
+-        map!($i, call!($f), $g)
+-    };
+-}
+diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
+--- a/third_party/rust/proc-macro2/src/wrapper.rs
++++ b/third_party/rust/proc-macro2/src/wrapper.rs
+@@ -1,96 +1,39 @@
+-use std::fmt;
+-use std::iter;
++use crate::detection::inside_proc_macro;
++use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
+ use std::ops::RangeBounds;
+-use std::panic::{self, PanicInfo};
++use std::panic;
+ #[cfg(super_unstable)]
+ use std::path::PathBuf;
+ use std::str::FromStr;
+ 
+-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+-
+ #[derive(Clone)]
+-pub enum TokenStream {
++pub(crate) enum TokenStream {
+     Compiler(DeferredTokenStream),
+     Fallback(fallback::TokenStream),
+ }
+ 
+ // Work around https://github.com/rust-lang/rust/issues/65080.
+ // In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
+ // we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+ // late as possible to batch together consecutive uses of the Extend impl.
+ #[derive(Clone)]
+-pub struct DeferredTokenStream {
++pub(crate) struct DeferredTokenStream {
+     stream: proc_macro::TokenStream,
+     extra: Vec<proc_macro::TokenTree>,
+ }
+ 
+-pub enum LexError {
++pub(crate) enum LexError {
+     Compiler(proc_macro::LexError),
+     Fallback(fallback::LexError),
+ }
+ 
+-fn nightly_works() -> bool {
+-    use std::sync::atomic::*;
+-    use std::sync::Once;
+-
+-    static WORKS: AtomicUsize = AtomicUsize::new(0);
+-    static INIT: Once = Once::new();
+-
+-    match WORKS.load(Ordering::SeqCst) {
+-        1 => return false,
+-        2 => return true,
+-        _ => {}
+-    }
+-
+-    // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+-    // then use catch_unwind to determine whether the compiler's proc_macro is
+-    // working. When proc-macro2 is used from outside of a procedural macro all
+-    // of the proc_macro crate's APIs currently panic.
+-    //
+-    // The Once is to prevent the possibility of this ordering:
+-    //
+-    //     thread 1 calls take_hook, gets the user's original hook
+-    //     thread 1 calls set_hook with the null hook
+-    //     thread 2 calls take_hook, thinks null hook is the original hook
+-    //     thread 2 calls set_hook with the null hook
+-    //     thread 1 calls set_hook with the actual original hook
+-    //     thread 2 calls set_hook with what it thinks is the original hook
+-    //
+-    // in which the user's hook has been lost.
+-    //
+-    // There is still a race condition where a panic in a different thread can
+-    // happen during the interval that the user's original panic hook is
+-    // unregistered such that their hook is incorrectly not called. This is
+-    // sufficiently unlikely and less bad than printing panic messages to stderr
+-    // on correct use of this crate. Maybe there is a libstd feature request
+-    // here. For now, if a user needs to guarantee that this failure mode does
+-    // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+-    // the main thread before launching any other threads.
+-    INIT.call_once(|| {
+-        type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+-
+-        let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+-        let sanity_check = &*null_hook as *const PanicHook;
+-        let original_hook = panic::take_hook();
+-        panic::set_hook(null_hook);
+-
+-        let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+-        WORKS.store(works as usize + 1, Ordering::SeqCst);
+-
+-        let hopefully_null_hook = panic::take_hook();
+-        panic::set_hook(original_hook);
+-        if sanity_check != &*hopefully_null_hook {
+-            panic!("observed race condition in proc_macro2::nightly_works");
+-        }
+-    });
+-    nightly_works()
+-}
+-
+ fn mismatch() -> ! {
+     panic!("stable/nightly mismatch")
+ }
+ 
+ impl DeferredTokenStream {
+     fn new(stream: proc_macro::TokenStream) -> Self {
+         DeferredTokenStream {
+             stream,
+@@ -98,28 +41,33 @@ impl DeferredTokenStream {
+         }
+     }
+ 
+     fn is_empty(&self) -> bool {
+         self.stream.is_empty() && self.extra.is_empty()
+     }
+ 
+     fn evaluate_now(&mut self) {
+-        self.stream.extend(self.extra.drain(..));
++        // If-check provides a fast short circuit for the common case of `extra`
++        // being empty, which saves a round trip over the proc macro bridge.
++        // Improves macro expansion time in winrt by 6% in debug mode.
++        if !self.extra.is_empty() {
++            self.stream.extend(self.extra.drain(..));
++        }
+     }
+ 
+     fn into_token_stream(mut self) -> proc_macro::TokenStream {
+         self.evaluate_now();
+         self.stream
+     }
+ }
+ 
+ impl TokenStream {
+     pub fn new() -> TokenStream {
+-        if nightly_works() {
++        if inside_proc_macro() {
+             TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
+         } else {
+             TokenStream::Fallback(fallback::TokenStream::new())
+         }
+     }
+ 
+     pub fn is_empty(&self) -> bool {
+         match self {
+@@ -142,31 +90,37 @@ impl TokenStream {
+         }
+     }
+ }
+ 
+ impl FromStr for TokenStream {
+     type Err = LexError;
+ 
+     fn from_str(src: &str) -> Result<TokenStream, LexError> {
+-        if nightly_works() {
++        if inside_proc_macro() {
+             Ok(TokenStream::Compiler(DeferredTokenStream::new(
+-                src.parse()?,
++                proc_macro_parse(src)?,
+             )))
+         } else {
+             Ok(TokenStream::Fallback(src.parse()?))
+         }
+     }
+ }
+ 
+-impl fmt::Display for TokenStream {
++// Work around https://github.com/rust-lang/rust/issues/58736.
++fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
++    panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
++        .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
++}
++
++impl Display for TokenStream {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         match self {
+-            TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+-            TokenStream::Fallback(tts) => tts.fmt(f),
++            TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
++            TokenStream::Fallback(tts) => Display::fmt(tts, f),
+         }
+     }
+ }
+ 
+ impl From<proc_macro::TokenStream> for TokenStream {
+     fn from(inner: proc_macro::TokenStream) -> TokenStream {
+         TokenStream::Compiler(DeferredTokenStream::new(inner))
+     }
+@@ -182,17 +136,17 @@ impl From<TokenStream> for proc_macro::T
+ }
+ 
+ impl From<fallback::TokenStream> for TokenStream {
+     fn from(inner: fallback::TokenStream) -> TokenStream {
+         TokenStream::Fallback(inner)
+     }
+ }
+ 
+-// Assumes nightly_works().
++// Assumes inside_proc_macro().
+ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+     match token {
+         TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+         TokenTree::Punct(tt) => {
+             let spacing = match tt.spacing() {
+                 Spacing::Joint => proc_macro::Spacing::Joint,
+                 Spacing::Alone => proc_macro::Spacing::Alone,
+             };
+@@ -202,37 +156,37 @@ fn into_compiler_token(token: TokenTree)
+         }
+         TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+         TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+     }
+ }
+ 
+ impl From<TokenTree> for TokenStream {
+     fn from(token: TokenTree) -> TokenStream {
+-        if nightly_works() {
++        if inside_proc_macro() {
+             TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+         } else {
+             TokenStream::Fallback(token.into())
+         }
+     }
+ }
+ 
+-impl iter::FromIterator<TokenTree> for TokenStream {
++impl FromIterator<TokenTree> for TokenStream {
+     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+-        if nightly_works() {
++        if inside_proc_macro() {
+             TokenStream::Compiler(DeferredTokenStream::new(
+                 trees.into_iter().map(into_compiler_token).collect(),
+             ))
+         } else {
+             TokenStream::Fallback(trees.into_iter().collect())
+         }
+     }
+ }
+ 
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+         let mut streams = streams.into_iter();
+         match streams.next() {
+             Some(TokenStream::Compiler(mut first)) => {
+                 first.evaluate_now();
+                 first.stream.extend(streams.map(|s| match s {
+                     TokenStream::Compiler(s) => s.into_token_stream(),
+                     TokenStream::Fallback(_) => mismatch(),
+@@ -247,75 +201,76 @@ impl iter::FromIterator<TokenStream> for
+                 TokenStream::Fallback(first)
+             }
+             None => TokenStream::new(),
+         }
+     }
+ }
+ 
+ impl Extend<TokenTree> for TokenStream {
+-    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
++    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
+         match self {
+             TokenStream::Compiler(tts) => {
+                 // Here is the reason for DeferredTokenStream.
+-                tts.extra
+-                    .extend(streams.into_iter().map(into_compiler_token));
++                for token in stream {
++                    tts.extra.push(into_compiler_token(token));
++                }
+             }
+-            TokenStream::Fallback(tts) => tts.extend(streams),
++            TokenStream::Fallback(tts) => tts.extend(stream),
+         }
+     }
+ }
+ 
+ impl Extend<TokenStream> for TokenStream {
+     fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+         match self {
+             TokenStream::Compiler(tts) => {
+                 tts.evaluate_now();
+                 tts.stream
+-                    .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
++                    .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
+             }
+             TokenStream::Fallback(tts) => {
+-                tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
++                tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
+             }
+         }
+     }
+ }
+ 
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         match self {
+-            TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+-            TokenStream::Fallback(tts) => tts.fmt(f),
++            TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
++            TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+         }
+     }
+ }
+ 
+ impl From<proc_macro::LexError> for LexError {
+     fn from(e: proc_macro::LexError) -> LexError {
+         LexError::Compiler(e)
+     }
+ }
+ 
+ impl From<fallback::LexError> for LexError {
+     fn from(e: fallback::LexError) -> LexError {
+         LexError::Fallback(e)
+     }
+ }
+ 
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         match self {
+-            LexError::Compiler(e) => e.fmt(f),
+-            LexError::Fallback(e) => e.fmt(f),
++            LexError::Compiler(e) => Debug::fmt(e, f),
++            LexError::Fallback(e) => Debug::fmt(e, f),
+         }
+     }
+ }
+ 
+ #[derive(Clone)]
+-pub enum TokenTreeIter {
++pub(crate) enum TokenTreeIter {
+     Compiler(proc_macro::token_stream::IntoIter),
+     Fallback(fallback::TokenTreeIter),
+ }
+ 
+ impl IntoIterator for TokenStream {
+     type Item = TokenTree;
+     type IntoIter = TokenTreeIter;
+ 
+@@ -356,25 +311,25 @@ impl Iterator for TokenTreeIter {
+     fn size_hint(&self) -> (usize, Option<usize>) {
+         match self {
+             TokenTreeIter::Compiler(tts) => tts.size_hint(),
+             TokenTreeIter::Fallback(tts) => tts.size_hint(),
+         }
+     }
+ }
+ 
+-impl fmt::Debug for TokenTreeIter {
++impl Debug for TokenTreeIter {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         f.debug_struct("TokenTreeIter").finish()
+     }
+ }
+ 
+ #[derive(Clone, PartialEq, Eq)]
+ #[cfg(super_unstable)]
+-pub enum SourceFile {
++pub(crate) enum SourceFile {
+     Compiler(proc_macro::SourceFile),
+     Fallback(fallback::SourceFile),
+ }
+ 
+ #[cfg(super_unstable)]
+ impl SourceFile {
+     fn nightly(sf: proc_macro::SourceFile) -> Self {
+         SourceFile::Compiler(sf)
+@@ -392,68 +347,87 @@ impl SourceFile {
+         match self {
+             SourceFile::Compiler(a) => a.is_real(),
+             SourceFile::Fallback(a) => a.is_real(),
+         }
+     }
+ }
+ 
+ #[cfg(super_unstable)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         match self {
+-            SourceFile::Compiler(a) => a.fmt(f),
+-            SourceFile::Fallback(a) => a.fmt(f),
++            SourceFile::Compiler(a) => Debug::fmt(a, f),
++            SourceFile::Fallback(a) => Debug::fmt(a, f),
+         }
+     }
+ }
+ 
+ #[cfg(any(super_unstable, feature = "span-locations"))]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+     pub line: usize,
+     pub column: usize,
+ }
+ 
+ #[derive(Copy, Clone)]
+-pub enum Span {
++pub(crate) enum Span {
+     Compiler(proc_macro::Span),
+     Fallback(fallback::Span),
+ }
+ 
+ impl Span {
+     pub fn call_site() -> Span {
+-        if nightly_works() {
++        if inside_proc_macro() {
+             Span::Compiler(proc_macro::Span::call_site())
+         } else {
+             Span::Fallback(fallback::Span::call_site())
+         }
+     }
+ 
++    #[cfg(hygiene)]
++    pub fn mixed_site() -> Span {
++        if inside_proc_macro() {
++            Span::Compiler(proc_macro::Span::mixed_site())
++        } else {
++            Span::Fallback(fallback::Span::mixed_site())
++        }
++    }
++
+     #[cfg(super_unstable)]
+     pub fn def_site() -> Span {
+-        if nightly_works() {
++        if inside_proc_macro() {
+             Span::Compiler(proc_macro::Span::def_site())
+         } else {
+             Span::Fallback(fallback::Span::def_site())
+         }
+     }
+ 
+-    #[cfg(super_unstable)]
+     pub fn resolved_at(&self, other: Span) -> Span {
+         match (self, other) {
++            #[cfg(hygiene)]
+             (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
++
++            // Name resolution affects semantics, but location is only cosmetic
++            #[cfg(not(hygiene))]
++            (Span::Compiler(_), Span::Compiler(_)) => other,
++
+             (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
+             _ => mismatch(),
+         }
+     }
+ 
+-    #[cfg(super_unstable)]
+     pub fn located_at(&self, other: Span) -> Span {
+         match (self, other) {
++            #[cfg(hygiene)]
+             (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
++
++            // Name resolution affects semantics, but location is only cosmetic
++            #[cfg(not(hygiene))]
++            (Span::Compiler(_), Span::Compiler(_)) => *self,
++
+             (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
+             _ => mismatch(),
+         }
+     }
+ 
+     pub fn unwrap(self) -> proc_macro::Span {
+         match self {
+             Span::Compiler(s) => s,
+@@ -537,36 +511,36 @@ impl From<proc_macro::Span> for crate::S
+ }
+ 
+ impl From<fallback::Span> for Span {
+     fn from(inner: fallback::Span) -> Span {
+         Span::Fallback(inner)
+     }
+ }
+ 
+-impl fmt::Debug for Span {
++impl Debug for Span {
+     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+         match self {
+-            Span::Compiler(s) => s.fmt(f),
+-            Span::Fallback(s) => s.fmt(f),
++            Span::Compiler(s) => Debug::fmt(s, f),
++            Span::Fallback(s) => Debug::fmt(s, f),
+         }
+     }
+ }
+ 
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+     match span {
+         Span::Compiler(s) => {
+             debug.field("span", &s);
+         }
+         Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
+     }
+ }
+ 
+ #[derive(Clone)]
+-pub enum Group {
++pub(crate) enum Group {
+     Compiler(proc_macro::Group),
+     Fallback(fallback::Group),
+ }
+ 
+ impl Group {
+     pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+         match stream {
+             TokenStream::Compiler(tts) => {
+@@ -647,36 +621,36 @@ impl Group {
+ }
+ 
+ impl From<fallback::Group> for Group {
+     fn from(g: fallback::Group) -> Self {
+         Group::Fallback(g)
+     }
+ }
+ 
+-impl fmt::Display for Group {
++impl Display for Group {
+     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+         match self {
+-            Group::Compiler(group) => group.fmt(formatter),
+-            Group::Fallback(group) => group.fmt(formatter),
++            Group::Compiler(group) => Display::fmt(group, formatter),
++            Group::Fallback(group) => Display::fmt(group, formatter),
+         }
+     }
+ }
+ 
+-impl fmt::Debug for Group {
++impl Debug for Group {
+     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+         match self {
+-            Group::Compiler(group) => group.fmt(formatter),
+-            Group::Fallback(group) => group.fmt(formatter),
++            Group::Compiler(group) => Debug::fmt(group, formatter),
++            Group::Fallback(group) => Debug::fmt(group, formatter),
+         }
+     }
<Skipped 31326 lines>
================================================================
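
The block deleted at the top of this hunk is the old nightly_works()
detection. It swaps in a no-op panic hook, probes
proc_macro::Span::call_site() under catch_unwind, caches the result, and
restores the user's hook, with a Once preventing the hook-loss ordering the
deleted comment walks through. Below is a minimal standalone sketch of that
detection pattern; probe() and probe_works() are hypothetical stand-ins
(a real probe would call into proc_macro, which only works during a macro
expansion):

  use std::panic;
  use std::sync::atomic::{AtomicUsize, Ordering};
  use std::sync::Once;

  // 0 = not probed yet, 1 = probe panicked, 2 = probe succeeded
  static WORKS: AtomicUsize = AtomicUsize::new(0);
  static INIT: Once = Once::new();

  // Hypothetical stand-in for proc_macro::Span::call_site(), which panics
  // whenever no procedural macro is currently being expanded.
  fn probe() {
      panic!("not inside a proc macro");
  }

  fn probe_works() -> bool {
      INIT.call_once(|| {
          // Temporarily silence the panic hook so the probe's expected panic
          // is not printed, then restore the user's hook. Running this under
          // a Once keeps two threads from racing and losing the user's hook.
          let original = panic::take_hook();
          panic::set_hook(Box::new(|_| {}));
          let ok = panic::catch_unwind(probe).is_ok();
          panic::set_hook(original);
          WORKS.store(ok as usize + 1, Ordering::SeqCst);
      });
      WORKS.load(Ordering::SeqCst) == 2
  }

  fn main() {
      assert!(!probe_works()); // outside a macro expansion the probe panics
      assert!(!probe_works()); // cached; the probe itself runs only once
  }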
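
The remaining hunks follow two conventions that are easy to lose in diff
form: every wrapper type is an enum with a Compiler variant backed by the
real proc_macro API and a Fallback variant backed by the pure-Rust
implementation, with mismatch() panicking if the two are ever combined, and
the Compiler side of TokenStream is a DeferredTokenStream that buffers
appended tokens in `extra` and only flushes them into the compiler's stream
when the stream is actually consumed (the new if-check skips the flush when
nothing is buffered). A self-contained sketch of both ideas, using
hypothetical stand-in types CompilerStream and FallbackStream rather than
the real proc_macro types:

  #[derive(Debug, Default)]
  struct CompilerStream(Vec<String>); // stand-in for proc_macro::TokenStream

  #[derive(Debug, Default)]
  struct FallbackStream(Vec<String>); // stand-in for fallback::TokenStream

  // Mirrors DeferredTokenStream: pushed tokens sit in `extra` until the
  // stream is actually needed.
  #[derive(Debug, Default)]
  struct Deferred {
      stream: CompilerStream,
      extra: Vec<String>,
  }

  impl Deferred {
      fn evaluate_now(&mut self) {
          // Fast path when nothing is buffered, like the if-check above.
          if !self.extra.is_empty() {
              self.stream.0.extend(self.extra.drain(..));
          }
      }
  }

  // Hypothetical: always false here; the real crate detects at runtime
  // whether proc_macro's APIs are usable.
  fn inside_proc_macro() -> bool {
      false
  }

  fn mismatch() -> ! {
      panic!("stable/nightly mismatch")
  }

  #[derive(Debug)]
  enum Stream {
      Compiler(Deferred),
      Fallback(FallbackStream),
  }

  impl Stream {
      fn new() -> Stream {
          if inside_proc_macro() {
              Stream::Compiler(Deferred::default())
          } else {
              Stream::Fallback(FallbackStream::default())
          }
      }

      fn push(&mut self, token: &str) {
          match self {
              // Defer: buffer locally instead of touching the compiler stream.
              Stream::Compiler(d) => d.extra.push(token.to_owned()),
              Stream::Fallback(f) => f.0.push(token.to_owned()),
          }
      }

      fn extend_from(&mut self, other: Stream) {
          match (self, other) {
              (Stream::Compiler(a), Stream::Compiler(mut b)) => {
                  a.evaluate_now();
                  b.evaluate_now();
                  a.stream.0.extend(b.stream.0);
              }
              (Stream::Fallback(a), Stream::Fallback(b)) => a.0.extend(b.0),
              // Both sides must agree on the representation.
              _ => mismatch(),
          }
      }
  }

  fn main() {
      let mut s = Stream::new();
      s.push("fn");
      s.push("main");
      let mut t = Stream::new();
      t.push("{}");
      s.extend_from(t);
      println!("{:?}", s);
  }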
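
proc_macro_parse() above is a small but useful addition: parsing a string
into a proc_macro::TokenStream can panic on some inputs
(rust-lang/rust#58736), so the parse runs under catch_unwind and a panic is
reported as a LexError instead of aborting the whole macro expansion. The
same shape, with a hypothetical panicky_parse() standing in for the
compiler's parser:

  use std::panic;

  #[derive(Debug)]
  struct LexError;

  // Hypothetical parser that panics on some inputs instead of returning
  // Err, the failure mode the workaround guards against.
  fn panicky_parse(src: &str) -> Result<Vec<String>, LexError> {
      assert!(!src.contains('\0'), "parser bug: embedded NUL");
      Ok(src.split_whitespace().map(str::to_owned).collect())
  }

  // Same shape as proc_macro_parse: run the parse under catch_unwind and
  // map a panic to an ordinary lexer error. (The default panic hook still
  // prints the panic message to stderr in this sketch.)
  fn safe_parse(src: &str) -> Result<Vec<String>, LexError> {
      panic::catch_unwind(|| panicky_parse(src)).unwrap_or(Err(LexError))
  }

  fn main() {
      assert!(safe_parse("fn main() {}").is_ok());
      assert!(safe_parse("bad\0input").is_err());
  }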

---- gitweb:

http://git.pld-linux.org/gitweb.cgi/packages/thunderbird.git/commitdiff/97d60ae07f077de181cb8da727760b268847f5c0



More information about the pld-cvs-commit mailing list