--- /dev/null
+
+# HG changeset patch
+# User Emilio Cobos Álvarez <emilio@crisal.io>
+# Date 1599584448 0
+# Node ID 85c38ea4d34969797eb5d24265cd90cc6841e6ae
+# Parent 5aa243a2fe9d77578dd95ce3ab3a2aa6c1e92604
+Bug 1663715 - Update syn and proc-macro2 so that Firefox can build on Rust nightly again. r=froydnj, a=RyanVM
+
+Generated with:
+
+ cargo update -p syn --precise 1.0.40
+ ./mach vendor rust
+
+Rust issue: https://github.com/rust-lang/rust/issues/76482
+
+Differential Revision: https://phabricator.services.mozilla.com/D89473
+
+diff --git a/Cargo.lock b/Cargo.lock
+--- a/Cargo.lock
++++ b/Cargo.lock
+@@ -3712,19 +3712,19 @@ checksum = "ecd45702f76d6d3c75a80564378a
+ dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ ]
+
+ [[package]]
+ name = "proc-macro2"
+-version = "1.0.5"
+-source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"
++version = "1.0.20"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"
+ dependencies = [
+ "unicode-xid",
+ ]
+
+ [[package]]
+ name = "procedural-masquerade"
+ version = "0.1.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+@@ -4642,19 +4642,19 @@ dependencies = [
+ "cc",
+ "gleam",
+ "glsl-to-cxx",
+ "webrender_build",
+ ]
+
+ [[package]]
+ name = "syn"
+-version = "1.0.5"
+-source = "registry+https://github.com/rust-lang/crates.io-index"
+-checksum = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"
++version = "1.0.40"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"
+ dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+ ]
+
+ [[package]]
+ name = "sync-guid"
+diff --git a/third_party/rust/proc-macro2/.cargo-checksum.json b/third_party/rust/proc-macro2/.cargo-checksum.json
+--- a/third_party/rust/proc-macro2/.cargo-checksum.json
++++ b/third_party/rust/proc-macro2/.cargo-checksum.json
+@@ -1,1 +1,1 @@
+-{"files":{"Cargo.toml":"e2c1fc6ed317eeef8462fcd192f6b6389e1d84f0d7afeac78f12c23903deddf8","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"362a2156f7645528061b6e8487a2eb0f32f1693012ed82ee57afa05c039bba0d","build.rs":"0cc6e2cb919ddbff59cf1d810283939f97a59f0037540c0f2ee3453237635ff8","src/fallback.rs":"5c6379a90735e27abcc40253b223158c6b1e5784f3850bc423335363e87ef038","src/lib.rs":"ae5251296ad3fcd8b600919a993fec0afd8b56da3e11fef6bc7265b273129936","src/strnom.rs":"37f7791f73f123817ad5403af1d4e2a0714be27401729a2d451bc80b1f26bac9","src/wrapper.rs":"81372e910604217a625aa71c47d43e65f4e008456eae93ac39325c9abf10701a","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"8c427be9cba1fa8d4a16647e53e3545e5863e29e2c0b311c93c9dd1399abf6a1"},"package":"90cf5f418035b98e655e9cdb225047638296b862b42411c4e45bb88d700f7fc0"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"c20c4c52342e65ea11ad8382edc636e628e8f8c5ab7cffddc32426b2fe8fe4cd","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"e1f9d4fc22cff2c049f166a403b41458632a94357890d31cf0e3ad83807fb430","build.rs":"332185d7ad4c859210f5edd7a76bc95146c8277726a2f81417f34927c4424d68","src/detection.rs":"9d25d896889e65330858f2d6f6223c1b98cd1dad189813ad4161ff189fbda2b8","src/fallback.rs":"239f9a25c0f2ab57592288d944c7f1a0f887536b6d4dc2428a17640af8d10a41","src/lib.rs":"2b1d98424c9b23b547dabf85554120e5e65472026a0f3f711b3a097bca7c32fe","src/parse.rs":"500edee9773132e27e44d0fdaa042b1cb9451e29e65124493986f51710c0664c","src/wrapper.rs":"d36c0dced7ec0e7585c1f935cda836080bcae6de1de3d7851d962e9e11a3ac48","tests/comments.rs":"ea6cbe6f4c8852e6a0612893c7d4f2c144a2e6a134a6c3db641a320cbfc3c800","tests/features.rs":"a86deb8644992a4eb64d9fd493eff16f9cf9c5cb6ade3a634ce0c990cf87d559","tests/marker.rs":"c2652e3ae1dfcb94d2e6313b29712c5dcbd0fe62026913e67bb7cebd7560aade","tests/test.rs":"310c856e27ff61c9ec7f0a5cd96031aac02971557b1621f5e17b089d58e79bcd","tests/test_fmt.rs":"745dfdc41d09c5308c221395eb43f2041f0a1413d2927a813bc2ad4554438fe2"},"package":"175c513d55719db99da20232b06cda8bab6b83ec2d04e3283edf0213c37c1a29"}
+\ No newline at end of file
+diff --git a/third_party/rust/proc-macro2/Cargo.toml b/third_party/rust/proc-macro2/Cargo.toml
+--- a/third_party/rust/proc-macro2/Cargo.toml
++++ b/third_party/rust/proc-macro2/Cargo.toml
+@@ -8,36 +8,35 @@
+ # If you believe there's an error in this file please file an
+ # issue against the rust-lang/cargo repository. If you're
+ # editing this file be aware that the upstream Cargo.toml
+ # will likely look very different (and much more reasonable)
+
+ [package]
+ edition = "2018"
+ name = "proc-macro2"
+-version = "1.0.5"
+-authors = ["Alex Crichton <alex@alexcrichton.com>"]
+-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
+-homepage = "https://github.com/alexcrichton/proc-macro2"
++version = "1.0.20"
++authors = ["Alex Crichton <alex@alexcrichton.com>", "David Tolnay <dtolnay@gmail.com>"]
++description = "A substitute implementation of the compiler's `proc_macro` API to decouple\ntoken-based libraries from the procedural macro use case.\n"
+ documentation = "https://docs.rs/proc-macro2"
+ readme = "README.md"
+ keywords = ["macros"]
++categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/alexcrichton/proc-macro2"
+ [package.metadata.docs.rs]
+ rustc-args = ["--cfg", "procmacro2_semver_exempt"]
+ rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
++targets = ["x86_64-unknown-linux-gnu"]
+
+-[lib]
+-name = "proc_macro2"
++[package.metadata.playground]
++features = ["span-locations"]
+ [dependencies.unicode-xid]
+ version = "0.2"
+ [dev-dependencies.quote]
+ version = "1.0"
+ default_features = false
+
+ [features]
+ default = ["proc-macro"]
+ nightly = []
+ proc-macro = []
+ span-locations = []
+-[badges.travis-ci]
+-repository = "alexcrichton/proc-macro2"
+diff --git a/third_party/rust/proc-macro2/README.md b/third_party/rust/proc-macro2/README.md
+--- a/third_party/rust/proc-macro2/README.md
++++ b/third_party/rust/proc-macro2/README.md
+@@ -1,11 +1,11 @@
+ # proc-macro2
+
+-[](https://travis-ci.com/alexcrichton/proc-macro2)
++[](https://github.com/alexcrichton/proc-macro2/actions)
+ [](https://crates.io/crates/proc-macro2)
+ [](https://docs.rs/proc-macro2)
+
+ A wrapper around the procedural macro API of the compiler's `proc_macro` crate.
+ This library serves two purposes:
+
+ - **Bring proc-macro-like functionality to other contexts like build.rs and
+ main.rs.** Types from `proc_macro` are entirely specific to procedural macros
+diff --git a/third_party/rust/proc-macro2/build.rs b/third_party/rust/proc-macro2/build.rs
+--- a/third_party/rust/proc-macro2/build.rs
++++ b/third_party/rust/proc-macro2/build.rs
+@@ -9,16 +9,20 @@
+ // "wrap_proc_macro"
+ // Wrap types from libproc_macro rather than polyfilling the whole API.
+ // Enabled on rustc 1.29+ as long as procmacro2_semver_exempt is not set,
+ // because we can't emulate the unstable API without emulating everything
+ // else. Also enabled unconditionally on nightly, in which case the
+ // procmacro2_semver_exempt surface area is implemented by using the
+ // nightly-only proc_macro API.
+ //
++// "hygiene"
++// Enable Span::mixed_site() and non-dummy behavior of Span::resolved_at
++// and Span::located_at. Enabled on Rust 1.45+.
++//
+ // "proc_macro_span"
+ // Enable non-dummy behavior of Span::start and Span::end methods which
+ // requires an unstable compiler feature. Enabled when building with
+ // nightly, unless `-Z allow-feature` in RUSTFLAGS disallows unstable
+ // features.
+ //
+ // "super_unstable"
+ // Implement the semver exempt API in terms of the nightly-only proc_macro
+@@ -52,16 +56,24 @@ fn main() {
+ // https://github.com/alexcrichton/proc-macro2/issues/147
+ println!("cargo:rustc-cfg=procmacro2_semver_exempt");
+ }
+
+ if semver_exempt || cfg!(feature = "span-locations") {
+ println!("cargo:rustc-cfg=span_locations");
+ }
+
++ if version.minor < 39 {
++ println!("cargo:rustc-cfg=no_bind_by_move_pattern_guard");
++ }
++
++ if version.minor >= 45 {
++ println!("cargo:rustc-cfg=hygiene");
++ }
++
+ let target = env::var("TARGET").unwrap();
+ if !enable_use_proc_macro(&target) {
+ return;
+ }
+
+ println!("cargo:rustc-cfg=use_proc_macro");
+
+ if version.nightly || !semver_exempt {
+diff --git a/third_party/rust/proc-macro2/src/detection.rs b/third_party/rust/proc-macro2/src/detection.rs
+new file mode 100644
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/detection.rs
+@@ -0,0 +1,67 @@
++use std::panic::{self, PanicInfo};
++use std::sync::atomic::*;
++use std::sync::Once;
++
++static WORKS: AtomicUsize = AtomicUsize::new(0);
++static INIT: Once = Once::new();
++
++pub(crate) fn inside_proc_macro() -> bool {
++ match WORKS.load(Ordering::SeqCst) {
++ 1 => return false,
++ 2 => return true,
++ _ => {}
++ }
++
++ INIT.call_once(initialize);
++ inside_proc_macro()
++}
++
++pub(crate) fn force_fallback() {
++ WORKS.store(1, Ordering::SeqCst);
++}
++
++pub(crate) fn unforce_fallback() {
++ initialize();
++}
++
++// Swap in a null panic hook to avoid printing "thread panicked" to stderr,
++// then use catch_unwind to determine whether the compiler's proc_macro is
++// working. When proc-macro2 is used from outside of a procedural macro all
++// of the proc_macro crate's APIs currently panic.
++//
++// The Once is to prevent the possibility of this ordering:
++//
++// thread 1 calls take_hook, gets the user's original hook
++// thread 1 calls set_hook with the null hook
++// thread 2 calls take_hook, thinks null hook is the original hook
++// thread 2 calls set_hook with the null hook
++// thread 1 calls set_hook with the actual original hook
++// thread 2 calls set_hook with what it thinks is the original hook
++//
++// in which the user's hook has been lost.
++//
++// There is still a race condition where a panic in a different thread can
++// happen during the interval that the user's original panic hook is
++// unregistered such that their hook is incorrectly not called. This is
++// sufficiently unlikely and less bad than printing panic messages to stderr
++// on correct use of this crate. Maybe there is a libstd feature request
++// here. For now, if a user needs to guarantee that this failure mode does
++// not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
++// the main thread before launching any other threads.
++fn initialize() {
++ type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
++
++ let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
++ let sanity_check = &*null_hook as *const PanicHook;
++ let original_hook = panic::take_hook();
++ panic::set_hook(null_hook);
++
++ let works = panic::catch_unwind(proc_macro::Span::call_site).is_ok();
++ WORKS.store(works as usize + 1, Ordering::SeqCst);
++
++ let hopefully_null_hook = panic::take_hook();
++ panic::set_hook(original_hook);
++ if sanity_check != &*hopefully_null_hook {
++ panic!("observed race condition in proc_macro2::inside_proc_macro");
++ }
++}
+diff --git a/third_party/rust/proc-macro2/src/fallback.rs b/third_party/rust/proc-macro2/src/fallback.rs
+--- a/third_party/rust/proc-macro2/src/fallback.rs
++++ b/third_party/rust/proc-macro2/src/fallback.rs
+@@ -1,41 +1,121 @@
++use crate::parse::{token_stream, Cursor};
++use crate::{Delimiter, Spacing, TokenTree};
+ #[cfg(span_locations)]
+ use std::cell::RefCell;
+ #[cfg(span_locations)]
+ use std::cmp;
+-use std::fmt;
+-use std::iter;
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
++use std::mem;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::Path;
+ use std::path::PathBuf;
+ use std::str::FromStr;
+ use std::vec;
+-
+-use crate::strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
+-use crate::{Delimiter, Punct, Spacing, TokenTree};
+ use unicode_xid::UnicodeXID;
+
++/// Force use of proc-macro2's fallback implementation of the API for now, even
++/// if the compiler's implementation is available.
++pub fn force() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::force_fallback();
++}
++
++/// Resume using the compiler's implementation of the proc macro API if it is
++/// available.
++pub fn unforce() {
++ #[cfg(wrap_proc_macro)]
++ crate::detection::unforce_fallback();
++}
++
+ #[derive(Clone)]
+-pub struct TokenStream {
+- inner: Vec<TokenTree>,
++pub(crate) struct TokenStream {
++ pub(crate) inner: Vec<TokenTree>,
+ }
+
+ #[derive(Debug)]
+-pub struct LexError;
++pub(crate) struct LexError;
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+ TokenStream { inner: Vec::new() }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0
+ }
++
++ fn take_inner(&mut self) -> Vec<TokenTree> {
++ mem::replace(&mut self.inner, Vec::new())
++ }
++
++ fn push_token(&mut self, token: TokenTree) {
++ // https://github.com/alexcrichton/proc-macro2/issues/235
++ match token {
++ #[cfg(not(no_bind_by_move_pattern_guard))]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) if literal.text.starts_with('-') => {
++ push_negative_literal(self, literal);
++ }
++ #[cfg(no_bind_by_move_pattern_guard)]
++ TokenTree::Literal(crate::Literal {
++ #[cfg(wrap_proc_macro)]
++ inner: crate::imp::Literal::Fallback(literal),
++ #[cfg(not(wrap_proc_macro))]
++ inner: literal,
++ ..
++ }) => {
++ if literal.text.starts_with('-') {
++ push_negative_literal(self, literal);
++ } else {
++ self.inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++ _ => self.inner.push(token),
++ }
++
++ #[cold]
++ fn push_negative_literal(stream: &mut TokenStream, mut literal: Literal) {
++ literal.text.remove(0);
++ let mut punct = crate::Punct::new('-', Spacing::Alone);
++ punct.set_span(crate::Span::_new_stable(literal.span));
++ stream.inner.push(TokenTree::Punct(punct));
++ stream
++ .inner
++ .push(TokenTree::Literal(crate::Literal::_new_stable(literal)));
++ }
++ }
++}
++
++// Nonrecursive to prevent stack overflow.
++impl Drop for TokenStream {
++ fn drop(&mut self) {
++ while let Some(token) = self.inner.pop() {
++ let group = match token {
++ TokenTree::Group(group) => group.inner,
++ _ => continue,
++ };
++ #[cfg(wrap_proc_macro)]
++ let group = match group {
++ crate::imp::Group::Fallback(group) => group,
++ _ => continue,
++ };
++ let mut group = group;
++ self.inner.extend(group.stream.take_inner());
++ }
++ }
+ }
+
+ #[cfg(span_locations)]
+ fn get_cursor(src: &str) -> Cursor {
+ // Create a dummy file & add it to the source map
+ SOURCE_MAP.with(|cm| {
+ let mut cm = cm.borrow_mut();
+ let name = format!("<parsed string {}>", cm.files.len());
+@@ -54,68 +134,49 @@ fn get_cursor(src: &str) -> Cursor {
+
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ // Create a dummy file & add it to the source map
+ let cursor = get_cursor(src);
+
+- match token_stream(cursor) {
+- Ok((input, output)) => {
+- if skip_whitespace(input).len() != 0 {
+- Err(LexError)
+- } else {
+- Ok(output)
+- }
+- }
+- Err(LexError) => Err(LexError),
++ let (rest, tokens) = token_stream(cursor)?;
++ if rest.is_empty() {
++ Ok(tokens)
++ } else {
++ Err(LexError)
+ }
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut joint = false;
+ for (i, tt) in self.inner.iter().enumerate() {
+ if i != 0 && !joint {
+ write!(f, " ")?;
+ }
+ joint = false;
+- match *tt {
+- TokenTree::Group(ref tt) => {
+- let (start, end) = match tt.delimiter() {
+- Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
+- Delimiter::Bracket => ("[", "]"),
+- Delimiter::None => ("", ""),
+- };
+- if tt.stream().into_iter().next().is_none() {
+- write!(f, "{} {}", start, end)?
+- } else {
+- write!(f, "{} {} {}", start, tt.stream(), end)?
+- }
++ match tt {
++ TokenTree::Group(tt) => Display::fmt(tt, f),
++ TokenTree::Ident(tt) => Display::fmt(tt, f),
++ TokenTree::Punct(tt) => {
++ joint = tt.spacing() == Spacing::Joint;
++ Display::fmt(tt, f)
+ }
+- TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+- TokenTree::Punct(ref tt) => {
+- write!(f, "{}", tt.as_char())?;
+- match tt.spacing() {
+- Spacing::Alone => {}
+- Spacing::Joint => joint = true,
+- }
+- }
+- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
+- }
++ TokenTree::Literal(tt) => Display::fmt(tt, f),
++ }?
+ }
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+ }
+
+ #[cfg(use_proc_macro)]
+ impl From<proc_macro::TokenStream> for TokenStream {
+@@ -134,122 +195,107 @@ impl From<TokenStream> for proc_macro::T
+ .to_string()
+ .parse()
+ .expect("failed to parse to compiler tokens")
+ }
+ }
+
+ impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+- TokenStream { inner: vec![tree] }
++ let mut stream = TokenStream::new();
++ stream.push_token(tree);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
+- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+- let mut v = Vec::new();
+-
+- for token in streams.into_iter() {
+- v.push(token);
+- }
+-
+- TokenStream { inner: v }
++impl FromIterator<TokenTree> for TokenStream {
++ fn from_iter<I: IntoIterator<Item = TokenTree>>(tokens: I) -> Self {
++ let mut stream = TokenStream::new();
++ stream.extend(tokens);
++ stream
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut v = Vec::new();
+
+- for stream in streams.into_iter() {
+- v.extend(stream.inner);
++ for mut stream in streams {
++ v.extend(stream.take_inner());
+ }
+
+ TokenStream { inner: v }
+ }
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+- self.inner.extend(streams);
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, tokens: I) {
++ tokens.into_iter().for_each(|token| self.push_token(token));
+ }
+ }
+
+ impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+- self.inner
+- .extend(streams.into_iter().flat_map(|stream| stream));
++ self.inner.extend(streams.into_iter().flatten());
+ }
+ }
+
+-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
++pub(crate) type TokenTreeIter = vec::IntoIter<TokenTree>;
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+- fn into_iter(self) -> TokenTreeIter {
+- self.inner.into_iter()
++ fn into_iter(mut self) -> TokenTreeIter {
++ self.take_inner().into_iter()
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq)]
+-pub struct SourceFile {
++pub(crate) struct SourceFile {
+ path: PathBuf,
+ }
+
+ impl SourceFile {
+ /// Get the path to this source file as a string.
+ pub fn path(&self) -> PathBuf {
+ self.path.clone()
+ }
+
+ pub fn is_real(&self) -> bool {
+ // XXX(nika): Support real files in the future?
+ false
+ }
+ }
+
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+ }
+
+ #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+
+ #[cfg(span_locations)]
+ thread_local! {
+ static SOURCE_MAP: RefCell<SourceMap> = RefCell::new(SourceMap {
+ // NOTE: We start with a single dummy file which all call_site() and
+ // def_site() spans reference.
+- files: vec![{
++ files: vec![FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
+- {
+- FileInfo {
+- name: "<unspecified>".to_owned(),
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
+-
+- #[cfg(not(procmacro2_semver_exempt))]
+- {
+- FileInfo {
+- span: Span { lo: 0, hi: 0 },
+- lines: vec![0],
+- }
+- }
++ name: "<unspecified>".to_owned(),
++ span: Span { lo: 0, hi: 0 },
++ lines: vec![0],
+ }],
+ });
+ }
+
+ #[cfg(span_locations)]
+ struct FileInfo {
+ #[cfg(procmacro2_semver_exempt)]
+ name: String,
+@@ -277,26 +323,31 @@ impl FileInfo {
+ }
+ }
+
+ fn span_within(&self, span: Span) -> bool {
+ span.lo >= self.span.lo && span.hi <= self.span.hi
+ }
+ }
+
+-/// Computesthe offsets of each line in the given source string.
++/// Computes the offsets of each line in the given source string
++/// and the total number of characters
+ #[cfg(span_locations)]
+-fn lines_offsets(s: &str) -> Vec<usize> {
++fn lines_offsets(s: &str) -> (usize, Vec<usize>) {
+ let mut lines = vec![0];
+- let mut prev = 0;
+- while let Some(len) = s[prev..].find('\n') {
+- prev += len + 1;
+- lines.push(prev);
++ let mut total = 0;
++
++ for ch in s.chars() {
++ total += 1;
++ if ch == '\n' {
++ lines.push(total);
++ }
+ }
+- lines
++
++ (total, lines)
+ }
+
+ #[cfg(span_locations)]
+ struct SourceMap {
+ files: Vec<FileInfo>,
+ }
+
+ #[cfg(span_locations)]
+@@ -305,81 +356,83 @@ impl SourceMap {
+ // Add 1 so there's always space between files.
+ //
+ // We'll always have at least 1 file, as we initialize our files list
+ // with a dummy file.
+ self.files.last().unwrap().span.hi + 1
+ }
+
+ fn add_file(&mut self, name: &str, src: &str) -> Span {
+- let lines = lines_offsets(src);
++ let (len, lines) = lines_offsets(src);
+ let lo = self.next_start_pos();
+ // XXX(nika): Shouild we bother doing a checked cast or checked add here?
+ let span = Span {
+ lo,
+- hi: lo + (src.len() as u32),
++ hi: lo + (len as u32),
+ };
+
+- #[cfg(procmacro2_semver_exempt)]
+ self.files.push(FileInfo {
++ #[cfg(procmacro2_semver_exempt)]
+ name: name.to_owned(),
+ span,
+ lines,
+ });
+
+ #[cfg(not(procmacro2_semver_exempt))]
+- self.files.push(FileInfo { span, lines });
+ let _ = name;
+
+ span
+ }
+
+ fn fileinfo(&self, span: Span) -> &FileInfo {
+ for file in &self.files {
+ if file.span_within(span) {
+ return file;
+ }
+ }
+ panic!("Invalid span with no related FileInfo!");
+ }
+ }
+
+ #[derive(Clone, Copy, PartialEq, Eq)]
+-pub struct Span {
++pub(crate) struct Span {
+ #[cfg(span_locations)]
+- lo: u32,
++ pub(crate) lo: u32,
+ #[cfg(span_locations)]
+- hi: u32,
++ pub(crate) hi: u32,
+ }
+
+ impl Span {
+ #[cfg(not(span_locations))]
+ pub fn call_site() -> Span {
+ Span {}
+ }
+
+ #[cfg(span_locations)]
+ pub fn call_site() -> Span {
+ Span { lo: 0, hi: 0 }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::call_site()
++ }
++
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::call_site()
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, _other: Span) -> Span {
+ // Stable spans consist only of line/column information, so
+ // `resolved_at` and `located_at` only select which span the
+ // caller wants line/column information from.
+ *self
+ }
+
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ other
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn source_file(&self) -> SourceFile {
+ SOURCE_MAP.with(|cm| {
+ let cm = cm.borrow();
+@@ -422,36 +475,69 @@ impl Span {
+ return None;
+ }
+ Some(Span {
+ lo: cmp::min(self.lo, other.lo),
+ hi: cmp::max(self.hi, other.hi),
+ })
+ })
+ }
++
++ #[cfg(not(span_locations))]
++ fn first_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn first_byte(self) -> Self {
++ Span {
++ lo: self.lo,
++ hi: cmp::min(self.lo.saturating_add(1), self.hi),
++ }
++ }
++
++ #[cfg(not(span_locations))]
++ fn last_byte(self) -> Self {
++ self
++ }
++
++ #[cfg(span_locations)]
++ fn last_byte(self) -> Self {
++ Span {
++ lo: cmp::max(self.hi.saturating_sub(1), self.lo),
++ hi: self.hi,
++ }
++ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ return write!(f, "bytes({}..{})", self.lo, self.hi);
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ write!(f, "Span")
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+- if cfg!(procmacro2_semver_exempt) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++ #[cfg(span_locations)]
++ {
++ if span.lo == 0 && span.hi == 0 {
++ return;
++ }
++ }
++
++ if cfg!(span_locations) {
+ debug.field("span", &span);
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Group {
++pub(crate) struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+ }
+
+ impl Group {
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group {
+@@ -469,58 +555,67 @@ impl Group {
+ self.stream.clone()
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn span_open(&self) -> Span {
+- self.span
++ self.span.first_byte()
+ }
+
+ pub fn span_close(&self) -> Span {
+- self.span
++ self.span.last_byte()
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
++ // We attempt to match libproc_macro's formatting.
++ // Empty parens: ()
++ // Nonempty parens: (...)
++ // Empty brackets: []
++ // Nonempty brackets: [...]
++ // Empty braces: { }
++ // Nonempty braces: { ... }
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- let (left, right) = match self.delimiter {
++ let (open, close) = match self.delimiter {
+ Delimiter::Parenthesis => ("(", ")"),
+- Delimiter::Brace => ("{", "}"),
++ Delimiter::Brace => ("{ ", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+
+- f.write_str(left)?;
+- self.stream.fmt(f)?;
+- f.write_str(right)?;
++ f.write_str(open)?;
++ Display::fmt(&self.stream, f)?;
++ if self.delimiter == Delimiter::Brace && !self.stream.inner.is_empty() {
++ f.write_str(" ")?;
++ }
++ f.write_str(close)?;
+
+ Ok(())
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Group");
+ debug.field("delimiter", &self.delimiter);
+ debug.field("stream", &self.stream);
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Ident {
++pub(crate) struct Ident {
+ sym: String,
+ span: Span,
+ raw: bool,
+ }
+
+ impl Ident {
+ fn _new(string: &str, raw: bool, span: Span) -> Ident {
+ validate_ident(string);
+@@ -544,26 +639,24 @@ impl Ident {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+ }
+
+-#[inline]
+-fn is_ident_start(c: char) -> bool {
++pub(crate) fn is_ident_start(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+ || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+ }
+
+-#[inline]
+-fn is_ident_continue(c: char) -> bool {
++pub(crate) fn is_ident_continue(c: char) -> bool {
+ ('a' <= c && c <= 'z')
+ || ('A' <= c && c <= 'Z')
+ || c == '_'
+ || ('0' <= c && c <= '9')
+ || (c > '\x7f' && UnicodeXID::is_xid_continue(c))
+ }
+
+ fn validate_ident(string: &str) {
+@@ -610,49 +703,49 @@ where
+ if self.raw {
+ other.starts_with("r#") && self.sym == other[2..]
+ } else {
+ self.sym == other
+ }
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ if self.raw {
+- "r#".fmt(f)?;
++ f.write_str("r#")?;
+ }
+- self.sym.fmt(f)
++ Display::fmt(&self.sym, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ // Ident(proc_macro), Ident(r#union)
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_tuple("Ident");
+ debug.field(&format_args!("{}", self));
+ debug.finish()
+ }
+
+ // Ident {
+ // sym: proc_macro,
+ // span: bytes(128..138)
+ // }
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", self));
+- debug.field("span", &self.span);
++ debug_span_field_if_nontrivial(&mut debug, self.span);
+ debug.finish()
+ }
+ }
+
+ #[derive(Clone)]
+-pub struct Literal {
++pub(crate) struct Literal {
+ text: String,
+ span: Span,
+ }
+
+ macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(format!(concat!("{}", stringify!($kind)), n))
+@@ -664,17 +757,17 @@ macro_rules! unsuffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(n.to_string())
+ }
+ )*)
+ }
+
+ impl Literal {
+- fn _new(text: String) -> Literal {
++ pub(crate) fn _new(text: String) -> Literal {
+ Literal {
+ text,
+ span: Span::call_site(),
+ }
+ }
+
+ suffixed_numbers! {
+ u8_suffixed => u8,
+@@ -706,61 +799,62 @@ impl Literal {
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ let mut s = f.to_string();
+- if !s.contains(".") {
++ if !s.contains('.') {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+ }
+
+ pub fn string(t: &str) -> Literal {
+ let mut text = String::with_capacity(t.len() + 2);
+ text.push('"');
+ for c in t.chars() {
+ if c == '\'' {
+- // escape_default turns this into "\'" which is unnecessary.
++ // escape_debug turns this into "\'" which is unnecessary.
+ text.push(c);
+ } else {
+- text.extend(c.escape_default());
++ text.extend(c.escape_debug());
+ }
+ }
+ text.push('"');
+ Literal::_new(text)
+ }
+
+ pub fn character(t: char) -> Literal {
+ let mut text = String::new();
+ text.push('\'');
+ if t == '"' {
+- // escape_default turns this into '\"' which is unnecessary.
++ // escape_debug turns this into '\"' which is unnecessary.
+ text.push(t);
+ } else {
+- text.extend(t.escape_default());
++ text.extend(t.escape_debug());
+ }
+ text.push('\'');
+ Literal::_new(text)
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ let mut escaped = "b\"".to_string();
+ for b in bytes {
++ #[allow(clippy::match_overlapping_arm)]
+ match *b {
+ b'\0' => escaped.push_str(r"\0"),
+ b'\t' => escaped.push_str(r"\t"),
+ b'\n' => escaped.push_str(r"\n"),
+ b'\r' => escaped.push_str(r"\r"),
+ b'"' => escaped.push_str("\\\""),
+ b'\\' => escaped.push_str("\\\\"),
+ b'\x20'..=b'\x7E' => escaped.push(*b as char),
+@@ -779,656 +873,22 @@ impl Literal {
+ self.span = span;
+ }
+
+ pub fn subspan<R: RangeBounds<usize>>(&self, _range: R) -> Option<Span> {
+ None
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.text.fmt(f)
+- }
+-}
+-
+-impl fmt::Debug for Literal {
+- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+- let mut debug = fmt.debug_struct("Literal");
+- debug.field("lit", &format_args!("{}", self.text));
+- #[cfg(procmacro2_semver_exempt)]
+- debug.field("span", &self.span);
+- debug.finish()
+- }
+-}
+-
+-fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
+- let mut trees = Vec::new();
+- loop {
+- let input_no_ws = skip_whitespace(input);
+- if input_no_ws.rest.len() == 0 {
+- break;
+- }
+- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+- input = a;
+- trees.extend(tokens);
+- continue;
+- }
+-
+- let (a, tt) = match token_tree(input_no_ws) {
+- Ok(p) => p,
+- Err(_) => break,
+- };
+- trees.push(tt);
+- input = a;
+- }
+- Ok((input, TokenStream { inner: trees }))
+-}
+-
+-#[cfg(not(span_locations))]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let (a, b) = f(skip_whitespace(input))?;
+- Ok((a, ((b, crate::Span::_new_stable(Span::call_site())))))
+-}
+-
+-#[cfg(span_locations)]
+-fn spanned<'a, T>(
+- input: Cursor<'a>,
+- f: fn(Cursor<'a>) -> PResult<'a, T>,
+-) -> PResult<'a, (T, crate::Span)> {
+- let input = skip_whitespace(input);
+- let lo = input.off;
+- let (a, b) = f(input)?;
+- let hi = a.off;
+- let span = crate::Span::_new_stable(Span { lo, hi });
+- Ok((a, (b, span)))
+-}
+-
+-fn token_tree(input: Cursor) -> PResult<TokenTree> {
+- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+- tt.set_span(span);
+- Ok((rest, tt))
+-}
+-
+-named!(token_kind -> TokenTree, alt!(
+- map!(group, |g| TokenTree::Group(crate::Group::_new_stable(g)))
+- |
+- map!(literal, |l| TokenTree::Literal(crate::Literal::_new_stable(l))) // must be before symbol
+- |
+- map!(op, TokenTree::Punct)
+- |
+- symbol_leading_ws
+-));
+-
+-named!(group -> Group, alt!(
+- delimited!(
+- punct!("("),
+- token_stream,
+- punct!(")")
+- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+- |
+- delimited!(
+- punct!("["),
+- token_stream,
+- punct!("]")
+- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+- |
+- delimited!(
+- punct!("{"),
+- token_stream,
+- punct!("}")
+- ) => { |ts| Group::new(Delimiter::Brace, ts) }
+-));
+-
+-fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+- symbol(skip_whitespace(input))
+-}
+-
+-fn symbol(input: Cursor) -> PResult<TokenTree> {
+- let raw = input.starts_with("r#");
+- let rest = input.advance((raw as usize) << 1);
+-
+- let (rest, sym) = symbol_not_raw(rest)?;
+-
+- if !raw {
+- let ident = crate::Ident::new(sym, crate::Span::call_site());
+- return Ok((rest, ident.into()));
+- }
+-
+- if sym == "_" {
+- return Err(LexError);
+- }
+-
+- let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
+- Ok((rest, ident.into()))
+-}
+-
+-fn symbol_not_raw(input: Cursor) -> PResult<&str> {
+- let mut chars = input.char_indices();
+-
+- match chars.next() {
+- Some((_, ch)) if is_ident_start(ch) => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut end = input.len();
+- for (i, ch) in chars {
+- if !is_ident_continue(ch) {
+- end = i;
+- break;
+- }
+- }
+-
+- Ok((input.advance(end), &input.rest[..end]))
+-}
+-
+-fn literal(input: Cursor) -> PResult<Literal> {
+- let input_no_ws = skip_whitespace(input);
+-
+- match literal_nocapture(input_no_ws) {
+- Ok((a, ())) => {
+- let start = input.len() - input_no_ws.len();
+- let len = input_no_ws.len() - a.len();
+- let end = start + len;
+- Ok((a, Literal::_new(input.rest[start..end].to_string())))
+- }
+- Err(LexError) => Err(LexError),
++ Display::fmt(&self.text, f)
+ }
+ }
+
+-named!(literal_nocapture -> (), alt!(
+- string
+- |
+- byte_string
+- |
+- byte
+- |
+- character
+- |
+- float
+- |
+- int
+-));
+-
+-named!(string -> (), alt!(
+- quoted_string
+- |
+- preceded!(
+- punct!("r"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-named!(quoted_string -> (), do_parse!(
+- punct!("\"") >>
+- cooked_string >>
+- tag!("\"") >>
+- option!(symbol_not_raw) >>
+- (())
+-));
+-
+-fn cooked_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices().peekable();
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- return Ok((input.advance(byte_offset), ()));
+- }
+- '\r' => {
+- if let Some((_, '\n')) = chars.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- '\\' => match chars.next() {
+- Some((_, 'x')) => {
+- if !backslash_x_char(&mut chars) {
+- break;
+- }
+- }
+- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+- Some((_, 'u')) => {
+- if !backslash_u(&mut chars) {
+- break;
+- }
+- }
+- Some((_, '\n')) | Some((_, '\r')) => {
+- while let Some(&(_, ch)) = chars.peek() {
+- if ch.is_whitespace() {
+- chars.next();
+- } else {
+- break;
+- }
+- }
+- }
+- _ => break,
+- },
+- _ch => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte_string -> (), alt!(
+- delimited!(
+- punct!("b\""),
+- cooked_byte_string,
+- tag!("\"")
+- ) => { |_| () }
+- |
+- preceded!(
+- punct!("br"),
+- raw_string
+- ) => { |_| () }
+-));
+-
+-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- 'outer: while let Some((offset, b)) = bytes.next() {
+- match b {
+- b'"' => {
+- return Ok((input.advance(offset), ()));
+- }
+- b'\r' => {
+- if let Some((_, b'\n')) = bytes.next() {
+- // ...
+- } else {
+- break;
+- }
+- }
+- b'\\' => match bytes.next() {
+- Some((_, b'x')) => {
+- if !backslash_x_byte(&mut bytes) {
+- break;
+- }
+- }
+- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+- Some((newline, b'\n')) | Some((newline, b'\r')) => {
+- let rest = input.advance(newline + 1);
+- for (offset, ch) in rest.char_indices() {
+- if !ch.is_whitespace() {
+- input = rest.advance(offset);
+- bytes = input.bytes().enumerate();
+- continue 'outer;
+- }
+- }
+- break;
+- }
+- _ => break,
+- },
+- b if b < 0x80 => {}
+- _ => break,
+- }
+- }
+- Err(LexError)
+-}
+-
+-fn raw_string(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let mut n = 0;
+- while let Some((byte_offset, ch)) = chars.next() {
+- match ch {
+- '"' => {
+- n = byte_offset;
+- break;
+- }
+- '#' => {}
+- _ => return Err(LexError),
+- }
+- }
+- for (byte_offset, ch) in chars {
+- match ch {
+- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
+- let rest = input.advance(byte_offset + 1 + n);
+- return Ok((rest, ()));
+- }
+- '\r' => {}
+- _ => {}
+- }
+- }
+- Err(LexError)
+-}
+-
+-named!(byte -> (), do_parse!(
+- punct!("b") >>
+- tag!("'") >>
+- cooked_byte >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_byte(input: Cursor) -> PResult<()> {
+- let mut bytes = input.bytes().enumerate();
+- let ok = match bytes.next().map(|(_, b)| b) {
+- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+- Some(b'x') => backslash_x_byte(&mut bytes),
+- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+- | Some(b'"') => true,
+- _ => false,
+- },
+- b => b.is_some(),
+- };
+- if ok {
+- match bytes.next() {
+- Some((offset, _)) => {
+- if input.chars().as_str().is_char_boundary(offset) {
+- Ok((input.advance(offset), ()))
+- } else {
+- Err(LexError)
+- }
+- }
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-named!(character -> (), do_parse!(
+- punct!("'") >>
+- cooked_char >>
+- tag!("'") >>
+- (())
+-));
+-
+-fn cooked_char(input: Cursor) -> PResult<()> {
+- let mut chars = input.char_indices();
+- let ok = match chars.next().map(|(_, ch)| ch) {
+- Some('\\') => match chars.next().map(|(_, ch)| ch) {
+- Some('x') => backslash_x_char(&mut chars),
+- Some('u') => backslash_u(&mut chars),
+- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+- true
+- }
+- _ => false,
+- },
+- ch => ch.is_some(),
+- };
+- if ok {
+- match chars.next() {
+- Some((idx, _)) => Ok((input.advance(idx), ())),
+- None => Ok((input.advance(input.len()), ())),
+- }
+- } else {
+- Err(LexError)
++impl Debug for Literal {
++ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
++ let mut debug = fmt.debug_struct("Literal");
++ debug.field("lit", &format_args!("{}", self.text));
++ debug_span_field_if_nontrivial(&mut debug, self.span);
++ debug.finish()
+ }
+ }
+-
+-macro_rules! next_ch {
+- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+- match $chars.next() {
+- Some((_, ch)) => match ch {
+- $pat $(| $rest)* => ch,
+- _ => return false,
+- },
+- None => return false
+- }
+- };
+-}
+-
+-fn backslash_x_char<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '0'..='7');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- true
+-}
+-
+-fn backslash_x_byte<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, u8)>,
+-{
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
+- true
+-}
+-
+-fn backslash_u<I>(chars: &mut I) -> bool
+-where
+- I: Iterator<Item = (usize, char)>,
+-{
+- next_ch!(chars @ '{');
+- next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
+- loop {
+- let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
+- if c == '}' {
+- return true;
+- }
+- }
+-}
+-
+-fn float(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = float_digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn float_digits(input: Cursor) -> PResult<()> {
+- let mut chars = input.chars().peekable();
+- match chars.next() {
+- Some(ch) if ch >= '0' && ch <= '9' => {}
+- _ => return Err(LexError),
+- }
+-
+- let mut len = 1;
+- let mut has_dot = false;
+- let mut has_exp = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '0'..='9' | '_' => {
+- chars.next();
+- len += 1;
+- }
+- '.' => {
+- if has_dot {
+- break;
+- }
+- chars.next();
+- if chars
+- .peek()
+- .map(|&ch| ch == '.' || is_ident_start(ch))
+- .unwrap_or(false)
+- {
+- return Err(LexError);
+- }
+- len += 1;
+- has_dot = true;
+- }
+- 'e' | 'E' => {
+- chars.next();
+- len += 1;
+- has_exp = true;
+- break;
+- }
+- _ => break,
+- }
+- }
+-
+- let rest = input.advance(len);
+- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
+- return Err(LexError);
+- }
+-
+- if has_exp {
+- let mut has_exp_value = false;
+- while let Some(&ch) = chars.peek() {
+- match ch {
+- '+' | '-' => {
+- if has_exp_value {
+- break;
+- }
+- chars.next();
+- len += 1;
+- }
+- '0'..='9' => {
+- chars.next();
+- len += 1;
+- has_exp_value = true;
+- }
+- '_' => {
+- chars.next();
+- len += 1;
+- }
+- _ => break,
+- }
+- }
+- if !has_exp_value {
+- return Err(LexError);
+- }
+- }
+-
+- Ok((input.advance(len), ()))
+-}
+-
+-fn int(input: Cursor) -> PResult<()> {
+- let (mut rest, ()) = digits(input)?;
+- if let Some(ch) = rest.chars().next() {
+- if is_ident_start(ch) {
+- rest = symbol_not_raw(rest)?.0;
+- }
+- }
+- word_break(rest)
+-}
+-
+-fn digits(mut input: Cursor) -> PResult<()> {
+- let base = if input.starts_with("0x") {
+- input = input.advance(2);
+- 16
+- } else if input.starts_with("0o") {
+- input = input.advance(2);
+- 8
+- } else if input.starts_with("0b") {
+- input = input.advance(2);
+- 2
+- } else {
+- 10
+- };
+-
+- let mut len = 0;
+- let mut empty = true;
+- for b in input.bytes() {
+- let digit = match b {
+- b'0'..=b'9' => (b - b'0') as u64,
+- b'a'..=b'f' => 10 + (b - b'a') as u64,
+- b'A'..=b'F' => 10 + (b - b'A') as u64,
+- b'_' => {
+- if empty && base == 10 {
+- return Err(LexError);
+- }
+- len += 1;
+- continue;
+- }
+- _ => break,
+- };
+- if digit >= base {
+- return Err(LexError);
+- }
+- len += 1;
+- empty = false;
+- }
+- if empty {
+- Err(LexError)
+- } else {
+- Ok((input.advance(len), ()))
+- }
+-}
+-
+-fn op(input: Cursor) -> PResult<Punct> {
+- let input = skip_whitespace(input);
+- match op_char(input) {
+- Ok((rest, '\'')) => {
+- symbol(rest)?;
+- Ok((rest, Punct::new('\'', Spacing::Joint)))
+- }
+- Ok((rest, ch)) => {
+- let kind = match op_char(rest) {
+- Ok(_) => Spacing::Joint,
+- Err(LexError) => Spacing::Alone,
+- };
+- Ok((rest, Punct::new(ch, kind)))
+- }
+- Err(LexError) => Err(LexError),
+- }
+-}
+-
+-fn op_char(input: Cursor) -> PResult<char> {
+- if input.starts_with("//") || input.starts_with("/*") {
+- // Do not accept `/` of a comment as an op.
+- return Err(LexError);
+- }
+-
+- let mut chars = input.chars();
+- let first = match chars.next() {
+- Some(ch) => ch,
+- None => {
+- return Err(LexError);
+- }
+- };
+- let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
+- if recognized.contains(first) {
+- Ok((input.advance(first.len_utf8()), first))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+- let mut trees = Vec::new();
+- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+- trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
+- if inner {
+- trees.push(Punct::new('!', Spacing::Alone).into());
+- }
+- let mut stream = vec![
+- TokenTree::Ident(crate::Ident::new("doc", span)),
+- TokenTree::Punct(Punct::new('=', Spacing::Alone)),
+- TokenTree::Literal(crate::Literal::string(comment)),
+- ];
+- for tt in stream.iter_mut() {
+- tt.set_span(span);
+- }
+- let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
+- trees.push(crate::Group::_new_stable(group).into());
+- for tt in trees.iter_mut() {
+- tt.set_span(span);
+- }
+- Ok((rest, trees))
+-}
+-
+-named!(doc_comment_contents -> (&str, bool), alt!(
+- do_parse!(
+- punct!("//!") >>
+- s: take_until_newline_or_eof!() >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tag!("/*!")) >>
+- s: block_comment >>
+- ((s, true))
+- )
+- |
+- do_parse!(
+- punct!("///") >>
+- not!(tag!("/")) >>
+- s: take_until_newline_or_eof!() >>
+- ((s, false))
+- )
+- |
+- do_parse!(
+- option!(whitespace) >>
+- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
+- s: block_comment >>
+- ((s, false))
+- )
+-));
+diff --git a/third_party/rust/proc-macro2/src/lib.rs b/third_party/rust/proc-macro2/src/lib.rs
+--- a/third_party/rust/proc-macro2/src/lib.rs
++++ b/third_party/rust/proc-macro2/src/lib.rs
+@@ -73,37 +73,44 @@
+ //!
+ //! # Thread-Safety
+ //!
+ //! Most types in this crate are `!Sync` because the underlying compiler
+ //! types make use of thread-local memory, meaning they cannot be accessed from
+ //! a different thread.
+
+ // Proc-macro2 types in rustdoc of other crates get linked to here.
+-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.5")]
++#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.20")]
+ #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
+ #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
++#![allow(clippy::needless_doctest_main)]
+
+ #[cfg(use_proc_macro)]
+ extern crate proc_macro;
+
+ use std::cmp::Ordering;
+-use std::fmt;
++use std::fmt::{self, Debug, Display};
+ use std::hash::{Hash, Hasher};
+ use std::iter::FromIterator;
+ use std::marker;
+ use std::ops::RangeBounds;
+ #[cfg(procmacro2_semver_exempt)]
+ use std::path::PathBuf;
+ use std::rc::Rc;
+ use std::str::FromStr;
+
+-#[macro_use]
+-mod strnom;
+-mod fallback;
++mod parse;
++
++#[cfg(wrap_proc_macro)]
++mod detection;
++
++// Public for proc_macro2::fallback::force() and unforce(), but those are quite
++// a niche use case so we omit it from rustdoc.
++#[doc(hidden)]
++pub mod fallback;
+
+ #[cfg(not(wrap_proc_macro))]
+ use crate::fallback as imp;
+ #[path = "wrapper.rs"]
+ #[cfg(wrap_proc_macro)]
+ mod imp;
+
+ /// An abstract stream of tokens, or more concretely a sequence of token trees.
+@@ -223,32 +230,32 @@ impl FromIterator<TokenStream> for Token
+ TokenStream::_new(streams.into_iter().map(|i| i.inner).collect())
+ }
+ }
+
+ /// Prints the token stream as a string that is supposed to be losslessly
+ /// convertible back into the same token stream (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenStream {
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Prints token in a form convenient for debugging.
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ /// The source file of a given `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+ #[cfg(procmacro2_semver_exempt)]
+ #[derive(Clone, PartialEq, Eq)]
+@@ -286,19 +293,19 @@ impl SourceFile {
+ /// Returns `true` if this source file is a real source file, and not
+ /// generated by an external macro's expansion.
+ pub fn is_real(&self) -> bool {
+ self.inner.is_real()
+ }
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ /// A line-column pair representing the start or end of a `Span`.
+ ///
+ /// This type is semver exempt and not exposed by default.
+ #[cfg(span_locations)]
+ #[derive(Copy, Clone, Debug, PartialEq, Eq)]
+@@ -306,16 +313,32 @@ pub struct LineColumn {
+ /// The 1-indexed line in the source file on which the span starts or ends
+ /// (inclusive).
+ pub line: usize,
+ /// The 0-indexed column (in UTF-8 characters) in the source file on which
+ /// the span starts or ends (inclusive).
+ pub column: usize,
+ }
+
++#[cfg(span_locations)]
++impl Ord for LineColumn {
++ fn cmp(&self, other: &Self) -> Ordering {
++ self.line
++ .cmp(&other.line)
++ .then(self.column.cmp(&other.column))
++ }
++}
++
++#[cfg(span_locations)]
++impl PartialOrd for LineColumn {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ Some(self.cmp(other))
++ }
++}
++
+ /// A region of source code, along with macro expansion information.
+ #[derive(Copy, Clone)]
+ pub struct Span {
+ inner: imp::Span,
+ _marker: marker::PhantomData<Rc<()>>,
+ }
+
+ impl Span {
+@@ -337,38 +360,42 @@ impl Span {
+ ///
+ /// Identifiers created with this span will be resolved as if they were
+ /// written directly at the macro call location (call-site hygiene) and
+ /// other code at the macro call site will be able to refer to them as well.
+ pub fn call_site() -> Span {
+ Span::_new(imp::Span::call_site())
+ }
+
++ /// The span located at the invocation of the procedural macro, but with
++ /// local variables, labels, and `$crate` resolved at the definition site
++ /// of the macro. This is the same hygiene behavior as `macro_rules`.
++ ///
++ /// This function requires Rust 1.45 or later.
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ Span::_new(imp::Span::mixed_site())
++ }
++
+ /// A span that resolves at the macro definition site.
+ ///
+ /// This method is semver exempt and not exposed by default.
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::_new(imp::Span::def_site())
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.resolved_at(other.inner))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+- ///
+- /// This method is semver exempt and not exposed by default.
+- #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.located_at(other.inner))
+ }
+
+ /// Convert `proc_macro2::Span` to `proc_macro::Span`.
+ ///
+ /// This method is available when building with a nightly compiler, or when
+ /// building with rustc 1.29+ *without* semver exempt features.
+@@ -434,19 +461,19 @@ impl Span {
+ /// This method is semver exempt and not exposed by default.
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn eq(&self, other: &Span) -> bool {
+ self.inner.eq(&other.inner)
+ }
+ }
+
+ /// Prints a span in a form convenient for debugging.
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ /// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
+ #[derive(Clone)]
+ pub enum TokenTree {
+ /// A token stream surrounded by bracket delimiters.
+ Group(Group),
+@@ -457,35 +484,35 @@ pub enum TokenTree {
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+ }
+
+ impl TokenTree {
+ /// Returns the span of this tree, delegating to the `span` method of
+ /// the contained token or a delimited stream.
+ pub fn span(&self) -> Span {
+- match *self {
+- TokenTree::Group(ref t) => t.span(),
+- TokenTree::Ident(ref t) => t.span(),
+- TokenTree::Punct(ref t) => t.span(),
+- TokenTree::Literal(ref t) => t.span(),
++ match self {
++ TokenTree::Group(t) => t.span(),
++ TokenTree::Ident(t) => t.span(),
++ TokenTree::Punct(t) => t.span(),
++ TokenTree::Literal(t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ pub fn set_span(&mut self, span: Span) {
+- match *self {
+- TokenTree::Group(ref mut t) => t.set_span(span),
+- TokenTree::Ident(ref mut t) => t.set_span(span),
+- TokenTree::Punct(ref mut t) => t.set_span(span),
+- TokenTree::Literal(ref mut t) => t.set_span(span),
++ match self {
++ TokenTree::Group(t) => t.set_span(span),
++ TokenTree::Ident(t) => t.set_span(span),
++ TokenTree::Punct(t) => t.set_span(span),
++ TokenTree::Literal(t) => t.set_span(span),
+ }
+ }
+ }
+
+ impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+@@ -508,42 +535,42 @@ impl From<Literal> for TokenTree {
+ TokenTree::Literal(g)
+ }
+ }
+
+ /// Prints the token tree as a string that is supposed to be losslessly
+ /// convertible back into the same token tree (modulo spans), except for
+ /// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+ /// numeric literals.
+-impl fmt::Display for TokenTree {
++impl Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => t.fmt(f),
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ match self {
++ TokenTree::Group(t) => Display::fmt(t, f),
++ TokenTree::Ident(t) => Display::fmt(t, f),
++ TokenTree::Punct(t) => Display::fmt(t, f),
++ TokenTree::Literal(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+ /// Prints token tree in a form convenient for debugging.
+-impl fmt::Debug for TokenTree {
++impl Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+- match *self {
+- TokenTree::Group(ref t) => t.fmt(f),
+- TokenTree::Ident(ref t) => {
++ match self {
++ TokenTree::Group(t) => Debug::fmt(t, f),
++ TokenTree::Ident(t) => {
+ let mut debug = f.debug_struct("Ident");
+ debug.field("sym", &format_args!("{}", t));
+ imp::debug_span_field_if_nontrivial(&mut debug, t.span().inner);
+ debug.finish()
+ }
+- TokenTree::Punct(ref t) => t.fmt(f),
+- TokenTree::Literal(ref t) => t.fmt(f),
++ TokenTree::Punct(t) => Debug::fmt(t, f),
++ TokenTree::Literal(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+
+ /// A delimited token stream.
+ ///
+ /// A `Group` internally contains a `TokenStream` which is surrounded by
+ /// `Delimiter`s.
+@@ -646,25 +673,25 @@ impl Group {
+ pub fn set_span(&mut self, span: Span) {
+ self.inner.set_span(span.inner)
+ }
+ }
+
+ /// Prints the group as a string that should be losslessly convertible back
+ /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+ /// with `Delimiter::None` delimiters.
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Display::fmt(&self.inner, formatter)
++ Display::fmt(&self.inner, formatter)
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+- fmt::Debug::fmt(&self.inner, formatter)
++ Debug::fmt(&self.inner, formatter)
+ }
+ }
+
+ /// An `Punct` is an single punctuation character like `+`, `-` or `#`.
+ ///
+ /// Multicharacter operators like `+=` are represented as two instances of
+ /// `Punct` with different forms of `Spacing` returned.
+ #[derive(Clone)]
+@@ -725,23 +752,23 @@ impl Punct {
+ /// Configure the span for this punctuation character.
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+ }
+
+ /// Prints the punctuation character as a string that should be losslessly
+ /// convertible back into the same character.
+-impl fmt::Display for Punct {
++impl Display for Punct {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.op.fmt(f)
++ Display::fmt(&self.op, f)
+ }
+ }
+
+-impl fmt::Debug for Punct {
++impl Debug for Punct {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let mut debug = fmt.debug_struct("Punct");
+ debug.field("op", &self.op);
+ debug.field("spacing", &self.spacing);
+ imp::debug_span_field_if_nontrivial(&mut debug, self.span.inner);
+ debug.finish()
+ }
+ }
+@@ -915,25 +942,25 @@ impl Ord for Ident {
+ impl Hash for Ident {
+ fn hash<H: Hasher>(&self, hasher: &mut H) {
+ self.to_string().hash(hasher)
+ }
+ }
+
+ /// Prints the identifier as a string that should be losslessly convertible back
+ /// into the same identifier.
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ /// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
+ /// byte character (`b'a'`), an integer or floating point number with or without
+ /// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+ ///
+ /// Boolean literals like `true` and `false` do not belong here, they are
+@@ -1135,36 +1162,36 @@ impl Literal {
+ /// nightly compiler, this method will always return `None`.
+ ///
+ /// [`proc_macro::Literal::subspan`]: https://doc.rust-lang.org/proc_macro/struct.Literal.html#method.subspan
+ pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
+ self.inner.subspan(range).map(Span::_new)
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Display::fmt(&self.inner, f)
+ }
+ }
+
+ /// Public implementation details for the `TokenStream` type, such as iterators.
+ pub mod token_stream {
+- use std::fmt;
++ use crate::{imp, TokenTree};
++ use std::fmt::{self, Debug};
+ use std::marker;
+ use std::rc::Rc;
+
+ pub use crate::TokenStream;
+- use crate::{imp, TokenTree};
+
+ /// An iterator over `TokenStream`'s `TokenTree`s.
+ ///
+ /// The iteration is "shallow", e.g. the iterator doesn't recurse into
+ /// delimited groups, and returns whole groups as token trees.
+ #[derive(Clone)]
+ pub struct IntoIter {
+ inner: imp::TokenTreeIter,
+@@ -1174,19 +1201,19 @@ pub mod token_stream {
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.inner.next()
+ }
+ }
+
+- impl fmt::Debug for IntoIter {
++ impl Debug for IntoIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+- self.inner.fmt(f)
++ Debug::fmt(&self.inner, f)
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+diff --git a/third_party/rust/proc-macro2/src/parse.rs b/third_party/rust/proc-macro2/src/parse.rs
+new file mode 100644
+--- /dev/null
++++ b/third_party/rust/proc-macro2/src/parse.rs
+@@ -0,0 +1,791 @@
++use crate::fallback::{
++ is_ident_continue, is_ident_start, Group, LexError, Literal, Span, TokenStream,
++};
++use crate::{Delimiter, Punct, Spacing, TokenTree};
++use std::str::{Bytes, CharIndices, Chars};
++use unicode_xid::UnicodeXID;
++
++#[derive(Copy, Clone, Eq, PartialEq)]
++pub(crate) struct Cursor<'a> {
++ pub rest: &'a str,
++ #[cfg(span_locations)]
++ pub off: u32,
++}
++
++impl<'a> Cursor<'a> {
++ fn advance(&self, bytes: usize) -> Cursor<'a> {
++ let (_front, rest) = self.rest.split_at(bytes);
++ Cursor {
++ rest,
++ #[cfg(span_locations)]
++ off: self.off + _front.chars().count() as u32,
++ }
++ }
++
++ fn starts_with(&self, s: &str) -> bool {
++ self.rest.starts_with(s)
++ }
++
++ pub(crate) fn is_empty(&self) -> bool {
++ self.rest.is_empty()
++ }
++
++ fn len(&self) -> usize {
++ self.rest.len()
++ }
++
++ fn as_bytes(&self) -> &'a [u8] {
++ self.rest.as_bytes()
++ }
++
++ fn bytes(&self) -> Bytes<'a> {
++ self.rest.bytes()
++ }
++
++ fn chars(&self) -> Chars<'a> {
++ self.rest.chars()
++ }
++
++ fn char_indices(&self) -> CharIndices<'a> {
++ self.rest.char_indices()
++ }
++
++ fn parse(&self, tag: &str) -> Result<Cursor<'a>, LexError> {
++ if self.starts_with(tag) {
++ Ok(self.advance(tag.len()))
++ } else {
++ Err(LexError)
++ }
++ }
++}
++
++type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
++
++fn skip_whitespace(input: Cursor) -> Cursor {
++ let mut s = input;
++
++ while !s.is_empty() {
++ let byte = s.as_bytes()[0];
++ if byte == b'/' {
++ if s.starts_with("//")
++ && (!s.starts_with("///") || s.starts_with("////"))
++ && !s.starts_with("//!")
++ {
++ let (cursor, _) = take_until_newline_or_eof(s);
++ s = cursor;
++ continue;
++ } else if s.starts_with("/**/") {
++ s = s.advance(4);
++ continue;
++ } else if s.starts_with("/*")
++ && (!s.starts_with("/**") || s.starts_with("/***"))
++ && !s.starts_with("/*!")
++ {
++ match block_comment(s) {
++ Ok((rest, _)) => {
++ s = rest;
++ continue;
++ }
++ Err(LexError) => return s,
++ }
++ }
++ }
++ match byte {
++ b' ' | 0x09..=0x0d => {
++ s = s.advance(1);
++ continue;
++ }
++ b if b <= 0x7f => {}
++ _ => {
++ let ch = s.chars().next().unwrap();
++ if is_whitespace(ch) {
++ s = s.advance(ch.len_utf8());
++ continue;
++ }
++ }
++ }
++ return s;
++ }
++ s
++}
++
++fn block_comment(input: Cursor) -> PResult<&str> {
++ if !input.starts_with("/*") {
++ return Err(LexError);
++ }
++
++ let mut depth = 0;
++ let bytes = input.as_bytes();
++ let mut i = 0;
++ let upper = bytes.len() - 1;
++
++ while i < upper {
++ if bytes[i] == b'/' && bytes[i + 1] == b'*' {
++ depth += 1;
++ i += 1; // eat '*'
++ } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
++ depth -= 1;
++ if depth == 0 {
++ return Ok((input.advance(i + 2), &input.rest[..i + 2]));
++ }
++ i += 1; // eat '/'
++ }
++ i += 1;
++ }
++
++ Err(LexError)
++}
++
++fn is_whitespace(ch: char) -> bool {
++ // Rust treats left-to-right mark and right-to-left mark as whitespace
++ ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
++}
++
++fn word_break(input: Cursor) -> Result<Cursor, LexError> {
++ match input.chars().next() {
++ Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
++ Some(_) | None => Ok(input),
++ }
++}
++
++pub(crate) fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
++ let mut trees = Vec::new();
++ let mut stack = Vec::new();
++
++ loop {
++ input = skip_whitespace(input);
++
++ if let Ok((rest, tt)) = doc_comment(input) {
++ trees.extend(tt);
++ input = rest;
++ continue;
++ }
++
++ #[cfg(span_locations)]
++ let lo = input.off;
++
++ let first = match input.bytes().next() {
++ Some(first) => first,
++ None => break,
++ };
++
++ if let Some(open_delimiter) = match first {
++ b'(' => Some(Delimiter::Parenthesis),
++ b'[' => Some(Delimiter::Bracket),
++ b'{' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = (open_delimiter, trees);
++ #[cfg(span_locations)]
++ let frame = (lo, frame);
++ stack.push(frame);
++ trees = Vec::new();
++ } else if let Some(close_delimiter) = match first {
++ b')' => Some(Delimiter::Parenthesis),
++ b']' => Some(Delimiter::Bracket),
++ b'}' => Some(Delimiter::Brace),
++ _ => None,
++ } {
++ input = input.advance(1);
++ let frame = stack.pop().ok_or(LexError)?;
++ #[cfg(span_locations)]
++ let (lo, frame) = frame;
++ let (open_delimiter, outer) = frame;
++ if open_delimiter != close_delimiter {
++ return Err(LexError);
++ }
++ let mut g = Group::new(open_delimiter, TokenStream { inner: trees });
++ g.set_span(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: input.off,
++ });
++ trees = outer;
++ trees.push(TokenTree::Group(crate::Group::_new_stable(g)));
++ } else {
++ let (rest, mut tt) = leaf_token(input)?;
++ tt.set_span(crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ }));
++ trees.push(tt);
++ input = rest;
++ }
++ }
++
++ if stack.is_empty() {
++ Ok((input, TokenStream { inner: trees }))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn leaf_token(input: Cursor) -> PResult<TokenTree> {
++ if let Ok((input, l)) = literal(input) {
++ // must be parsed before ident
++ Ok((input, TokenTree::Literal(crate::Literal::_new_stable(l))))
++ } else if let Ok((input, p)) = op(input) {
++ Ok((input, TokenTree::Punct(p)))
++ } else if let Ok((input, i)) = ident(input) {
++ Ok((input, TokenTree::Ident(i)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn ident(input: Cursor) -> PResult<crate::Ident> {
++ let raw = input.starts_with("r#");
++ let rest = input.advance((raw as usize) << 1);
++
++ let (rest, sym) = ident_not_raw(rest)?;
++
++ if !raw {
++ let ident = crate::Ident::new(sym, crate::Span::call_site());
++ return Ok((rest, ident));
++ }
++
++ if sym == "_" {
++ return Err(LexError);
++ }
++
++ let ident = crate::Ident::_new_raw(sym, crate::Span::call_site());
++ Ok((rest, ident))
++}
++
++fn ident_not_raw(input: Cursor) -> PResult<&str> {
++ let mut chars = input.char_indices();
++
++ match chars.next() {
++ Some((_, ch)) if is_ident_start(ch) => {}
++ _ => return Err(LexError),
++ }
++
++ let mut end = input.len();
++ for (i, ch) in chars {
++ if !is_ident_continue(ch) {
++ end = i;
++ break;
++ }
++ }
++
++ Ok((input.advance(end), &input.rest[..end]))
++}
++
++fn literal(input: Cursor) -> PResult<Literal> {
++ match literal_nocapture(input) {
++ Ok(a) => {
++ let end = input.len() - a.len();
++ Ok((a, Literal::_new(input.rest[..end].to_string())))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn literal_nocapture(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(ok) = string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte_string(input) {
++ Ok(ok)
++ } else if let Ok(ok) = byte(input) {
++ Ok(ok)
++ } else if let Ok(ok) = character(input) {
++ Ok(ok)
++ } else if let Ok(ok) = float(input) {
++ Ok(ok)
++ } else if let Ok(ok) = int(input) {
++ Ok(ok)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn literal_suffix(input: Cursor) -> Cursor {
++ match ident_not_raw(input) {
++ Ok((input, _)) => input,
++ Err(LexError) => input,
++ }
++}
++
++fn string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("\"") {
++ cooked_string(input)
++ } else if let Ok(input) = input.parse("r") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices().peekable();
++
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ let input = input.advance(i + 1);
++ return Ok(literal_suffix(input));
++ }
++ '\r' => {
++ if let Some((_, '\n')) = chars.next() {
++ // ...
++ } else {
++ break;
++ }
++ }
++ '\\' => match chars.next() {
++ Some((_, 'x')) => {
++ if !backslash_x_char(&mut chars) {
++ break;
++ }
++ }
++ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
++ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
++ Some((_, 'u')) => {
++ if !backslash_u(&mut chars) {
++ break;
++ }
++ }
++ Some((_, '\n')) | Some((_, '\r')) => {
++ while let Some(&(_, ch)) = chars.peek() {
++ if ch.is_whitespace() {
++ chars.next();
++ } else {
++ break;
++ }
++ }
++ }
++ _ => break,
++ },
++ _ch => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte_string(input: Cursor) -> Result<Cursor, LexError> {
++ if let Ok(input) = input.parse("b\"") {
++ cooked_byte_string(input)
++ } else if let Ok(input) = input.parse("br") {
++ raw_string(input)
++ } else {
++ Err(LexError)
++ }
++}
++
++fn cooked_byte_string(mut input: Cursor) -> Result<Cursor, LexError> {
++ let mut bytes = input.bytes().enumerate();
++ 'outer: while let Some((offset, b)) = bytes.next() {
++ match b {
++ b'"' => {
++ let input = input.advance(offset + 1);
++ return Ok(literal_suffix(input));
++ }
++ b'\r' => {
++ if let Some((_, b'\n')) = bytes.next() {
++ // ...
++ } else {
++ break;
++ }
++ }
++ b'\\' => match bytes.next() {
++ Some((_, b'x')) => {
++ if !backslash_x_byte(&mut bytes) {
++ break;
++ }
++ }
++ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
++ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
++ Some((newline, b'\n')) | Some((newline, b'\r')) => {
++ let rest = input.advance(newline + 1);
++ for (offset, ch) in rest.char_indices() {
++ if !ch.is_whitespace() {
++ input = rest.advance(offset);
++ bytes = input.bytes().enumerate();
++ continue 'outer;
++ }
++ }
++ break;
++ }
++ _ => break,
++ },
++ b if b < 0x80 => {}
++ _ => break,
++ }
++ }
++ Err(LexError)
++}
++
++fn raw_string(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.char_indices();
++ let mut n = 0;
++ while let Some((i, ch)) = chars.next() {
++ match ch {
++ '"' => {
++ n = i;
++ break;
++ }
++ '#' => {}
++ _ => return Err(LexError),
++ }
++ }
++ for (i, ch) in chars {
++ match ch {
++ '"' if input.rest[i + 1..].starts_with(&input.rest[..n]) => {
++ let rest = input.advance(i + 1 + n);
++ return Ok(literal_suffix(rest));
++ }
++ '\r' => {}
++ _ => {}
++ }
++ }
++ Err(LexError)
++}
++
++fn byte(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("b'")?;
++ let mut bytes = input.bytes().enumerate();
++ let ok = match bytes.next().map(|(_, b)| b) {
++ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
++ Some(b'x') => backslash_x_byte(&mut bytes),
++ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
++ | Some(b'"') => true,
++ _ => false,
++ },
++ b => b.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (offset, _) = bytes.next().ok_or(LexError)?;
++ if !input.chars().as_str().is_char_boundary(offset) {
++ return Err(LexError);
++ }
++ let input = input.advance(offset).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++fn character(input: Cursor) -> Result<Cursor, LexError> {
++ let input = input.parse("'")?;
++ let mut chars = input.char_indices();
++ let ok = match chars.next().map(|(_, ch)| ch) {
++ Some('\\') => match chars.next().map(|(_, ch)| ch) {
++ Some('x') => backslash_x_char(&mut chars),
++ Some('u') => backslash_u(&mut chars),
++ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
++ true
++ }
++ _ => false,
++ },
++ ch => ch.is_some(),
++ };
++ if !ok {
++ return Err(LexError);
++ }
++ let (idx, _) = chars.next().ok_or(LexError)?;
++ let input = input.advance(idx).parse("'")?;
++ Ok(literal_suffix(input))
++}
++
++macro_rules! next_ch {
++ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
++ match $chars.next() {
++ Some((_, ch)) => match ch {
++ $pat $(| $rest)* => ch,
++ _ => return false,
++ },
++ None => return false,
++ }
++ };
++}
++
++fn backslash_x_char<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '0'..='7');
++ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++ true
++}
++
++fn backslash_x_byte<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, u8)>,
++{
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ next_ch!(chars @ b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F');
++ true
++}
++
++fn backslash_u<I>(chars: &mut I) -> bool
++where
++ I: Iterator<Item = (usize, char)>,
++{
++ next_ch!(chars @ '{');
++ next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F');
++ loop {
++ let c = next_ch!(chars @ '0'..='9' | 'a'..='f' | 'A'..='F' | '_' | '}');
++ if c == '}' {
++ return true;
++ }
++ }
++}
++
++fn float(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = float_digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn float_digits(input: Cursor) -> Result<Cursor, LexError> {
++ let mut chars = input.chars().peekable();
++ match chars.next() {
++ Some(ch) if ch >= '0' && ch <= '9' => {}
++ _ => return Err(LexError),
++ }
++
++ let mut len = 1;
++ let mut has_dot = false;
++ let mut has_exp = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '0'..='9' | '_' => {
++ chars.next();
++ len += 1;
++ }
++ '.' => {
++ if has_dot {
++ break;
++ }
++ chars.next();
++ if chars
++ .peek()
++ .map(|&ch| ch == '.' || is_ident_start(ch))
++ .unwrap_or(false)
++ {
++ return Err(LexError);
++ }
++ len += 1;
++ has_dot = true;
++ }
++ 'e' | 'E' => {
++ chars.next();
++ len += 1;
++ has_exp = true;
++ break;
++ }
++ _ => break,
++ }
++ }
++
++ let rest = input.advance(len);
++ if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
++ return Err(LexError);
++ }
++
++ if has_exp {
++ let mut has_exp_value = false;
++ while let Some(&ch) = chars.peek() {
++ match ch {
++ '+' | '-' => {
++ if has_exp_value {
++ break;
++ }
++ chars.next();
++ len += 1;
++ }
++ '0'..='9' => {
++ chars.next();
++ len += 1;
++ has_exp_value = true;
++ }
++ '_' => {
++ chars.next();
++ len += 1;
++ }
++ _ => break,
++ }
++ }
++ if !has_exp_value {
++ return Err(LexError);
++ }
++ }
++
++ Ok(input.advance(len))
++}
++
++fn int(input: Cursor) -> Result<Cursor, LexError> {
++ let mut rest = digits(input)?;
++ if let Some(ch) = rest.chars().next() {
++ if is_ident_start(ch) {
++ rest = ident_not_raw(rest)?.0;
++ }
++ }
++ word_break(rest)
++}
++
++fn digits(mut input: Cursor) -> Result<Cursor, LexError> {
++ let base = if input.starts_with("0x") {
++ input = input.advance(2);
++ 16
++ } else if input.starts_with("0o") {
++ input = input.advance(2);
++ 8
++ } else if input.starts_with("0b") {
++ input = input.advance(2);
++ 2
++ } else {
++ 10
++ };
++
++ let mut len = 0;
++ let mut empty = true;
++ for b in input.bytes() {
++ let digit = match b {
++ b'0'..=b'9' => (b - b'0') as u64,
++ b'a'..=b'f' => 10 + (b - b'a') as u64,
++ b'A'..=b'F' => 10 + (b - b'A') as u64,
++ b'_' => {
++ if empty && base == 10 {
++ return Err(LexError);
++ }
++ len += 1;
++ continue;
++ }
++ _ => break,
++ };
++ if digit >= base {
++ return Err(LexError);
++ }
++ len += 1;
++ empty = false;
++ }
++ if empty {
++ Err(LexError)
++ } else {
++ Ok(input.advance(len))
++ }
++}
++
++fn op(input: Cursor) -> PResult<Punct> {
++ match op_char(input) {
++ Ok((rest, '\'')) => {
++ ident(rest)?;
++ Ok((rest, Punct::new('\'', Spacing::Joint)))
++ }
++ Ok((rest, ch)) => {
++ let kind = match op_char(rest) {
++ Ok(_) => Spacing::Joint,
++ Err(LexError) => Spacing::Alone,
++ };
++ Ok((rest, Punct::new(ch, kind)))
++ }
++ Err(LexError) => Err(LexError),
++ }
++}
++
++fn op_char(input: Cursor) -> PResult<char> {
++ if input.starts_with("//") || input.starts_with("/*") {
++ // Do not accept `/` of a comment as an op.
++ return Err(LexError);
++ }
++
++ let mut chars = input.chars();
++ let first = match chars.next() {
++ Some(ch) => ch,
++ None => {
++ return Err(LexError);
++ }
++ };
++ let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
++ if recognized.contains(first) {
++ Ok((input.advance(first.len_utf8()), first))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
++ #[cfg(span_locations)]
++ let lo = input.off;
++ let (rest, (comment, inner)) = doc_comment_contents(input)?;
++ let span = crate::Span::_new_stable(Span {
++ #[cfg(span_locations)]
++ lo,
++ #[cfg(span_locations)]
++ hi: rest.off,
++ });
++
++ let mut scan_for_bare_cr = comment;
++ while let Some(cr) = scan_for_bare_cr.find('\r') {
++ let rest = &scan_for_bare_cr[cr + 1..];
++ if !rest.starts_with('\n') {
++ return Err(LexError);
++ }
++ scan_for_bare_cr = rest;
++ }
++
++ let mut trees = Vec::new();
++ trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
++ if inner {
++ trees.push(Punct::new('!', Spacing::Alone).into());
++ }
++ let mut stream = vec![
++ TokenTree::Ident(crate::Ident::new("doc", span)),
++ TokenTree::Punct(Punct::new('=', Spacing::Alone)),
++ TokenTree::Literal(crate::Literal::string(comment)),
++ ];
++ for tt in stream.iter_mut() {
++ tt.set_span(span);
++ }
++ let group = Group::new(Delimiter::Bracket, stream.into_iter().collect());
++ trees.push(crate::Group::_new_stable(group).into());
++ for tt in trees.iter_mut() {
++ tt.set_span(span);
++ }
++ Ok((rest, trees))
++}
++
++fn doc_comment_contents(input: Cursor) -> PResult<(&str, bool)> {
++ if input.starts_with("//!") {
++ let input = input.advance(3);
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, true)))
++ } else if input.starts_with("/*!") {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], true)))
++ } else if input.starts_with("///") {
++ let input = input.advance(3);
++ if input.starts_with("/") {
++ return Err(LexError);
++ }
++ let (input, s) = take_until_newline_or_eof(input);
++ Ok((input, (s, false)))
++ } else if input.starts_with("/**") && !input.rest[3..].starts_with('*') {
++ let (input, s) = block_comment(input)?;
++ Ok((input, (&s[3..s.len() - 2], false)))
++ } else {
++ Err(LexError)
++ }
++}
++
++fn take_until_newline_or_eof(input: Cursor) -> (Cursor, &str) {
++ let chars = input.char_indices();
++
++ for (i, ch) in chars {
++ if ch == '\n' {
++ return (input.advance(i), &input.rest[..i]);
++ } else if ch == '\r' && input.rest[i + 1..].starts_with('\n') {
++ return (input.advance(i + 1), &input.rest[..i]);
++ }
++ }
++
++ (input.advance(input.len()), input.rest)
++}
+diff --git a/third_party/rust/proc-macro2/src/strnom.rs b/third_party/rust/proc-macro2/src/strnom.rs
+deleted file mode 100644
+--- a/third_party/rust/proc-macro2/src/strnom.rs
++++ /dev/null
+@@ -1,391 +0,0 @@
+-//! Adapted from [`nom`](https://github.com/Geal/nom).
+-
+-use crate::fallback::LexError;
+-use std::str::{Bytes, CharIndices, Chars};
+-use unicode_xid::UnicodeXID;
+-
+-#[derive(Copy, Clone, Eq, PartialEq)]
+-pub struct Cursor<'a> {
+- pub rest: &'a str,
+- #[cfg(span_locations)]
+- pub off: u32,
+-}
+-
+-impl<'a> Cursor<'a> {
+- #[cfg(not(span_locations))]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- }
+- }
+- #[cfg(span_locations)]
+- pub fn advance(&self, amt: usize) -> Cursor<'a> {
+- Cursor {
+- rest: &self.rest[amt..],
+- off: self.off + (amt as u32),
+- }
+- }
+-
+- pub fn find(&self, p: char) -> Option<usize> {
+- self.rest.find(p)
+- }
+-
+- pub fn starts_with(&self, s: &str) -> bool {
+- self.rest.starts_with(s)
+- }
+-
+- pub fn is_empty(&self) -> bool {
+- self.rest.is_empty()
+- }
+-
+- pub fn len(&self) -> usize {
+- self.rest.len()
+- }
+-
+- pub fn as_bytes(&self) -> &'a [u8] {
+- self.rest.as_bytes()
+- }
+-
+- pub fn bytes(&self) -> Bytes<'a> {
+- self.rest.bytes()
+- }
+-
+- pub fn chars(&self) -> Chars<'a> {
+- self.rest.chars()
+- }
+-
+- pub fn char_indices(&self) -> CharIndices<'a> {
+- self.rest.char_indices()
+- }
+-}
+-
+-pub type PResult<'a, O> = Result<(Cursor<'a>, O), LexError>;
+-
+-pub fn whitespace(input: Cursor) -> PResult<()> {
+- if input.is_empty() {
+- return Err(LexError);
+- }
+-
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- while i < bytes.len() {
+- let s = input.advance(i);
+- if bytes[i] == b'/' {
+- if s.starts_with("//")
+- && (!s.starts_with("///") || s.starts_with("////"))
+- && !s.starts_with("//!")
+- {
+- if let Some(len) = s.find('\n') {
+- i += len + 1;
+- continue;
+- }
+- break;
+- } else if s.starts_with("/**/") {
+- i += 4;
+- continue;
+- } else if s.starts_with("/*")
+- && (!s.starts_with("/**") || s.starts_with("/***"))
+- && !s.starts_with("/*!")
+- {
+- let (_, com) = block_comment(s)?;
+- i += com.len();
+- continue;
+- }
+- }
+- match bytes[i] {
+- b' ' | 0x09..=0x0d => {
+- i += 1;
+- continue;
+- }
+- b if b <= 0x7f => {}
+- _ => {
+- let ch = s.chars().next().unwrap();
+- if is_whitespace(ch) {
+- i += ch.len_utf8();
+- continue;
+- }
+- }
+- }
+- return if i > 0 { Ok((s, ())) } else { Err(LexError) };
+- }
+- Ok((input.advance(input.len()), ()))
+-}
+-
+-pub fn block_comment(input: Cursor) -> PResult<&str> {
+- if !input.starts_with("/*") {
+- return Err(LexError);
+- }
+-
+- let mut depth = 0;
+- let bytes = input.as_bytes();
+- let mut i = 0;
+- let upper = bytes.len() - 1;
+- while i < upper {
+- if bytes[i] == b'/' && bytes[i + 1] == b'*' {
+- depth += 1;
+- i += 1; // eat '*'
+- } else if bytes[i] == b'*' && bytes[i + 1] == b'/' {
+- depth -= 1;
+- if depth == 0 {
+- return Ok((input.advance(i + 2), &input.rest[..i + 2]));
+- }
+- i += 1; // eat '/'
+- }
+- i += 1;
+- }
+- Err(LexError)
+-}
+-
+-pub fn skip_whitespace(input: Cursor) -> Cursor {
+- match whitespace(input) {
+- Ok((rest, _)) => rest,
+- Err(LexError) => input,
+- }
+-}
+-
+-fn is_whitespace(ch: char) -> bool {
+- // Rust treats left-to-right mark and right-to-left mark as whitespace
+- ch.is_whitespace() || ch == '\u{200e}' || ch == '\u{200f}'
+-}
+-
+-pub fn word_break(input: Cursor) -> PResult<()> {
+- match input.chars().next() {
+- Some(ch) if UnicodeXID::is_xid_continue(ch) => Err(LexError),
+- Some(_) | None => Ok((input, ())),
+- }
+-}
+-
+-macro_rules! named {
+- ($name:ident -> $o:ty, $submac:ident!( $($args:tt)* )) => {
+- fn $name<'a>(i: Cursor<'a>) -> $crate::strnom::PResult<'a, $o> {
+- $submac!(i, $($args)*)
+- }
+- };
+-}
+-
+-macro_rules! alt {
+- ($i:expr, $e:ident | $($rest:tt)*) => {
+- alt!($i, call!($e) | $($rest)*)
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*) | $($rest:tt)*) => {
+- match $subrule!($i, $($args)*) {
+- res @ Ok(_) => res,
+- _ => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr } | $($rest:tt)+) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => alt!($i, $($rest)*)
+- }
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr } | $($rest:tt)*) => {
+- alt!($i, call!($e) => { $gen } | $($rest)*)
+- };
+-
+- ($i:expr, $e:ident => { $gen:expr }) => {
+- alt!($i, call!($e) => { $gen })
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)* ) => { $gen:expr }) => {
+- match $subrule!($i, $($args)*) {
+- Ok((i, o)) => Ok((i, $gen(o))),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $e:ident) => {
+- alt!($i, call!($e))
+- };
+-
+- ($i:expr, $subrule:ident!( $($args:tt)*)) => {
+- $subrule!($i, $($args)*)
+- };
+-}
+-
+-macro_rules! do_parse {
+- ($i:expr, ( $($rest:expr),* )) => {
+- Ok(($i, ( $($rest),* )))
+- };
+-
+- ($i:expr, $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, _)) => do_parse!(i, $($rest)*),
+- }
+- };
+-
+- ($i:expr, $field:ident : $e:ident >> $($rest:tt)*) => {
+- do_parse!($i, $field: call!($e) >> $($rest)*)
+- };
+-
+- ($i:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => {
+- let $field = o;
+- do_parse!(i, $($rest)*)
+- },
+- }
+- };
+-}
+-
+-macro_rules! peek {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, o)) => Ok(($i, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-}
+-
+-macro_rules! call {
+- ($i:expr, $fun:expr $(, $args:expr)*) => {
+- $fun($i $(, $args)*)
+- };
+-}
+-
+-macro_rules! option {
+- ($i:expr, $f:expr) => {
+- match $f($i) {
+- Ok((i, o)) => Ok((i, Some(o))),
+- Err(LexError) => Ok(($i, None)),
+- }
+- };
+-}
+-
+-macro_rules! take_until_newline_or_eof {
+- ($i:expr,) => {{
+- if $i.len() == 0 {
+- Ok(($i, ""))
+- } else {
+- match $i.find('\n') {
+- Some(i) => Ok(($i.advance(i), &$i.rest[..i])),
+- None => Ok(($i.advance($i.len()), &$i.rest[..$i.len()])),
+- }
+- }
+- }};
+-}
+-
+-macro_rules! tuple {
+- ($i:expr, $($rest:tt)*) => {
+- tuple_parser!($i, (), $($rest)*)
+- };
+-}
+-
+-/// Do not use directly. Use `tuple!`.
+-macro_rules! tuple_parser {
+- ($i:expr, ($($parsed:tt),*), $e:ident, $($rest:tt)*) => {
+- tuple_parser!($i, ($($parsed),*), call!($e), $($rest)*)
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, (o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt)*), $submac:ident!( $($args:tt)* ), $($rest:tt)*) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => tuple_parser!(i, ($($parsed)* , o), $($rest)*),
+- }
+- };
+-
+- ($i:expr, ($($parsed:tt),*), $e:ident) => {
+- tuple_parser!($i, ($($parsed),*), call!($e))
+- };
+-
+- ($i:expr, (), $submac:ident!( $($args:tt)* )) => {
+- $submac!($i, $($args)*)
+- };
+-
+- ($i:expr, ($($parsed:expr),*), $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, ($($parsed),*, o)))
+- }
+- };
+-
+- ($i:expr, ($($parsed:expr),*)) => {
+- Ok(($i, ($($parsed),*)))
+- };
+-}
+-
+-macro_rules! not {
+- ($i:expr, $submac:ident!( $($args:tt)* )) => {
+- match $submac!($i, $($args)*) {
+- Ok((_, _)) => Err(LexError),
+- Err(LexError) => Ok(($i, ())),
+- }
+- };
+-}
+-
+-macro_rules! tag {
+- ($i:expr, $tag:expr) => {
+- if $i.starts_with($tag) {
+- Ok(($i.advance($tag.len()), &$i.rest[..$tag.len()]))
+- } else {
+- Err(LexError)
+- }
+- };
+-}
+-
+-macro_rules! punct {
+- ($i:expr, $punct:expr) => {
+- $crate::strnom::punct($i, $punct)
+- };
+-}
+-
+-/// Do not use directly. Use `punct!`.
+-pub fn punct<'a>(input: Cursor<'a>, token: &'static str) -> PResult<'a, &'a str> {
+- let input = skip_whitespace(input);
+- if input.starts_with(token) {
+- Ok((input.advance(token.len()), token))
+- } else {
+- Err(LexError)
+- }
+-}
+-
+-macro_rules! preceded {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $submac2:ident!( $($args2:tt)* )) => {
+- match tuple!($i, $submac!($($args)*), $submac2!($($args2)*)) {
+- Ok((remaining, (_, o))) => Ok((remaining, o)),
+- Err(LexError) => Err(LexError),
+- }
+- };
+-
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- preceded!($i, $submac!($($args)*), call!($g))
+- };
+-}
+-
+-macro_rules! delimited {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $($rest:tt)+) => {
+- match tuple_parser!($i, (), $submac!($($args)*), $($rest)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i1, (_, o, _))) => Ok((i1, o))
+- }
+- };
+-}
+-
+-macro_rules! map {
+- ($i:expr, $submac:ident!( $($args:tt)* ), $g:expr) => {
+- match $submac!($i, $($args)*) {
+- Err(LexError) => Err(LexError),
+- Ok((i, o)) => Ok((i, call!(o, $g)))
+- }
+- };
+-
+- ($i:expr, $f:expr, $g:expr) => {
+- map!($i, call!($f), $g)
+- };
+-}
+diff --git a/third_party/rust/proc-macro2/src/wrapper.rs b/third_party/rust/proc-macro2/src/wrapper.rs
+--- a/third_party/rust/proc-macro2/src/wrapper.rs
++++ b/third_party/rust/proc-macro2/src/wrapper.rs
+@@ -1,96 +1,39 @@
+-use std::fmt;
+-use std::iter;
++use crate::detection::inside_proc_macro;
++use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
++use std::fmt::{self, Debug, Display};
++use std::iter::FromIterator;
+ use std::ops::RangeBounds;
+-use std::panic::{self, PanicInfo};
++use std::panic;
+ #[cfg(super_unstable)]
+ use std::path::PathBuf;
+ use std::str::FromStr;
+
+-use crate::{fallback, Delimiter, Punct, Spacing, TokenTree};
+-
+ #[derive(Clone)]
+-pub enum TokenStream {
++pub(crate) enum TokenStream {
+ Compiler(DeferredTokenStream),
+ Fallback(fallback::TokenStream),
+ }
+
+ // Work around https://github.com/rust-lang/rust/issues/65080.
+ // In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
+ // we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+ // late as possible to batch together consecutive uses of the Extend impl.
+ #[derive(Clone)]
+-pub struct DeferredTokenStream {
++pub(crate) struct DeferredTokenStream {
+ stream: proc_macro::TokenStream,
+ extra: Vec<proc_macro::TokenTree>,
+ }
+
+-pub enum LexError {
++pub(crate) enum LexError {
+ Compiler(proc_macro::LexError),
+ Fallback(fallback::LexError),
+ }
+
+-fn nightly_works() -> bool {
+- use std::sync::atomic::*;
+- use std::sync::Once;
+-
+- static WORKS: AtomicUsize = AtomicUsize::new(0);
+- static INIT: Once = Once::new();
+-
+- match WORKS.load(Ordering::SeqCst) {
+- 1 => return false,
+- 2 => return true,
+- _ => {}
+- }
+-
+- // Swap in a null panic hook to avoid printing "thread panicked" to stderr,
+- // then use catch_unwind to determine whether the compiler's proc_macro is
+- // working. When proc-macro2 is used from outside of a procedural macro all
+- // of the proc_macro crate's APIs currently panic.
+- //
+- // The Once is to prevent the possibility of this ordering:
+- //
+- // thread 1 calls take_hook, gets the user's original hook
+- // thread 1 calls set_hook with the null hook
+- // thread 2 calls take_hook, thinks null hook is the original hook
+- // thread 2 calls set_hook with the null hook
+- // thread 1 calls set_hook with the actual original hook
+- // thread 2 calls set_hook with what it thinks is the original hook
+- //
+- // in which the user's hook has been lost.
+- //
+- // There is still a race condition where a panic in a different thread can
+- // happen during the interval that the user's original panic hook is
+- // unregistered such that their hook is incorrectly not called. This is
+- // sufficiently unlikely and less bad than printing panic messages to stderr
+- // on correct use of this crate. Maybe there is a libstd feature request
+- // here. For now, if a user needs to guarantee that this failure mode does
+- // not occur, they need to call e.g. `proc_macro2::Span::call_site()` from
+- // the main thread before launching any other threads.
+- INIT.call_once(|| {
+- type PanicHook = dyn Fn(&PanicInfo) + Sync + Send + 'static;
+-
+- let null_hook: Box<PanicHook> = Box::new(|_panic_info| { /* ignore */ });
+- let sanity_check = &*null_hook as *const PanicHook;
+- let original_hook = panic::take_hook();
+- panic::set_hook(null_hook);
+-
+- let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+- WORKS.store(works as usize + 1, Ordering::SeqCst);
+-
+- let hopefully_null_hook = panic::take_hook();
+- panic::set_hook(original_hook);
+- if sanity_check != &*hopefully_null_hook {
+- panic!("observed race condition in proc_macro2::nightly_works");
+- }
+- });
+- nightly_works()
+-}
+-
+ fn mismatch() -> ! {
+ panic!("stable/nightly mismatch")
+ }
+
+ impl DeferredTokenStream {
+ fn new(stream: proc_macro::TokenStream) -> Self {
+ DeferredTokenStream {
+ stream,
+@@ -98,28 +41,33 @@ impl DeferredTokenStream {
+ }
+ }
+
+ fn is_empty(&self) -> bool {
+ self.stream.is_empty() && self.extra.is_empty()
+ }
+
+ fn evaluate_now(&mut self) {
+- self.stream.extend(self.extra.drain(..));
++ // If-check provides a fast short circuit for the common case of `extra`
++ // being empty, which saves a round trip over the proc macro bridge.
++ // Improves macro expansion time in winrt by 6% in debug mode.
++ if !self.extra.is_empty() {
++ self.stream.extend(self.extra.drain(..));
++ }
+ }
+
+ fn into_token_stream(mut self) -> proc_macro::TokenStream {
+ self.evaluate_now();
+ self.stream
+ }
+ }
+
+ impl TokenStream {
+ pub fn new() -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
+ } else {
+ TokenStream::Fallback(fallback::TokenStream::new())
+ }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ match self {
+@@ -142,31 +90,37 @@ impl TokenStream {
+ }
+ }
+ }
+
+ impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Ok(TokenStream::Compiler(DeferredTokenStream::new(
+- src.parse()?,
++ proc_macro_parse(src)?,
+ )))
+ } else {
+ Ok(TokenStream::Fallback(src.parse()?))
+ }
+ }
+ }
+
+-impl fmt::Display for TokenStream {
++// Work around https://github.com/rust-lang/rust/issues/58736.
++fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
++ panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
++ .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
++}
++
++impl Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Display::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Display::fmt(tts, f),
+ }
+ }
+ }
+
+ impl From<proc_macro::TokenStream> for TokenStream {
+ fn from(inner: proc_macro::TokenStream) -> TokenStream {
+ TokenStream::Compiler(DeferredTokenStream::new(inner))
+ }
+@@ -182,17 +136,17 @@ impl From<TokenStream> for proc_macro::T
+ }
+
+ impl From<fallback::TokenStream> for TokenStream {
+ fn from(inner: fallback::TokenStream) -> TokenStream {
+ TokenStream::Fallback(inner)
+ }
+ }
+
+-// Assumes nightly_works().
++// Assumes inside_proc_macro().
+ fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+ match token {
+ TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Punct(tt) => {
+ let spacing = match tt.spacing() {
+ Spacing::Joint => proc_macro::Spacing::Joint,
+ Spacing::Alone => proc_macro::Spacing::Alone,
+ };
+@@ -202,37 +156,37 @@ fn into_compiler_token(token: TokenTree)
+ }
+ TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+ TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+ }
+ }
+
+ impl From<TokenTree> for TokenStream {
+ fn from(token: TokenTree) -> TokenStream {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+ } else {
+ TokenStream::Fallback(token.into())
+ }
+ }
+ }
+
+-impl iter::FromIterator<TokenTree> for TokenStream {
++impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+- if nightly_works() {
++ if inside_proc_macro() {
+ TokenStream::Compiler(DeferredTokenStream::new(
+ trees.into_iter().map(into_compiler_token).collect(),
+ ))
+ } else {
+ TokenStream::Fallback(trees.into_iter().collect())
+ }
+ }
+ }
+
+-impl iter::FromIterator<TokenStream> for TokenStream {
++impl FromIterator<TokenStream> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+ let mut streams = streams.into_iter();
+ match streams.next() {
+ Some(TokenStream::Compiler(mut first)) => {
+ first.evaluate_now();
+ first.stream.extend(streams.map(|s| match s {
+ TokenStream::Compiler(s) => s.into_token_stream(),
+ TokenStream::Fallback(_) => mismatch(),
+@@ -247,75 +201,76 @@ impl iter::FromIterator<TokenStream> for
+ TokenStream::Fallback(first)
+ }
+ None => TokenStream::new(),
+ }
+ }
+ }
+
+ impl Extend<TokenTree> for TokenStream {
+- fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
++ fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, stream: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ // Here is the reason for DeferredTokenStream.
+- tts.extra
+- .extend(streams.into_iter().map(into_compiler_token));
++ for token in stream {
++ tts.extra.push(into_compiler_token(token));
++ }
+ }
+- TokenStream::Fallback(tts) => tts.extend(streams),
++ TokenStream::Fallback(tts) => tts.extend(stream),
+ }
+ }
+ }
+
+ impl Extend<TokenStream> for TokenStream {
+ fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+ match self {
+ TokenStream::Compiler(tts) => {
+ tts.evaluate_now();
+ tts.stream
+- .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
++ .extend(streams.into_iter().map(TokenStream::unwrap_nightly));
+ }
+ TokenStream::Fallback(tts) => {
+- tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
++ tts.extend(streams.into_iter().map(TokenStream::unwrap_stable));
+ }
+ }
+ }
+ }
+
+-impl fmt::Debug for TokenStream {
++impl Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
+- TokenStream::Fallback(tts) => tts.fmt(f),
++ TokenStream::Compiler(tts) => Debug::fmt(&tts.clone().into_token_stream(), f),
++ TokenStream::Fallback(tts) => Debug::fmt(tts, f),
+ }
+ }
+ }
+
+ impl From<proc_macro::LexError> for LexError {
+ fn from(e: proc_macro::LexError) -> LexError {
+ LexError::Compiler(e)
+ }
+ }
+
+ impl From<fallback::LexError> for LexError {
+ fn from(e: fallback::LexError) -> LexError {
+ LexError::Fallback(e)
+ }
+ }
+
+-impl fmt::Debug for LexError {
++impl Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- LexError::Compiler(e) => e.fmt(f),
+- LexError::Fallback(e) => e.fmt(f),
++ LexError::Compiler(e) => Debug::fmt(e, f),
++ LexError::Fallback(e) => Debug::fmt(e, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum TokenTreeIter {
++pub(crate) enum TokenTreeIter {
+ Compiler(proc_macro::token_stream::IntoIter),
+ Fallback(fallback::TokenTreeIter),
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+@@ -356,25 +311,25 @@ impl Iterator for TokenTreeIter {
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ match self {
+ TokenTreeIter::Compiler(tts) => tts.size_hint(),
+ TokenTreeIter::Fallback(tts) => tts.size_hint(),
+ }
+ }
+ }
+
+-impl fmt::Debug for TokenTreeIter {
++impl Debug for TokenTreeIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("TokenTreeIter").finish()
+ }
+ }
+
+ #[derive(Clone, PartialEq, Eq)]
+ #[cfg(super_unstable)]
+-pub enum SourceFile {
++pub(crate) enum SourceFile {
+ Compiler(proc_macro::SourceFile),
+ Fallback(fallback::SourceFile),
+ }
+
+ #[cfg(super_unstable)]
+ impl SourceFile {
+ fn nightly(sf: proc_macro::SourceFile) -> Self {
+ SourceFile::Compiler(sf)
+@@ -392,68 +347,87 @@ impl SourceFile {
+ match self {
+ SourceFile::Compiler(a) => a.is_real(),
+ SourceFile::Fallback(a) => a.is_real(),
+ }
+ }
+ }
+
+ #[cfg(super_unstable)]
+-impl fmt::Debug for SourceFile {
++impl Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- SourceFile::Compiler(a) => a.fmt(f),
+- SourceFile::Fallback(a) => a.fmt(f),
++ SourceFile::Compiler(a) => Debug::fmt(a, f),
++ SourceFile::Fallback(a) => Debug::fmt(a, f),
+ }
+ }
+ }
+
+ #[cfg(any(super_unstable, feature = "span-locations"))]
+-pub struct LineColumn {
++pub(crate) struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+ }
+
+ #[derive(Copy, Clone)]
+-pub enum Span {
++pub(crate) enum Span {
+ Compiler(proc_macro::Span),
+ Fallback(fallback::Span),
+ }
+
+ impl Span {
+ pub fn call_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::call_site())
+ } else {
+ Span::Fallback(fallback::Span::call_site())
+ }
+ }
+
++ #[cfg(hygiene)]
++ pub fn mixed_site() -> Span {
++ if inside_proc_macro() {
++ Span::Compiler(proc_macro::Span::mixed_site())
++ } else {
++ Span::Fallback(fallback::Span::mixed_site())
++ }
++ }
++
+ #[cfg(super_unstable)]
+ pub fn def_site() -> Span {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Span::Compiler(proc_macro::Span::def_site())
+ } else {
+ Span::Fallback(fallback::Span::def_site())
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.resolved_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => other,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.resolved_at(b)),
+ _ => mismatch(),
+ }
+ }
+
+- #[cfg(super_unstable)]
+ pub fn located_at(&self, other: Span) -> Span {
+ match (self, other) {
++ #[cfg(hygiene)]
+ (Span::Compiler(a), Span::Compiler(b)) => Span::Compiler(a.located_at(b)),
++
++ // Name resolution affects semantics, but location is only cosmetic
++ #[cfg(not(hygiene))]
++ (Span::Compiler(_), Span::Compiler(_)) => *self,
++
+ (Span::Fallback(a), Span::Fallback(b)) => Span::Fallback(a.located_at(b)),
+ _ => mismatch(),
+ }
+ }
+
+ pub fn unwrap(self) -> proc_macro::Span {
+ match self {
+ Span::Compiler(s) => s,
+@@ -537,36 +511,36 @@ impl From<proc_macro::Span> for crate::S
+ }
+
+ impl From<fallback::Span> for Span {
+ fn from(inner: fallback::Span) -> Span {
+ Span::Fallback(inner)
+ }
+ }
+
+-impl fmt::Debug for Span {
++impl Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Span::Compiler(s) => s.fmt(f),
+- Span::Fallback(s) => s.fmt(f),
++ Span::Compiler(s) => Debug::fmt(s, f),
++ Span::Fallback(s) => Debug::fmt(s, f),
+ }
+ }
+ }
+
+-pub fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
++pub(crate) fn debug_span_field_if_nontrivial(debug: &mut fmt::DebugStruct, span: Span) {
+ match span {
+ Span::Compiler(s) => {
+ debug.field("span", &s);
+ }
+ Span::Fallback(s) => fallback::debug_span_field_if_nontrivial(debug, s),
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Group {
++pub(crate) enum Group {
+ Compiler(proc_macro::Group),
+ Fallback(fallback::Group),
+ }
+
+ impl Group {
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ match stream {
+ TokenStream::Compiler(tts) => {
+@@ -647,36 +621,36 @@ impl Group {
+ }
+
+ impl From<fallback::Group> for Group {
+ fn from(g: fallback::Group) -> Self {
+ Group::Fallback(g)
+ }
+ }
+
+-impl fmt::Display for Group {
++impl Display for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Display::fmt(group, formatter),
++ Group::Fallback(group) => Display::fmt(group, formatter),
+ }
+ }
+ }
+
+-impl fmt::Debug for Group {
++impl Debug for Group {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Group::Compiler(group) => group.fmt(formatter),
+- Group::Fallback(group) => group.fmt(formatter),
++ Group::Compiler(group) => Debug::fmt(group, formatter),
++ Group::Fallback(group) => Debug::fmt(group, formatter),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Ident {
++pub(crate) enum Ident {
+ Compiler(proc_macro::Ident),
+ Fallback(fallback::Ident),
+ }
+
+ impl Ident {
+ pub fn new(string: &str, span: Span) -> Ident {
+ match span {
+ Span::Compiler(s) => Ident::Compiler(proc_macro::Ident::new(string, s)),
+@@ -742,56 +716,56 @@ where
+ let other = other.as_ref();
+ match self {
+ Ident::Compiler(t) => t.to_string() == other,
+ Ident::Fallback(t) => t == other,
+ }
+ }
+ }
+
+-impl fmt::Display for Ident {
++impl Display for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Display::fmt(t, f),
++ Ident::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Ident {
++impl Debug for Ident {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Ident::Compiler(t) => t.fmt(f),
+- Ident::Fallback(t) => t.fmt(f),
++ Ident::Compiler(t) => Debug::fmt(t, f),
++ Ident::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+
+ #[derive(Clone)]
+-pub enum Literal {
++pub(crate) enum Literal {
+ Compiler(proc_macro::Literal),
+ Fallback(fallback::Literal),
+ }
+
+ macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+ }
+ }
+ )*)
+ }
+
+ macro_rules! unsuffixed_integers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::$name(n))
+ } else {
+ Literal::Fallback(fallback::Literal::$name(n))
+ }
+ }
+ )*)
+ }
+
+@@ -825,49 +799,49 @@ impl Literal {
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f32_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f32_unsuffixed(f))
+ }
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::f64_unsuffixed(f))
+ } else {
+ Literal::Fallback(fallback::Literal::f64_unsuffixed(f))
+ }
+ }
+
+ pub fn string(t: &str) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::string(t))
+ } else {
+ Literal::Fallback(fallback::Literal::string(t))
+ }
+ }
+
+ pub fn character(t: char) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::character(t))
+ } else {
+ Literal::Fallback(fallback::Literal::character(t))
+ }
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+- if nightly_works() {
++ if inside_proc_macro() {
+ Literal::Compiler(proc_macro::Literal::byte_string(bytes))
+ } else {
+ Literal::Fallback(fallback::Literal::byte_string(bytes))
+ }
+ }
+
+ pub fn span(&self) -> Span {
+ match self {
+@@ -903,25 +877,25 @@ impl Literal {
+ }
+
+ impl From<fallback::Literal> for Literal {
+ fn from(s: fallback::Literal) -> Literal {
+ Literal::Fallback(s)
+ }
+ }
+
+-impl fmt::Display for Literal {
++impl Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Display::fmt(t, f),
++ Literal::Fallback(t) => Display::fmt(t, f),
+ }
+ }
+ }
+
+-impl fmt::Debug for Literal {
++impl Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+- Literal::Compiler(t) => t.fmt(f),
+- Literal::Fallback(t) => t.fmt(f),
++ Literal::Compiler(t) => Debug::fmt(t, f),
++ Literal::Fallback(t) => Debug::fmt(t, f),
+ }
+ }
+ }
+diff --git a/third_party/rust/proc-macro2/tests/comments.rs b/third_party/rust/proc-macro2/tests/comments.rs
+new file mode 100644
+--- /dev/null
++++ b/third_party/rust/proc-macro2/tests/comments.rs
+@@ -0,0 +1,103 @@
++use proc_macro2::{Delimiter, Literal, Spacing, TokenStream, TokenTree};
++
++// #[doc = "..."] -> "..."
++fn lit_of_outer_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, false)
++}
++
++// #![doc = "..."] -> "..."
++fn lit_of_inner_doc_comment(tokens: TokenStream) -> Literal {
++ lit_of_doc_comment(tokens, true)
++}
++
++fn lit_of_doc_comment(tokens: TokenStream, inner: bool) -> Literal {
++ let mut iter = tokens.clone().into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '#');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ if inner {
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '!');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ }
++ iter = match iter.next().unwrap() {
++ TokenTree::Group(group) => {
++ assert_eq!(group.delimiter(), Delimiter::Bracket);
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ group.stream().into_iter()
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ };
++ match iter.next().unwrap() {
++ TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "doc"),
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '=');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert!(iter.next().is_none(), "unexpected token {:?}", tokens);
++ literal
++ }
++ _ => panic!("wrong token {:?}", tokens),
++ }
++}
++
++#[test]
++fn closed_immediately() {
++ let stream = "/**/".parse::<TokenStream>().unwrap();
++ let tokens = stream.into_iter().collect::<Vec<_>>();
++ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
++}
++
++#[test]
++fn incomplete() {
++ assert!("/*/".parse::<TokenStream>().is_err());
++}
++
++#[test]
++fn lit() {
++ let stream = "/// doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "//! doc".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc\"");
++
++ let stream = "/** doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++
++ let stream = "/*! doc */".parse::<TokenStream>().unwrap();
++ let lit = lit_of_inner_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\" doc \"");
++}
++
++#[test]
++fn carriage_return() {
++ let stream = "///\r\n".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\"");
++
++ let stream = "/**\r\n*/".parse::<TokenStream>().unwrap();
++ let lit = lit_of_outer_doc_comment(stream);
++ assert_eq!(lit.to_string(), "\"\\r\\n\"");
++
++ "///\r".parse::<TokenStream>().unwrap_err();
++ "///\r \n".parse::<TokenStream>().unwrap_err();
++ "/**\r \n*/".parse::<TokenStream>().unwrap_err();
++}
+diff --git a/third_party/rust/proc-macro2/tests/test.rs b/third_party/rust/proc-macro2/tests/test.rs
+--- a/third_party/rust/proc-macro2/tests/test.rs
++++ b/third_party/rust/proc-macro2/tests/test.rs
+@@ -1,12 +1,11 @@
++use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
+ use std::str::{self, FromStr};
+
+-use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
+-
+ #[test]
+ fn idents() {
+ assert_eq!(
+ Ident::new("String", Span::call_site()).to_string(),
+ "String"
+ );
+ assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
+ assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
+@@ -105,16 +104,43 @@ fn literal_suffix() {
+ assert_eq!(token_count("999u256"), 1);
+ assert_eq!(token_count("999r#u256"), 3);
+ assert_eq!(token_count("1."), 1);
+ assert_eq!(token_count("1.f32"), 3);
+ assert_eq!(token_count("1.0_0"), 1);
+ assert_eq!(token_count("1._0"), 3);
+ assert_eq!(token_count("1._m"), 3);
+ assert_eq!(token_count("\"\"s"), 1);
++ assert_eq!(token_count("r\"\"r"), 1);
++ assert_eq!(token_count("b\"\"b"), 1);
++ assert_eq!(token_count("br\"\"br"), 1);
++ assert_eq!(token_count("r#\"\"#r"), 1);
++ assert_eq!(token_count("'c'c"), 1);
++ assert_eq!(token_count("b'b'b"), 1);
++}
++
++#[test]
++fn literal_iter_negative() {
++ let negative_literal = Literal::i32_suffixed(-3);
++ let tokens = TokenStream::from(TokenTree::Literal(negative_literal));
++ let mut iter = tokens.into_iter();
++ match iter.next().unwrap() {
++ TokenTree::Punct(punct) => {
++ assert_eq!(punct.as_char(), '-');
++ assert_eq!(punct.spacing(), Spacing::Alone);
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ match iter.next().unwrap() {
++ TokenTree::Literal(literal) => {
++ assert_eq!(literal.to_string(), "3i32");
++ }
++ unexpected => panic!("unexpected token {:?}", unexpected),
++ }
++ assert!(iter.next().is_none());
+ }
+
+ #[test]
+ fn roundtrip() {
+ fn roundtrip(p: &str) {
+ println!("parse: {}", p);
+ let s = p.parse::<TokenStream>().unwrap().to_string();
+ println!("first: {}", s);
+@@ -161,46 +187,16 @@ fn fail() {
+ fail("' static");
+ fail("r#1");
+ fail("r#_");
+ }
+
+ #[cfg(span_locations)]
+ #[test]
+ fn span_test() {
+- use proc_macro2::TokenTree;
+-
+- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+- let ts = p.parse::<TokenStream>().unwrap();
+- check_spans_internal(ts, &mut lines);
+- }
+-
+- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+- for i in ts {
+- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+- *lines = rest;
+-
+- let start = i.span().start();
+- assert_eq!(start.line, sline, "sline did not match for {}", i);
+- assert_eq!(start.column, scol, "scol did not match for {}", i);
+-
+- let end = i.span().end();
+- assert_eq!(end.line, eline, "eline did not match for {}", i);
+- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+-
+- match i {
+- TokenTree::Group(ref g) => {
+- check_spans_internal(g.stream().clone(), lines);
+- }
+- _ => {}
+- }
+- }
+- }
+- }
+-
+ check_spans(
+ "\
+ /// This is a document comment
+ testing 123
+ {
+ testing 234
+ }",
+ &[
+@@ -269,59 +265,17 @@ fn span_join() {
+ joined1.unwrap().source_file(),
+ source1[0].span().source_file()
+ );
+ }
+
+ #[test]
+ fn no_panic() {
+ let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
+- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
+-}
+-
+-#[test]
+-fn tricky_doc_comment() {
+- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+-
+- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
+- match tokens[0] {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
+- _ => panic!("wrong token {:?}", tokens[0]),
+- }
+- let mut tokens = match tokens[1] {
+- proc_macro2::TokenTree::Group(ref tt) => {
+- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
+- tt.stream().into_iter()
+- }
+- _ => panic!("wrong token {:?}", tokens[0]),
+- };
+-
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
+- t => panic!("wrong token {:?}", t),
+- }
+- match tokens.next().unwrap() {
+- proc_macro2::TokenTree::Literal(ref tt) => {
+- assert_eq!(tt.to_string(), "\" doc\"");
+- }
+- t => panic!("wrong token {:?}", t),
+- }
+- assert!(tokens.next().is_none());
+-
+- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
+- let tokens = stream.into_iter().collect::<Vec<_>>();
+- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
++ assert!(s.parse::<TokenStream>().is_err());
+ }
+
+ #[test]
+ fn op_before_comment() {
+ let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Punct(tt) => {
+ assert_eq!(tt.as_char(), '~');
+@@ -340,30 +294,30 @@ fn raw_identifier() {
+ }
+ assert!(tts.next().is_none());
+ }
+
+ #[test]
+ fn test_debug_ident() {
+ let ident = Ident::new("proc_macro", Span::call_site());
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "Ident(proc_macro)";
+
+- #[cfg(procmacro2_semver_exempt)]
+- let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
++ #[cfg(span_locations)]
++ let expected = "Ident { sym: proc_macro }";
+
+ assert_eq!(expected, format!("{:?}", ident));
+ }
+
+ #[test]
+ fn test_debug_tokenstream() {
+ let tts = TokenStream::from_str("[a + 1]").unwrap();
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected = "\
+ TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a,
+ },
+@@ -374,17 +328,17 @@ TokenStream [
+ Literal {
+ lit: 1,
+ },
+ ],
+ },
+ ]\
+ ";
+
+- #[cfg(not(procmacro2_semver_exempt))]
++ #[cfg(not(span_locations))]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a
+ },
+@@ -395,17 +349,17 @@ TokenStream [
+ Literal {
+ lit: 1
+ }
+ ]
+ }
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected = "\
+ TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a,
+ span: bytes(2..3),
+@@ -420,17 +374,17 @@ TokenStream [
+ span: bytes(6..7),
+ },
+ ],
+ span: bytes(1..8),
+ },
+ ]\
+ ";
+
+- #[cfg(procmacro2_semver_exempt)]
++ #[cfg(span_locations)]
+ let expected_before_trailing_commas = "\
+ TokenStream [
+ Group {
+ delimiter: Bracket,
+ stream: TokenStream [
+ Ident {
+ sym: a,
+ span: bytes(2..3)
+@@ -459,8 +413,85 @@ TokenStream [
+ }
+
+ #[test]
+ fn default_tokenstream_is_empty() {
+ let default_token_stream: TokenStream = Default::default();
+
+ assert!(default_token_stream.is_empty());
+ }
++
++#[test]
++fn tuple_indexing() {
++ // This behavior may change depending on https://github.com/rust-lang/rust/pull/71322
++ let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter();
++ assert_eq!("tuple", tokens.next().unwrap().to_string());
++ assert_eq!(".", tokens.next().unwrap().to_string());
++ assert_eq!("0.0", tokens.next().unwrap().to_string());
++ assert!(tokens.next().is_none());
++}
++
++#[cfg(span_locations)]
++#[test]
++fn non_ascii_tokens() {
++ check_spans("// abc", &[]);
++ check_spans("// ábc", &[]);
++ check_spans("// abc x", &[]);
++ check_spans("// ábc x", &[]);
++ check_spans("/* abc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ábc */ x", &[(1, 10, 1, 11)]);
++ check_spans("/* ab\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/* áb\nc */ x", &[(2, 5, 2, 6)]);
++ check_spans("/*** abc */ x", &[(1, 12, 1, 13)]);
++ check_spans("/*** ábc */ x", &[(1, 12, 1, 13)]);
++ check_spans(r#""abc""#, &[(1, 0, 1, 5)]);
++ check_spans(r#""ábc""#, &[(1, 0, 1, 5)]);
++ check_spans(r###"r#"abc"#"###, &[(1, 0, 1, 8)]);
++ check_spans(r###"r#"ábc"#"###, &[(1, 0, 1, 8)]);
++ check_spans("r#\"a\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("r#\"á\nc\"#", &[(1, 0, 2, 3)]);
++ check_spans("'a'", &[(1, 0, 1, 3)]);
++ check_spans("'á'", &[(1, 0, 1, 3)]);
++ check_spans("//! abc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! abc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("//! ábc\n", &[(1, 0, 1, 7), (1, 0, 1, 7), (1, 0, 1, 7)]);
++ check_spans("/*! abc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! ábc */", &[(1, 0, 1, 10), (1, 0, 1, 10), (1, 0, 1, 10)]);
++ check_spans("/*! a\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("/*! á\nc */", &[(1, 0, 2, 4), (1, 0, 2, 4), (1, 0, 2, 4)]);
++ check_spans("abc", &[(1, 0, 1, 3)]);
++ check_spans("ábc", &[(1, 0, 1, 3)]);
++ check_spans("ábć", &[(1, 0, 1, 3)]);
++ check_spans("abc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábc// foo", &[(1, 0, 1, 3)]);
++ check_spans("ábć// foo", &[(1, 0, 1, 3)]);
++ check_spans("b\"a\\\n c\"", &[(1, 0, 2, 3)]);
++ check_spans("b\"a\\\n\u{00a0}c\"", &[(1, 0, 2, 3)]);
++}
++
++#[cfg(span_locations)]
++fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
++ let ts = p.parse::<TokenStream>().unwrap();
++ check_spans_internal(ts, &mut lines);
++ assert!(lines.is_empty(), "leftover ranges: {:?}", lines);
++}
++
++#[cfg(span_locations)]
++fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
++ for i in ts {
++ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
++ *lines = rest;
++
++ let start = i.span().start();
++ assert_eq!(start.line, sline, "sline did not match for {}", i);
++ assert_eq!(start.column, scol, "scol did not match for {}", i);
++
++ let end = i.span().end();
++ assert_eq!(end.line, eline, "eline did not match for {}", i);
++ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
++
++ if let TokenTree::Group(g) = i {
++ check_spans_internal(g.stream().clone(), lines);
++ }
++ }
++ }
++}
+diff --git a/third_party/rust/proc-macro2/tests/test_fmt.rs b/third_party/rust/proc-macro2/tests/test_fmt.rs
+new file mode 100644
+--- /dev/null
++++ b/third_party/rust/proc-macro2/tests/test_fmt.rs
+@@ -0,0 +1,26 @@
++use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
++use std::iter::{self, FromIterator};
++
++#[test]
++fn test_fmt_group() {
++ let ident = Ident::new("x", Span::call_site());
++ let inner = TokenStream::from_iter(iter::once(TokenTree::Ident(ident)));
++ let parens_empty = Group::new(Delimiter::Parenthesis, TokenStream::new());
++ let parens_nonempty = Group::new(Delimiter::Parenthesis, inner.clone());
++ let brackets_empty = Group::new(Delimiter::Bracket, TokenStream::new());
++ let brackets_nonempty = Group::new(Delimiter::Bracket, inner.clone());
++ let braces_empty = Group::new(Delimiter::Brace, TokenStream::new());
++ let braces_nonempty = Group::new(Delimiter::Brace, inner.clone());
++ let none_empty = Group::new(Delimiter::None, TokenStream::new());
++ let none_nonempty = Group::new(Delimiter::None, inner.clone());
++
++ // Matches libproc_macro.
++ assert_eq!("()", parens_empty.to_string());
++ assert_eq!("(x)", parens_nonempty.to_string());
++ assert_eq!("[]", brackets_empty.to_string());
++ assert_eq!("[x]", brackets_nonempty.to_string());
++ assert_eq!("{ }", braces_empty.to_string());
++ assert_eq!("{ x }", braces_nonempty.to_string());
++ assert_eq!("", none_empty.to_string());
++ assert_eq!("x", none_nonempty.to_string());
++}
+diff --git a/third_party/rust/syn/.cargo-checksum.json b/third_party/rust/syn/.cargo-checksum.json
+--- a/third_party/rust/syn/.cargo-checksum.json
++++ b/third_party/rust/syn/.cargo-checksum.json
+@@ -1,1 +1,1 @@
+-{"files":{"Cargo.toml":"484d29864d333a361652fa4e24e1dcfab9efa47705ffd8c106d802eb03b78da7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"ca605417b6db8c995458f8407afaad6c177aedcc2274004283600f5638fa1b0c","benches/file.rs":"b45211cc4a0296a77aac2b4de16dbc6b5cb66adfb5afac00a77bccea87f43968","benches/rust.rs":"9cc0f62e944f1583d05c43a395a1556731501cf5976ef67a081f4f6387f883ba","build.rs":"7423ab199728d55c7d64c44b7c6729cfd93bd8273366a77707353003e27565d7","src/attr.rs":"cf81add298f0e75c35a9980a59bc3c2fd3fe933635830d1591374eeb2487c225","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"2a432c11a3da67a21d46c2272bf9ce60a0bb20893b5750027bbd8ca3e843ab35","src/custom_keyword.rs":"589e46ec1be9a04d6de12c0b8cadf87cc1c05606ed46ddea62e9869cbca4a191","src/custom_punctuation.rs":"2ba2e294e15a0fce7ede3686c42b2891797079a724dd1193b66e7d305624c891","src/data.rs":"cc9b250d084e444782d3ff5e63c1ba387cbde8f7f2e977eab9846d920b4b8c3f","src/derive.rs":"c18878f14be5d5ab11fd7dda2d2ff1ff75c9662daf11eed033de62e4d0670a89","src/discouraged.rs":"50e10915695c4d14f64a78e20ecbef90a2cd53a7c26ee3426a2524a8ee5c9cbf","src/error.rs":"2c17a402f83ed5ae4ad96e753216771bef620235c2ff1ccc23f4bbafc7266fe1","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"871d8eeb43cef02ef88de3bea7477b79b4eabc096a0899dde0e5750edf482f49","src/ext.rs":"b97ed549490b9248b5b5df31b3d5b08ba8791e23e6c5d3a1157a0363eb683ff3","src/file.rs":"3cc2bf5c709238d515a557f721f231c8c725b196400de051f945b549299d38a7","src/gen/fold.rs":"10b3ae33d0ce410d6bbe8b93be9d5f9e856c7dc8212133cc46b703f97d548190","src/gen/visit.rs":"e0f5798552d186024696b7bfc7219d4ff53b0e45f735a83e77cbb6b6578c5fa4","src/gen/visit_mut.rs":"9f7dda83907969971dba84d545aaa563b0728e54db97ffa
b5050fdf43a79c731","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d845d7a828863123a5187fd0fe59c9dae3636f63bad302bd035792eed3dcb1ba","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"213f2f58c65ee1aa222f111bc9b1be681f8fb069caed04ca56586839979318d0","src/keyword.rs":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","src/lib.rs":"24778e9f15e8025e75aca114c712716ada586b471adb3b3b69278f4d39b8a21b","src/lifetime.rs":"905359708f772ec858954badde69ee016d29e6eeba1dd205b268445b1aff6f3a","src/lit.rs":"5bb0bddb94cbd256e50e92dc091a0baa09f1be40a77058b897507f3b17191e5d","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"6b468244cc07e3f2f10419f833d9e2ed23edbcd6dc34cf21c5947633699db964","src/macros.rs":"0d8c3bab47539aa2d00bec64e92c901ea2c9c0af74c868051c0905b82650f970","src/op.rs":"93cd44770bb110deadf807a01d9a666efe644b6e3010f4b51cae77ee7438cfbb","src/parse.rs":"5017123c249ebc65866af113a0ad671814b9873f47568180e6539a305eb0317d","src/parse_macro_input.rs":"f799aadb7216c2d333b579f48ed2fedfe07b5e96f004b25b569649ffbaa958d2","src/parse_quote.rs":"81575bf60b18b0d8624d7025a5bcc8dcd6633ad70c454dee2a06e4c391700b6c","src/pat.rs":"db0f2263b9813de1f4e3e3e0396fe0080b1e11c8090c6b4fb6fca3cfbe22bc96","src/path.rs":"32e685ac7fd2d4b9989802de8f326a8d47fa710f86ec3e45fd9d3ff8fdfe97ef","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"384e7b317b26f24118eb4b0c39e949ee9f4f3e700a4c80e462342c83b2cc3282","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"adddb6acae14a0fa340df302b932c31e34b259706ce56fd82ab597ec424500e1","src/stmt.rs":"fbccf2b4da7980fe6ea8d99457d291577c0f225b370c1dd97d
a41abf2a18fcf7","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"761d8d1793560eb2b631c36ddfdbb14ac65178405f095453aa0e75e8816bdbb9","src/tt.rs":"1e32ae216d14d895ff621bf32bc611f138aa00108b0090be2cbaa0affebe8e2a","src/ty.rs":"ce052e0079b65b66bea4e9502d2ff2c90ad4b867904bf7eb892eb60aa3ef219a","tests/clone.sh":"961243d42540d9992090efbbd5478b7aa395671db64a6c28cdadb6edc610ebdf","tests/common/eq.rs":"a42d339808fd32dd4bfd440c254add8c56d5e2cde3a6bf0c88621b618ce5eaa7","tests/common/mod.rs":"20a3300016351fa210a193fbb0db059ef5125fa7909585ded64790004d4977ed","tests/common/parse.rs":"17ba6d1e74aaa3f8096c6d379d803221f12d95cca69927be047d6ddf8367647f","tests/debug/gen.rs":"57bd5cf585e0b86ad00f29f09ff3db3390c4a756d503514a9b28407500dcea3c","tests/debug/mod.rs":"462d6fe34ee75c3ca1207d4db2ff3bdee5b430b9f9ca632e5671d1588d3f76b3","tests/features/error.rs":"e0581a2869cbd237c2bc18a0a85272296e1577bb5f7317a67fa85e28e04eea6f","tests/features/mod.rs":"66a2605ec54ede29208da350f2bed536dfa763b58408d64d3fca3b13de64b64f","tests/macros/mod.rs":"3f2d758c0ba76b93f54b0c1fc22ad50edff8ef42629ba4d47ac7d7f823da8359","tests/repo/mod.rs":"e851a68972c9194a9a8d7b68538b16ed79ae81cba55e1a2ce210d1b759fb1a21","tests/test_asyncness.rs":"b6c46118b036e6807d24eb0e1779244b4fca23dac0d8031e9843b3edec484ce8","tests/test_attribute.rs":"2d8f18a98c989d3f7adaaeb1aeebd4f8413365ace63feecb37cb3f9db9db4d8f","tests/test_derive_input.rs":"477d80f914c54b526f8ff229788dc0e7798d118f6dcfa348f4c99755edb347b9","tests/test_expr.rs":"f35ca80566849a36e6ba6403d9663519eff37e4224360c468fedff8b561a643e","tests/test_generics.rs":"83a5dc07f5c5701c12625399262f7120b66f01a742523f3eda28da2cf2c87eb3","tests/test_grouping.rs":"aadd75215addd9e5a8fa2f9472117d4cb80f1e8b84e07f4c0845675c9014164f","tests/test_ident.rs":"236c239dd66f543f084f44ff747d7bc3962cf11a019a279777fe972f6e17aa4c","tests/test_iterators.rs":"718938da14778dcba06324d36a99d9317c9d45d81a34c6a44c47e1fa38085e9f","tests/test_lit.rs":"7dff2661a5ac586d
6ed2fe27501cb8ff62f4cf3f6c91f596bff6057c67ad7857","tests/test_meta.rs":"8444dee084882243b107dfc8a6aac27f9382f9774162d1ac8ed8ec30d60c048e","tests/test_parse_buffer.rs":"b244bb4bc41ff06d21f239e60a3d663fdec5aa4af33f2a354afef36d34f0aefc","tests/test_pat.rs":"41776b878efae9b8e340f21ffe6296e921cf309f618482efd98609c33e32c28b","tests/test_precedence.rs":"71f3ea52cda8b40166bb7416fb98774e6a653542497b521f8e183e283dcf579d","tests/test_round_trip.rs":"e0de37f45fa223b488d25a41beab185eb92abb7bf765a9f13fe5d870ff31f5f1","tests/test_should_parse.rs":"4da4e25ee2baa7e75135c375042a7f958de136c5698dab03f99ff7a774dcd463","tests/test_size.rs":"970150b9d49ef91ab4c8f8c6a59b83f9a68a02acb779f0280733a5efaec6487a","tests/test_token_trees.rs":"a07ea657bf03b9c667c821b2db2af49b176ca737e3e01217a73cca78b7f11380","tests/zzz_stable.rs":"961d4940a926db4ca523d834b060c62de988e6a8e01c9f5efaa7bb4c86745b47"},"package":"66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf"}
+\ No newline at end of file
++{"files":{"Cargo.toml":"28ddb678a5ccac4423435384c8b7116f804e896eabc5aae9d5c2bc666aaebbb4","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"03f3b53cf858536a0883aa5b5882ee61dcd0f1e71c0930c9106fcfa1d6aad2df","benches/file.rs":"b4724fc7c0f48b8f488e2632a1064f6c0bf16ded3969680fc3f4a2369536269b","benches/rust.rs":"ea6291ef2d2a83d94a3312fe179d48259f8ec0b04c961993ddd181d0a4ab740e","build.rs":"aeca2312f05aec658eaa66980a0ef3d578837db107a55702b39419ea0422eb4a","src/attr.rs":"7d79482634d6544eb4a4825405407b53660d0f5f8b929f7e1671e005b9d92038","src/await.rs":"18f0b2ecb319991f891e300011424985e3cf33d166ea9f29f22d575fc8c83a76","src/bigint.rs":"efc7f64959980653d73fe4f8bc2a3a2904dc05f45b02c6dc15cd316fa3d7c338","src/buffer.rs":"cf2a4b3bdc247b80c85ff5625a1dfb7a5f517fd835f6e1518a7b924990e4c293","src/custom_keyword.rs":"9627467063e41776315a6a14b2aaea3875592d8e0ebd2dc6df1fc2f12c06f146","src/custom_punctuation.rs":"b00e7bee96eb473507527e39db65e74e71592dc06421d2cfe45ed899c17d4847","src/data.rs":"7aec9a745cd53ec95688afa353f6efb9576e7fc0143757b51d28bc3d900b1d2a","src/derive.rs":"fa71866df6e383673dd3329f455a9f953585b83f9739050be3bf1f8c6d526b96","src/discouraged.rs":"a1f3d85e20dedf50b1b7b4571d970a3a6e9b2de4afde7dd0c986fe240df2ba46","src/error.rs":"c3005b50e3132026250c5356d0d391bf96db8087f0f5f744de98e360d8a20a3e","src/export.rs":"dcae67456278c0339acfbcbb4737b8d37cfba5a150ae789f31f4be79abf7e726","src/expr.rs":"54455fd20041996653ca5379b03cdf3c2fc1b3dd2e1149b5bc6b1dd492545d55","src/ext.rs":"870086d9021e6a6fcefa2f00cd91b55c4b74dcee8f0f6a07e76d96fb44707d61","src/file.rs":"75167ebc77e7870122078eabde1b872c337142d4b0962c20cedffcaaa2a5b7c6","src/gen/clone.rs":"0845c1bf8624c3f235cd247b4eb748e7e16b4c240097cb0ff16751f688c079ae","src/gen/debug.rs":"d24fe37f4ce1dd74f2dc54136e893782d3c4d0908323c036c97599551a56960c","src/gen/eq.rs":"1e6ef09b17ca7f36861ef23ce2a6991b231ed5f087f046469b5f2
3da40f5b419","src/gen/fold.rs":"3f59e59ed8ad2ab5dd347bfbe41bbc785c2aabd8ae902087a584a6daed597182","src/gen/hash.rs":"e5b2a52587173076777233a9e57e2b3c8e0dd6d6f41d16fa7c9fde68b05c2bfc","src/gen/visit.rs":"23008c170d4dd3975232876a0a654921d9b6af57372cb9fcc133ca740588d666","src/gen/visit_mut.rs":"42886c3ee02ded72d9c3eec006e20431eaee0c6b90ddefc1a36ec7bf50c6a24a","src/gen_helper.rs":"ea6c66388365971db6a2fc86cbb208f7eacde77e245bc8623f27a3642a3d7741","src/generics.rs":"d1c175284ca21e777ef0414c28383929b170ccb00aaf7a929eb18d3b05e18da8","src/group.rs":"119b62d8481b4b1c327639bed40e114bf1969765250b68186628247fd4144b3b","src/ident.rs":"503156ce51a7ef0420892e8dbf2ecf8fe51f42a84d52cc2c05654e1a83020cbf","src/item.rs":"c9ad9881e8cda8ee3f157f0c7602fc53d08a7e3288b9afc388c393689eac5aea","src/lib.rs":"558ad13779233b27bebc4b2fc8025eb1c7e57b32130dc1dd911391e27b427500","src/lifetime.rs":"f390fe06692fc51fbf3eb490bb9f795da70e4452f51c5b0df3bbaa899084ddf1","src/lit.rs":"9fab84e38756b092fbb055dcdf01e31d42d916c49e3eaae8c9019043b0ee4301","src/lookahead.rs":"5cce8b4cb345a85c24a452ea6d78eadb76f01ca0a789cbf5ce35108334904173","src/mac.rs":"e5cecea397fd01a44958162781d8d94343fe2a1b9b9754a5666c3d2ab4d7ef64","src/macros.rs":"2ce05b553f14da4ee550bb681cb0733b7186ad94719cd36f96d53e15fd02cf2b","src/op.rs":"449514e146deab0ab020bc6f764544c294dbc780941c9802bf60cf1b2839d550","src/parse.rs":"bde888c98ee259f2a73489a693515ed4875432b0d79486ac83aea19f441992a3","src/parse_macro_input.rs":"653a020f023cac0eccbc1fcc34aa7bf80567b43e5475deab4ad3e487a5363201","src/parse_quote.rs":"642f21e5fa54df4b7c373fb158289ee1005d49e1a49b1d194df5438faee71c46","src/pat.rs":"1473b258162cc822f1ee0c0869f521053ed345a140c39ed83b9b4dfb6f9f2aca","src/path.rs":"f119f0c2af12fabd360eac9a2312e0f6e6c28c633c9671bde6ef0bece7c5ba3c","src/print.rs":"da6529c1d9d21aaf6c835f66b4e67eacb7cf91a10eb5e9a2143b49bf99b3b5e1","src/punctuated.rs":"212f5a601d6c2eb8b8fa679be1167b455b595bee964d2775b0101ebb16c3eaa5","src/reserved.rs":"3625eb2a64589a4992ab79a1674e9679f465be
a613ab139a671df5337e88cee6","src/sealed.rs":"896a495a5340eec898527f18bd4ddca408ea03ea0ee3af30074ff48deace778d","src/span.rs":"748c51c6feb223c26d3b1701f5bb98aee823666c775c98106cfa24fe29d8cec1","src/spanned.rs":"7d77714d585e6f42397091ffb3a799fd7b20c05c5442c737683c429ea7d409a5","src/stmt.rs":"3917fbc897f80efe838267833c55650ff8d636cb49a6d1084e28eff65d0e3ccd","src/thread.rs":"815eca6bd64f4eef7c447f0809e84108f5428ff50225224b373efd8fbb696874","src/token.rs":"a1ca6298bf6592cb80cbab1db4eac2fa4e3fa56729bb807bfb0f08ab0f229ca5","src/tt.rs":"1cc9e200624288322f800f32e3d6e2e53da946467bb312dd40a52c02cdcc4730","src/ty.rs":"cb167cbb16240c59a31b44adec175172caaf75ffef9a0bb168584b51bf105795","src/verbatim.rs":"802a97df997432f18cac6e6200ff6ea29fb2474986005e0fcdbc2b65197f87f7","src/whitespace.rs":"e63dd0aa3d34029f17766a8b09c1a6e4479e36c552c8b7023d710a399333aace","tests/.gitignore":"22e782449a3c216db3f7215d5fb8882e316768e40beeec3833aae419ad8941db","tests/common/eq.rs":"4b190a3833bdfd20a4cb1e3dff25a698751dec71d6f30249cf09426e061a4fb1","tests/common/mod.rs":"25ef6d7daa09bad3198a0e9e91b2812425f92db7c585c1e34a03a84d7362ccd8","tests/common/parse.rs":"8b7ba32f4988c30758c108536c4877dc5a039a237bf9b0687220ef2295797bbd","tests/debug/gen.rs":"d6e2abf2a7bb58a7895a60c2f094a98a4f85c9189d02011d0dcef6ef053f26e3","tests/debug/mod.rs":"868763d0ef1609a3ad5e05e9f1bfa0f813e91e7e9a36653414a188bb2fdaa425","tests/macros/mod.rs":"c0eafa4e3845fc08f6efe6021bac37822c0ac325eb7b51194a5f35236f648d92","tests/repo/mod.rs":"9e316b88d57ae213e81950c35e45443078ec90e702798353bc3528cb8a2810b6","tests/repo/progress.rs":"c08d0314a7f3ecf760d471f27da3cd2a500aeb9f1c8331bffb2aa648f9fabf3f","tests/test_asyncness.rs":"cff01db49d28ab23b0b258bc6c0a5cc4071be4fe7248eef344a5d79d2fb649b7","tests/test_attribute.rs":"0ffd99384e1a52ae17d9fed5c4053e411e8f9018decef07ffa621d1faa7329d8","tests/test_derive_input.rs":"610444351e3bf99366976bbf1da109c334a70ac9500caef366bcf9b68819829f","tests/test_expr.rs":"0ee83f6f6de950018c043efcc3e85776b4227dae306830
9998a8d9709f2fc66c","tests/test_generics.rs":"9d713f90a79d6145efc89fb6f946029ca03486c632219950889da39940152ba0","tests/test_grouping.rs":"46c27baec4daaaf1e891892f0b0515ea8a44619071c7d0cc9192580916f1569f","tests/test_ident.rs":"9eb53d1e21edf23e7c9e14dc74dcc2b2538e9221e19dbcc0a44e3acc2e90f3f6","tests/test_item.rs":"461ed0c8648afffcea3217f52c9a88298182b4d39d73a11803b1281d99c98c25","tests/test_iterators.rs":"53ed6078d37550bd6765d2411e3660be401aef8a31a407350cc064a7d08c7c33","tests/test_lit.rs":"2a46c5f2f2ad1dcbb7e9b0cd11b55861c5ff818c2c4c51351d07e2daa7c74674","tests/test_meta.rs":"1fc98af3279cadc3d8db3c7e8d4d7f9e9dbd4d17548cf6a2f6f4536ed65367f6","tests/test_parse_buffer.rs":"8bbe2d24ca8a3788f72c6908fc96c26d546f11c69687bf8d72727f851d5e2d27","tests/test_parse_stream.rs":"2f449a2c41a3dee6fd14bee24e1666a453cb808eda17332fd91afd127fcdd2a6","tests/test_pat.rs":"2cb331fe404496d51e7cc7e283ae13c519a2265ca82e1c88e113296f860c2cba","tests/test_path.rs":"fcd5591e639fc787acc9763d828a811c8114525c9341282eefda8f331e082a51","tests/test_precedence.rs":"8d03656741b01e577d7501ce24332d1a4febec3e31a043e47c61062b8c527ed2","tests/test_receiver.rs":"084eca59984b9a18651da52f2c4407355da3de1335916a12477652999e2d01cc","tests/test_round_trip.rs":"ba01bf4ec04cd2d6f9e4800c343563925ae960c5f16752dc0797fda4451b6cc2","tests/test_shebang.rs":"f5772cadad5b56e3112cb16308b779f92bce1c3a48091fc9933deb2276a69331","tests/test_should_parse.rs":"1d3535698a446e2755bfc360676bdb161841a1f454cdef6e7556c6d06a95c89d","tests/test_size.rs":"5fae772bab66809d6708232f35cfb4a287882486763b0f763feec2ad79fbb68b","tests/test_stmt.rs":"17e4355843ee2982b51faba2721a18966f8c2b9422e16b052a123b8ee8b80752","tests/test_token_trees.rs":"43e56a701817e3c3bfd0cae54a457dd7a38ccb3ca19da41e2b995fdf20e6ed18","tests/test_ty.rs":"5b7c0bfc4963d41920dd0b39fdea419e34f00409ba86ad4211d6c3c7e8bbe1c0","tests/test_visibility.rs":"3f958e2b3b5908005e756a80eea326a91eac97cc4ab60599bebde8d4b942d65c","tests/zzz_stable.rs":"2a862e59cb446235ed99aec0e6ada8e16d3ecc30229
b29d825b7c0bbc2602989"},"package":"963f7d3cc59b59b9325165add223142bbf1df27655d07789f109896d353d8350"}
+\ No newline at end of file
+diff --git a/third_party/rust/syn/Cargo.toml b/third_party/rust/syn/Cargo.toml
+--- a/third_party/rust/syn/Cargo.toml
++++ b/third_party/rust/syn/Cargo.toml
+@@ -8,79 +8,90 @@
+ # If you believe there's an error in this file please file an
+ # issue against the rust-lang/cargo repository. If you're
+ # editing this file be aware that the upstream Cargo.toml
+ # will likely look very different (and much more reasonable)
+
+ [package]
+ edition = "2018"
+ name = "syn"
+-version = "1.0.5"
++version = "1.0.40"
+ authors = ["David Tolnay <dtolnay@gmail.com>"]
+ include = ["/benches/**", "/build.rs", "/Cargo.toml", "/LICENSE-APACHE", "/LICENSE-MIT", "/README.md", "/src/**", "/tests/**"]
+ description = "Parser for Rust source code"
+ documentation = "https://docs.rs/syn"
+ readme = "README.md"
+ categories = ["development-tools::procedural-macro-helpers"]
+ license = "MIT OR Apache-2.0"
+ repository = "https://github.com/dtolnay/syn"
+ [package.metadata.docs.rs]
+ all-features = true
++targets = ["x86_64-unknown-linux-gnu"]
+
+ [package.metadata.playground]
+-all-features = true
+-
+-[lib]
+-name = "syn"
++features = ["full", "visit", "visit-mut", "fold", "extra-traits"]
+
+ [[bench]]
+ name = "rust"
+ harness = false
+ required-features = ["full", "parsing"]
+-edition = "2018"
+
+ [[bench]]
+ name = "file"
+ required-features = ["full", "parsing"]
+-edition = "2018"
+ [dependencies.proc-macro2]
+-version = "1.0"
++version = "1.0.13"
+ default-features = false
+
+ [dependencies.quote]
+ version = "1.0"
+ optional = true
+ default-features = false
+
+ [dependencies.unicode-xid]
+ version = "0.2"
++[dev-dependencies.anyhow]
++version = "1.0"
++
++[dev-dependencies.flate2]
++version = "1.0"
++
+ [dev-dependencies.insta]
+-version = "0.9"
++version = "0.16"
+
+ [dev-dependencies.rayon]
+ version = "1.0"
+
+ [dev-dependencies.ref-cast]
+-version = "0.2"
++version = "1.0"
+
+ [dev-dependencies.regex]
+ version = "1.0"
+
++[dev-dependencies.reqwest]
++version = "0.10"
++features = ["blocking"]
++
++[dev-dependencies.syn-test-suite]
++version = "0"
++
++[dev-dependencies.tar]
++version = "0.4"
++
+ [dev-dependencies.termcolor]
+ version = "1.0"
+
+ [dev-dependencies.walkdir]
+ version = "2.1"
+
+ [features]
+ clone-impls = []
+ default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"]
+ derive = []
+ extra-traits = []
+ fold = []
+ full = []
+ parsing = []
+ printing = ["quote"]
+ proc-macro = ["proc-macro2/proc-macro", "quote/proc-macro"]
++test = ["syn-test-suite/all-features"]
+ visit = []
+ visit-mut = []
+-[badges.travis-ci]
+-repository = "dtolnay/syn"
+diff --git a/third_party/rust/syn/README.md b/third_party/rust/syn/README.md
+--- a/third_party/rust/syn/README.md
++++ b/third_party/rust/syn/README.md
+@@ -1,15 +1,15 @@
+ Parser for Rust source code
+ ===========================
+
+-[](https://travis-ci.org/dtolnay/syn)
+-[](https://crates.io/crates/syn)
+-[](https://docs.rs/syn/1.0/syn/)
+-[](https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html)
++[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/syn-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/syn)
++[<img alt="crates.io" src="https://img.shields.io/crates/v/syn.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/syn)
++[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-syn-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K" height="20">](https://docs.rs/syn)
++[<img alt="build status" src="https://img.shields.io/github/workflow/status/dtolnay/syn/CI/master?style=for-the-badge" height="20">](https://github.com/dtolnay/syn/actions?query=branch%3Amaster)
+
+ Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
+ of Rust source code.
+
+ Currently this library is geared toward use in Rust procedural macros, but
+ contains some APIs that may be useful more generally.
+
+ - **Data structures** — Syn provides a complete syntax tree that can represent
+@@ -41,20 +41,16 @@ contains some APIs that may be useful mo
+
+ [`syn::File`]: https://docs.rs/syn/1.0/syn/struct.File.html
+ [`syn::Item`]: https://docs.rs/syn/1.0/syn/enum.Item.html
+ [`syn::Expr`]: https://docs.rs/syn/1.0/syn/enum.Expr.html
+ [`syn::Type`]: https://docs.rs/syn/1.0/syn/enum.Type.html
+ [`syn::DeriveInput`]: https://docs.rs/syn/1.0/syn/struct.DeriveInput.html
+ [parser functions]: https://docs.rs/syn/1.0/syn/parse/index.html
+
+-If you get stuck with anything involving procedural macros in Rust I am happy to
+-provide help even if the issue is not related to Syn. Please file a ticket in
+-this repo.
+-
+ *Version requirement: Syn supports rustc 1.31 and up.*
+
+ [*Release notes*](https://github.com/dtolnay/syn/releases)
+
+ <br>
+
+ ## Resources
+
+@@ -83,18 +79,16 @@ tokens back to the compiler to compile i
+ syn = "1.0"
+ quote = "1.0"
+
+ [lib]
+ proc-macro = true
+ ```
+
+ ```rust
+-extern crate proc_macro;
+-
+ use proc_macro::TokenStream;
+ use quote::quote;
+ use syn::{parse_macro_input, DeriveInput};
+
+ #[proc_macro_derive(MyMacro)]
+ pub fn my_macro(input: TokenStream) -> TokenStream {
+ // Parse the input tokens into a syntax tree
+ let input = parse_macro_input!(input as DeriveInput);
+@@ -266,17 +260,17 @@ incompatible ecosystems for proc macros
+
+ In general all of your code should be written against proc-macro2 rather than
+ proc-macro. The one exception is in the signatures of procedural macro entry
+ points, which are required by the language to use `proc_macro::TokenStream`.
+
+ The proc-macro2 crate will automatically detect and use the compiler's data
+ structures when a procedural macro is active.
+
+-[proc-macro2]: https://docs.rs/proc-macro2/1.0.0/proc_macro2/
++[proc-macro2]: https://docs.rs/proc-macro2/1.0/proc_macro2/
+
+ <br>
+
+ #### License
+
+ <sup>
+ Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
+ 2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
+diff --git a/third_party/rust/syn/benches/file.rs b/third_party/rust/syn/benches/file.rs
+--- a/third_party/rust/syn/benches/file.rs
++++ b/third_party/rust/syn/benches/file.rs
+@@ -1,14 +1,21 @@
+ // $ cargo bench --features full --bench file
+
+ #![feature(rustc_private, test)]
++#![recursion_limit = "1024"]
+
+ extern crate test;
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ pub mod repo;
+
+ use proc_macro2::TokenStream;
+ use std::fs;
+ use std::str::FromStr;
+ use test::Bencher;
+
+diff --git a/third_party/rust/syn/benches/rust.rs b/third_party/rust/syn/benches/rust.rs
+--- a/third_party/rust/syn/benches/rust.rs
++++ b/third_party/rust/syn/benches/rust.rs
+@@ -1,15 +1,22 @@
+ // $ cargo bench --features full --bench rust
+ //
+ // Syn only, useful for profiling:
+ // $ RUSTFLAGS='--cfg syn_only' cargo build --release --features full --bench rust
+
+ #![cfg_attr(not(syn_only), feature(rustc_private))]
++#![recursion_limit = "1024"]
+
++#[macro_use]
++#[path = "../tests/macros/mod.rs"]
++mod macros;
++
++#[path = "../tests/common/mod.rs"]
++mod common;
+ #[path = "../tests/repo/mod.rs"]
+ mod repo;
+
+ use std::fs;
+ use std::time::{Duration, Instant};
+
+ #[cfg(not(syn_only))]
+ mod tokenstream_parse {
+@@ -23,41 +30,45 @@ mod tokenstream_parse {
+
+ mod syn_parse {
+ pub fn bench(content: &str) -> Result<(), ()> {
+ syn::parse_file(content).map(drop).map_err(drop)
+ }
+ }
+
+ #[cfg(not(syn_only))]
+-mod libsyntax_parse {
++mod librustc_parse {
+ extern crate rustc_data_structures;
+- extern crate syntax;
+- extern crate syntax_pos;
++ extern crate rustc_errors;
++ extern crate rustc_parse;
++ extern crate rustc_session;
++ extern crate rustc_span;
+
+ use rustc_data_structures::sync::Lrc;
+- use syntax::edition::Edition;
+- use syntax::errors::{emitter::Emitter, DiagnosticBuilder, Handler};
+- use syntax::parse::ParseSess;
+- use syntax::source_map::{FilePathMapping, SourceMap};
+- use syntax_pos::FileName;
++ use rustc_errors::{emitter::Emitter, Diagnostic, Handler};
++ use rustc_session::parse::ParseSess;
++ use rustc_span::source_map::{FilePathMapping, SourceMap};
++ use rustc_span::{edition::Edition, FileName};
+
+ pub fn bench(content: &str) -> Result<(), ()> {
+ struct SilentEmitter;
+
+ impl Emitter for SilentEmitter {
+- fn emit_diagnostic(&mut self, _db: &DiagnosticBuilder) {}
++ fn emit_diagnostic(&mut self, _diag: &Diagnostic) {}
++ fn source_map(&self) -> Option<&Lrc<SourceMap>> {
++ None
++ }
+ }
+
+- syntax::with_globals(Edition::Edition2018, || {
++ rustc_span::with_session_globals(Edition::Edition2018, || {
+ let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+ let emitter = Box::new(SilentEmitter);
+ let handler = Handler::with_emitter(false, None, emitter);
+ let sess = ParseSess::with_span_handler(handler, cm);
+- if let Err(mut diagnostic) = syntax::parse::parse_crate_from_source_str(
++ if let Err(mut diagnostic) = rustc_parse::parse_crate_from_source_str(
+ FileName::Custom("bench".to_owned()),
+ content.to_owned(),
+ &sess,
+ ) {
+ diagnostic.cancel();
+ return Err(());
+ };
+ Ok(())
+@@ -99,21 +110,21 @@ fn exec(mut codepath: impl FnMut(&str) -
+ assert_eq!(success, total);
+ begin.elapsed()
+ }
+
+ fn main() {
+ repo::clone_rust();
+
+ macro_rules! testcases {
+- ($($(#[$cfg:meta])* $name:path,)*) => {
++ ($($(#[$cfg:meta])* $name:ident,)*) => {
+ vec![
+ $(
+ $(#[$cfg])*
+- (stringify!($name), $name as fn(&str) -> Result<(), ()>),
++ (stringify!($name), $name::bench as fn(&str) -> Result<(), ()>),
+ )*
+ ]
+ };
+ }
+
+ #[cfg(not(syn_only))]
+ {
+ let mut lines = 0;
+@@ -123,22 +134,22 @@ fn main() {
+ files += 1;
+ Ok(())
+ });
+ eprintln!("\n{} lines in {} files", lines, files);
+ }
+
+ for (name, f) in testcases!(
+ #[cfg(not(syn_only))]
+- read_from_disk::bench,
++ read_from_disk,
+ #[cfg(not(syn_only))]
+- tokenstream_parse::bench,
+- syn_parse::bench,
++ tokenstream_parse,
++ syn_parse,
+ #[cfg(not(syn_only))]
+- libsyntax_parse::bench,
++ librustc_parse,
+ ) {
+ eprint!("{:20}", format!("{}:", name));
+ let elapsed = exec(f);
+ eprintln!(
+ "elapsed={}.{:03}s",
+ elapsed.as_secs(),
+ elapsed.subsec_millis(),
+ );
+diff --git a/third_party/rust/syn/build.rs b/third_party/rust/syn/build.rs
+--- a/third_party/rust/syn/build.rs
++++ b/third_party/rust/syn/build.rs
+@@ -1,11 +1,11 @@
+ use std::env;
+ use std::process::Command;
+-use std::str::{self, FromStr};
++use std::str;
+
+ // The rustc-cfg strings below are *not* public API. Please let us know by
+ // opening a GitHub issue if your build environment requires some way to enable
+ // these cfgs other than by executing our build script.
+ fn main() {
+ let compiler = match rustc_version() {
+ Some(compiler) => compiler,
+ None => return,
+@@ -21,43 +21,19 @@ fn main() {
+ }
+
+ struct Compiler {
+ minor: u32,
+ nightly: bool,
+ }
+
+ fn rustc_version() -> Option<Compiler> {
+- let rustc = match env::var_os("RUSTC") {
+- Some(rustc) => rustc,
+- None => return None,
+- };
+-
+- let output = match Command::new(rustc).arg("--version").output() {
+- Ok(output) => output,
+- Err(_) => return None,
+- };
+-
+- let version = match str::from_utf8(&output.stdout) {
+- Ok(version) => version,
+- Err(_) => return None,
+- };
+-
++ let rustc = env::var_os("RUSTC")?;
++ let output = Command::new(rustc).arg("--version").output().ok()?;
++ let version = str::from_utf8(&output.stdout).ok()?;
+ let mut pieces = version.split('.');
+ if pieces.next() != Some("rustc 1") {
+ return None;
+ }
+-
+- let next = match pieces.next() {
+- Some(next) => next,
+- None => return None,
+- };
+-
+- let minor = match u32::from_str(next) {
+- Ok(minor) => minor,
+- Err(_) => return None,
+- };
+-
+- Some(Compiler {
+- minor: minor,
+- nightly: version.contains("nightly"),
+- })
++ let minor = pieces.next()?.parse().ok()?;
++ let nightly = version.contains("nightly");
++ Some(Compiler { minor, nightly })
+ }
+diff --git a/third_party/rust/syn/src/attr.rs b/third_party/rust/syn/src/attr.rs
+--- a/third_party/rust/syn/src/attr.rs
++++ b/third_party/rust/syn/src/attr.rs
+@@ -4,25 +4,21 @@ use crate::punctuated::Punctuated;
+ use std::iter;
+
+ use proc_macro2::TokenStream;
+
+ #[cfg(feature = "parsing")]
+ use crate::parse::{Parse, ParseBuffer, ParseStream, Parser, Result};
+ #[cfg(feature = "parsing")]
+ use crate::punctuated::Pair;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
+-#[cfg(feature = "extra-traits")]
+-use std::hash::{Hash, Hasher};
+
+ ast_struct! {
+ /// An attribute like `#[repr(transparent)]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// <br>
+ ///
+ /// # Syntax
+ ///
+ /// Rust has six types of attributes.
+ ///
+@@ -106,58 +102,69 @@ ast_struct! {
+ /// If the attribute you are parsing is expected to conform to the
+ /// conventional structured form of attribute, use [`parse_meta()`] to
+ /// obtain that structured representation. If the attribute follows some
+ /// other grammar of its own, use [`parse_args()`] to parse that into the
+ /// expected data structure.
+ ///
+ /// [`parse_meta()`]: Attribute::parse_meta
+ /// [`parse_args()`]: Attribute::parse_args
+- pub struct Attribute #manual_extra_traits {
++ ///
++ /// <p><br></p>
++ ///
++ /// # Doc comments
++ ///
++ /// The compiler transforms doc comments, such as `/// comment` and `/*!
++ /// comment */`, into attributes before macros are expanded. Each comment is
++ /// expanded into an attribute of the form `#[doc = r"comment"]`.
++ ///
++ /// As an example, the following `mod` items are expanded identically:
++ ///
++ /// ```
++ /// # use syn::{ItemMod, parse_quote};
++ /// let doc: ItemMod = parse_quote! {
++ /// /// Single line doc comments
++ /// /// We write so many!
++ /// /**
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// */
++ /// mod example {
++ /// //! Of course, they can be inner too
++ /// /*! And fit in a single line */
++ /// }
++ /// };
++ /// let attr: ItemMod = parse_quote! {
++ /// #[doc = r" Single line doc comments"]
++ /// #[doc = r" We write so many!"]
++ /// #[doc = r"
++ /// * Multi-line comments...
++ /// * May span many lines
++ /// "]
++ /// mod example {
++ /// #![doc = r" Of course, they can be inner too"]
++ /// #![doc = r" And fit in a single line "]
++ /// }
++ /// };
++ /// assert_eq!(doc, attr);
++ /// ```
++ pub struct Attribute {
+ pub pound_token: Token![#],
+ pub style: AttrStyle,
+ pub bracket_token: token::Bracket,
+ pub path: Path,
+ pub tokens: TokenStream,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Attribute {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Attribute {
+- fn eq(&self, other: &Self) -> bool {
+- self.style == other.style
+- && self.pound_token == other.pound_token
+- && self.bracket_token == other.bracket_token
+- && self.path == other.path
+- && TokenStreamHelper(&self.tokens) == TokenStreamHelper(&other.tokens)
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Attribute {
+- fn hash<H>(&self, state: &mut H)
+- where
+- H: Hasher,
+- {
+- self.style.hash(state);
+- self.pound_token.hash(state);
+- self.bracket_token.hash(state);
+- self.path.hash(state);
+- TokenStreamHelper(&self.tokens).hash(state);
+- }
+-}
+-
+ impl Attribute {
+ /// Parses the content of the attribute, consisting of the path and tokens,
+ /// as a [`Meta`] if possible.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_meta(&self) -> Result<Meta> {
+ fn clone_ident_segment(segment: &PathSegment) -> PathSegment {
+ PathSegment {
+ ident: segment.ident.clone(),
+ arguments: PathArguments::None,
+ }
+@@ -194,91 +201,95 @@ impl Attribute {
+ /// parser; and
+ /// - the error message has a more useful span when `tokens` is empty.
+ ///
+ /// ```text
+ /// #[my_attr(value < 5)]
+ /// ^^^^^^^^^ what gets parsed
+ /// ```
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args<T: Parse>(&self) -> Result<T> {
+ self.parse_args_with(T::parse)
+ }
+
+ /// Parse the arguments to the attribute using the given parser.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_args_with<F: Parser>(&self, parser: F) -> Result<F::Output> {
+ let parser = |input: ParseStream| {
+ let args = enter_args(self, input)?;
+ parse::parse_stream(parser, &args)
+ };
+ parser.parse2(self.tokens.clone())
+ }
+
+ /// Parses zero or more outer attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_outer(input: ParseStream) -> Result<Vec<Self>> {
+ let mut attrs = Vec::new();
+ while input.peek(Token![#]) {
+ attrs.push(input.call(parsing::single_parse_outer)?);
+ }
+ Ok(attrs)
+ }
+
+ /// Parses zero or more inner attributes from the stream.
+ ///
+- /// *This function is available if Syn is built with the `"parsing"`
++ /// *This function is available only if Syn is built with the `"parsing"`
+ /// feature.*
+ #[cfg(feature = "parsing")]
+ pub fn parse_inner(input: ParseStream) -> Result<Vec<Self>> {
+ let mut attrs = Vec::new();
+ while input.peek(Token![#]) && input.peek2(Token![!]) {
+ attrs.push(input.call(parsing::single_parse_inner)?);
+ }
+ Ok(attrs)
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+-fn error_expected_args(attr: &Attribute) -> Error {
++fn expected_parentheses(attr: &Attribute) -> String {
+ let style = match attr.style {
+ AttrStyle::Outer => "#",
+ AttrStyle::Inner(_) => "#!",
+ };
+
+ let mut path = String::new();
+ for segment in &attr.path.segments {
+ if !path.is_empty() || attr.path.leading_colon.is_some() {
+ path += "::";
+ }
+ path += &segment.ident.to_string();
+ }
+
+- let msg = format!("expected attribute arguments: {}[{}(...)]", style, path);
+-
+- #[cfg(feature = "printing")]
+- return Error::new_spanned(attr, msg);
+-
+- #[cfg(not(feature = "printing"))]
+- return Error::new(attr.bracket_token.span, msg);
++ format!("{}[{}(...)]", style, path)
+ }
+
+ #[cfg(feature = "parsing")]
+ fn enter_args<'a>(attr: &Attribute, input: ParseStream<'a>) -> Result<ParseBuffer<'a>> {
+ if input.is_empty() {
+- return Err(error_expected_args(attr));
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected attribute arguments in parentheses: {}", expected);
++ return Err(crate::error::new2(
++ attr.pound_token.span,
++ attr.bracket_token.span,
++ msg,
++ ));
++ } else if input.peek(Token![=]) {
++ let expected = expected_parentheses(attr);
++ let msg = format!("expected parentheses: {}", expected);
++ return Err(input.error(msg));
+ };
+
+ let content;
+ if input.peek(token::Paren) {
+ parenthesized!(content in input);
+ } else if input.peek(token::Bracket) {
+ bracketed!(content in input);
+ } else if input.peek(token::Brace) {
+@@ -293,41 +304,40 @@ fn enter_args<'a>(attr: &Attribute, inpu
+ Err(input.error("unexpected token in attribute arguments"))
+ }
+ }
+
+ ast_enum! {
+ /// Distinguishes between attributes that decorate an item and attributes
+ /// that are contained within an item.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Outer attributes
+ ///
+ /// - `#[repr(transparent)]`
+ /// - `/// # Example`
+ /// - `/** Please file an issue */`
+ ///
+ /// # Inner attributes
+ ///
+ /// - `#![feature(proc_macro)]`
+ /// - `//! # Example`
+ /// - `/*! Please file an issue */`
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
+ pub enum AttrStyle {
+ Outer,
+ Inner(Token![!]),
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// Content of a compile-time structured attribute.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// ## Path
+ ///
+ /// A meta path is like the `test` in `#[test]`.
+ ///
+ /// ## List
+ ///
+@@ -355,29 +365,29 @@ ast_enum_of_structs! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ NameValue(MetaNameValue),
+ }
+ }
+
+ ast_struct! {
+ /// A structured list within an attribute, like `derive(Copy, Clone)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaList {
+ pub path: Path,
+ pub paren_token: token::Paren,
+ pub nested: Punctuated<NestedMeta, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A name-value pair within an attribute, like `feature = "nightly"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct MetaNameValue {
+ pub path: Path,
+ pub eq_token: Token![=],
+ pub lit: Lit,
+ }
+ }
+
+@@ -393,17 +403,17 @@ impl Meta {
+ Meta::NameValue(meta) => &meta.path,
+ }
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// Element of a compile-time attribute list.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum NestedMeta {
+ /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
+ /// would be a nested `Meta::Path`.
+ Meta(Meta),
+
+ /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
+ Lit(Lit),
+@@ -424,18 +434,18 @@ ast_enum_of_structs! {
+ /// /* ... */
+ /// }
+ /// ```
+ ///
+ /// The implementation of this macro would want to parse its attribute arguments
+ /// as type `AttributeArgs`.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+ /// # const IGNORE: &str = stringify! {
+ /// #[proc_macro_attribute]
+ /// # };
+ /// pub fn my_attribute(args: TokenStream, input: TokenStream) -> TokenStream {
+ /// let args = parse_macro_input!(args as AttributeArgs);
+@@ -459,27 +469,27 @@ where
+ T: IntoIterator<Item = &'a Attribute>,
+ {
+ type Ret = iter::Filter<T::IntoIter, fn(&&Attribute) -> bool>;
+
+ fn outer(self) -> Self::Ret {
+ fn is_outer(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Outer => true,
+- _ => false,
++ AttrStyle::Inner(_) => false,
+ }
+ }
+ self.into_iter().filter(is_outer)
+ }
+
+ fn inner(self) -> Self::Ret {
+ fn is_inner(attr: &&Attribute) -> bool {
+ match attr.style {
+ AttrStyle::Inner(_) => true,
+- _ => false,
++ AttrStyle::Outer => false,
+ }
+ }
+ self.into_iter().filter(is_inner)
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+diff --git a/third_party/rust/syn/src/buffer.rs b/third_party/rust/syn/src/buffer.rs
+--- a/third_party/rust/syn/src/buffer.rs
++++ b/third_party/rust/syn/src/buffer.rs
+@@ -1,12 +1,12 @@
+ //! A stably addressed token buffer supporting efficient traversal based on a
+ //! cheaply copyable cursor.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ // This module is heavily commented as it contains most of the unsafe code in
+ // Syn, and caution should be used when editing it. The public-facing interface
+ // is 100% safe but the implementation is fragile internally.
+
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+ feature = "proc-macro"
+@@ -31,17 +31,17 @@ enum Entry {
+ // token tree, or null if this is the outermost level.
+ End(*const Entry),
+ }
+
+ /// A buffer that can be efficiently traversed multiple times, unlike
+ /// `TokenStream` which requires a deep copy in order to traverse more than
+ /// once.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct TokenBuffer {
+ // NOTE: Do not derive clone on this - there are raw pointers inside which
+ // will be messed up. Moving the `TokenBuffer` itself is safe as the actual
+ // backing slices won't be moved.
+ data: Box<[Entry]>,
+ }
+
+ impl TokenBuffer {
+@@ -93,17 +93,17 @@ impl TokenBuffer {
+ }
+
+ TokenBuffer { data: entries }
+ }
+
+ /// Creates a `TokenBuffer` containing all the tokens from the input
+ /// `TokenStream`.
+ ///
+- /// *This method is available if Syn is built with both the `"parsing"` and
++ /// *This method is available only if Syn is built with both the `"parsing"` and
+ /// `"proc-macro"` features.*
+ #[cfg(all(
+ not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
+ feature = "proc-macro"
+ ))]
+ pub fn new(stream: pm::TokenStream) -> TokenBuffer {
+ Self::new2(stream.into())
+ }
+@@ -128,18 +128,17 @@ impl TokenBuffer {
+ /// and copied around.
+ ///
+ /// An empty `Cursor` can be created directly, or one may create a `TokenBuffer`
+ /// object and get a cursor to its first token with `begin()`.
+ ///
+ /// Two cursors are equal if they have the same location in the same input
+ /// stream, and have the same scope.
+ ///
+-/// *This type is available if Syn is built with the `"parsing"` feature.*
+-#[derive(Copy, Clone, Eq, PartialEq)]
++/// *This type is available only if Syn is built with the `"parsing"` feature.*
+ pub struct Cursor<'a> {
+ // The current entry which the `Cursor` is pointing at.
+ ptr: *const Entry,
+ // This is the only `Entry::End(..)` object which this cursor is allowed to
+ // point at. All other `End` objects are skipped over in `Cursor::create`.
+ scope: *const Entry,
+ // Cursor is covariant in 'a. This field ensures that our pointers are still
+ // valid.
+@@ -196,37 +195,38 @@ impl<'a> Cursor<'a> {
+
+ /// Bump the cursor to point at the next token after the current one. This
+ /// is undefined behavior if the cursor is currently looking at an
+ /// `Entry::End`.
+ unsafe fn bump(self) -> Cursor<'a> {
+ Cursor::create(self.ptr.offset(1), self.scope)
+ }
+
+- /// If the cursor is looking at a `None`-delimited group, move it to look at
+- /// the first token inside instead. If the group is empty, this will move
++ /// While the cursor is looking at a `None`-delimited group, move it to look
++ /// at the first token inside instead. If the group is empty, this will move
+ /// the cursor past the `None`-delimited group.
+ ///
+ /// WARNING: This mutates its argument.
+ fn ignore_none(&mut self) {
+- if let Entry::Group(group, buf) = self.entry() {
++ while let Entry::Group(group, buf) = self.entry() {
+ if group.delimiter() == Delimiter::None {
+ // NOTE: We call `Cursor::create` here to make sure that
+ // situations where we should immediately exit the span after
+ // entering it are handled correctly.
+ unsafe {
+ *self = Cursor::create(&buf.data[0], self.scope);
+ }
++ } else {
++ break;
+ }
+ }
+ }
+
+ /// Checks whether the cursor is currently pointing at the end of its valid
+ /// scope.
+- #[inline]
+ pub fn eof(self) -> bool {
+ // We're at eof if we're at the end of our scope.
+ self.ptr == self.scope
+ }
+
+ /// If the cursor is pointing at a `Group` with the given delimiter, returns
+ /// a cursor into that group and one pointing to the next `TokenTree`.
+ pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, Span, Cursor<'a>)> {
+@@ -337,16 +337,54 @@ impl<'a> Cursor<'a> {
+ match self.entry() {
+ Entry::Group(group, _) => group.span(),
+ Entry::Literal(l) => l.span(),
+ Entry::Ident(t) => t.span(),
+ Entry::Punct(o) => o.span(),
+ Entry::End(..) => Span::call_site(),
+ }
+ }
++
++ /// Skip over the next token without cloning it. Returns `None` if this
++ /// cursor points to eof.
++ ///
++ /// This method treats `'lifetimes` as a single token.
++ pub(crate) fn skip(self) -> Option<Cursor<'a>> {
++ match self.entry() {
++ Entry::End(..) => None,
++
++ // Treat lifetimes as a single tt for the purposes of 'skip'.
++ Entry::Punct(op) if op.as_char() == '\'' && op.spacing() == Spacing::Joint => {
++ let next = unsafe { self.bump() };
++ match next.entry() {
++ Entry::Ident(_) => Some(unsafe { next.bump() }),
++ _ => Some(next),
++ }
++ }
++ _ => Some(unsafe { self.bump() }),
++ }
++ }
++}
++
++impl<'a> Copy for Cursor<'a> {}
++
++impl<'a> Clone for Cursor<'a> {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++
++impl<'a> Eq for Cursor<'a> {}
++
++impl<'a> PartialEq for Cursor<'a> {
++ fn eq(&self, other: &Self) -> bool {
++ let Cursor { ptr, scope, marker } = self;
++ let _ = marker;
++ *ptr == other.ptr && *scope == other.scope
++ }
+ }
+
+ pub(crate) fn same_scope(a: Cursor, b: Cursor) -> bool {
+ a.scope == b.scope
+ }
+
+ pub(crate) fn open_span_of_group(cursor: Cursor) -> Span {
+ match cursor.entry() {
+diff --git a/third_party/rust/syn/src/custom_keyword.rs b/third_party/rust/syn/src/custom_keyword.rs
+--- a/third_party/rust/syn/src/custom_keyword.rs
++++ b/third_party/rust/syn/src/custom_keyword.rs
+@@ -81,46 +81,46 @@
+ /// value: input.parse()?,
+ /// })
+ /// } else {
+ /// Err(lookahead.error())
+ /// }
+ /// }
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_keyword {
+ ($ident:ident) => {
+ #[allow(non_camel_case_types)]
+ pub struct $ident {
+ pub span: $crate::export::Span,
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
++ #[allow(dead_code, non_snake_case)]
+ pub fn $ident<__S: $crate::export::IntoSpans<[$crate::export::Span; 1]>>(
+ span: __S,
+ ) -> $ident {
+ $ident {
+ span: $crate::export::IntoSpans::into_spans(span)[0],
+ }
+ }
+
+ impl $crate::export::Default for $ident {
+ fn default() -> Self {
+ $ident {
+ span: $crate::export::Span::call_site(),
+ }
+ }
+ }
+
+- impl_parse_for_custom_keyword!($ident);
+- impl_to_tokens_for_custom_keyword!($ident);
+- impl_clone_for_custom_keyword!($ident);
+- impl_extra_traits_for_custom_keyword!($ident);
++ $crate::impl_parse_for_custom_keyword!($ident);
++ $crate::impl_to_tokens_for_custom_keyword!($ident);
++ $crate::impl_clone_for_custom_keyword!($ident);
++ $crate::impl_extra_traits_for_custom_keyword!($ident);
+ };
+ }
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+ #[macro_export]
+ macro_rules! impl_parse_for_custom_keyword {
+diff --git a/third_party/rust/syn/src/custom_punctuation.rs b/third_party/rust/syn/src/custom_punctuation.rs
+--- a/third_party/rust/syn/src/custom_punctuation.rs
++++ b/third_party/rust/syn/src/custom_punctuation.rs
+@@ -69,67 +69,67 @@
+ /// Ok(tokens)
+ /// }
+ ///
+ /// fn main() {
+ /// let input = r#" a::b </> c::d::e "#;
+ /// let _: PathSegments = syn::parse_str(input).unwrap();
+ /// }
+ /// ```
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ pub struct $ident {
+- pub spans: custom_punctuation_repr!($($tt)+),
++ pub spans: $crate::custom_punctuation_repr!($($tt)+),
+ }
+
+ #[doc(hidden)]
+- #[allow(non_snake_case)]
+- pub fn $ident<__S: $crate::export::IntoSpans<custom_punctuation_repr!($($tt)+)>>(
++ #[allow(dead_code, non_snake_case)]
++ pub fn $ident<__S: $crate::export::IntoSpans<$crate::custom_punctuation_repr!($($tt)+)>>(
+ spans: __S,
+ ) -> $ident {
+- let _validate_len = 0 $(+ custom_punctuation_len!(strict, $tt))*;
++ let _validate_len = 0 $(+ $crate::custom_punctuation_len!(strict, $tt))*;
+ $ident {
+ spans: $crate::export::IntoSpans::into_spans(spans)
+ }
+ }
+
+ impl $crate::export::Default for $ident {
+ fn default() -> Self {
+ $ident($crate::export::Span::call_site())
+ }
+ }
+
+- impl_parse_for_custom_punctuation!($ident, $($tt)+);
+- impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
+- impl_clone_for_custom_punctuation!($ident, $($tt)+);
+- impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_parse_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_to_tokens_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_clone_for_custom_punctuation!($ident, $($tt)+);
++ $crate::impl_extra_traits_for_custom_punctuation!($ident, $($tt)+);
+ };
+ }
+
+ // Not public API.
+ #[cfg(feature = "parsing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::token::CustomToken for $ident {
+ fn peek(cursor: $crate::buffer::Cursor) -> bool {
+- $crate::token::parsing::peek_punct(cursor, stringify_punct!($($tt)+))
++ $crate::token::parsing::peek_punct(cursor, $crate::stringify_punct!($($tt)+))
+ }
+
+ fn display() -> &'static $crate::export::str {
+- custom_punctuation_concat!("`", stringify_punct!($($tt)+), "`")
++ concat!("`", $crate::stringify_punct!($($tt)+), "`")
+ }
+ }
+
+ impl $crate::parse::Parse for $ident {
+ fn parse(input: $crate::parse::ParseStream) -> $crate::parse::Result<$ident> {
+- let spans: custom_punctuation_repr!($($tt)+) =
+- $crate::token::parsing::punct(input, stringify_punct!($($tt)+))?;
++ let spans: $crate::custom_punctuation_repr!($($tt)+) =
++ $crate::token::parsing::punct(input, $crate::stringify_punct!($($tt)+))?;
+ Ok($ident(spans))
+ }
+ }
+ };
+ }
+
+ // Not public API.
+ #[cfg(not(feature = "parsing"))]
+@@ -137,22 +137,22 @@ macro_rules! impl_parse_for_custom_punct
+ #[macro_export]
+ macro_rules! impl_parse_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {};
+ }
+
+ // Not public API.
+ #[cfg(feature = "printing")]
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! impl_to_tokens_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {
+ impl $crate::export::ToTokens for $ident {
+ fn to_tokens(&self, tokens: &mut $crate::export::TokenStream2) {
+- $crate::token::printing::punct(stringify_punct!($($tt)+), &self.spans, tokens)
++ $crate::token::printing::punct($crate::stringify_punct!($($tt)+), &self.spans, tokens)
+ }
+ }
+ };
+ }
+
+ // Not public API.
+ #[cfg(not(feature = "printing"))]
+ #[doc(hidden)]
+@@ -216,26 +216,26 @@ macro_rules! impl_extra_traits_for_custo
+ #[doc(hidden)]
+ #[macro_export]
+ macro_rules! impl_extra_traits_for_custom_punctuation {
+ ($ident:ident, $($tt:tt)+) => {};
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ macro_rules! custom_punctuation_repr {
+ ($($tt:tt)+) => {
+- [$crate::export::Span; 0 $(+ custom_punctuation_len!(lenient, $tt))+]
++ [$crate::export::Span; 0 $(+ $crate::custom_punctuation_len!(lenient, $tt))+]
+ };
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+-#[macro_export(local_inner_macros)]
++#[macro_export]
+ #[rustfmt::skip]
+ macro_rules! custom_punctuation_len {
+ ($mode:ident, +) => { 1 };
+ ($mode:ident, +=) => { 2 };
+ ($mode:ident, &) => { 1 };
+ ($mode:ident, &&) => { 2 };
+ ($mode:ident, &=) => { 2 };
+ ($mode:ident, @) => { 1 };
+@@ -274,17 +274,17 @@ macro_rules! custom_punctuation_len {
+ ($mode:ident, <<=) => { 3 };
+ ($mode:ident, >>) => { 2 };
+ ($mode:ident, >>=) => { 3 };
+ ($mode:ident, *) => { 1 };
+ ($mode:ident, -) => { 1 };
+ ($mode:ident, -=) => { 2 };
+ ($mode:ident, ~) => { 1 };
+ (lenient, $tt:tt) => { 0 };
+- (strict, $tt:tt) => {{ custom_punctuation_unexpected!($tt); 0 }};
++ (strict, $tt:tt) => {{ $crate::custom_punctuation_unexpected!($tt); 0 }};
+ }
+
+ // Not public API.
+ #[doc(hidden)]
+ #[macro_export]
+ macro_rules! custom_punctuation_unexpected {
+ () => {};
+ }
+@@ -292,18 +292,8 @@ macro_rules! custom_punctuation_unexpect
+ // Not public API.
+ #[doc(hidden)]
+ #[macro_export]
+ macro_rules! stringify_punct {
+ ($($tt:tt)+) => {
+ concat!($(stringify!($tt)),+)
+ };
+ }
+-
+-// Not public API.
+-// Without this, local_inner_macros breaks when looking for concat!
+-#[doc(hidden)]
+-#[macro_export]
+-macro_rules! custom_punctuation_concat {
+- ($($tt:tt)*) => {
+- concat!($($tt)*)
+- };
+-}
+diff --git a/third_party/rust/syn/src/data.rs b/third_party/rust/syn/src/data.rs
+--- a/third_party/rust/syn/src/data.rs
++++ b/third_party/rust/syn/src/data.rs
+@@ -1,15 +1,15 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+ /// An enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Variant {
+ /// Attributes tagged on the variant.
+ pub attrs: Vec<Attribute>,
+
+ /// Name of the variant.
+ pub ident: Ident,
+
+@@ -19,17 +19,17 @@ ast_struct! {
+ /// Explicit discriminant: `Variant = 1`
+ pub discriminant: Option<(Token![=], Expr)>,
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// Data stored within an enum variant or struct.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+@@ -47,28 +47,28 @@ ast_enum_of_structs! {
+ Unit,
+ }
+ }
+
+ ast_struct! {
+ /// Named fields of a struct or struct variant such as `Point { x: f64,
+ /// y: f64 }`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsNamed {
+ pub brace_token: token::Brace,
+ pub named: Punctuated<Field, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct FieldsUnnamed {
+ pub paren_token: token::Paren,
+ pub unnamed: Punctuated<Field, Token![,]>,
+ }
+ }
+
+ impl Fields {
+@@ -88,16 +88,34 @@ impl Fields {
+ /// struct or variant's fields uniformly.
+ pub fn iter_mut(&mut self) -> punctuated::IterMut<Field> {
+ match self {
+ Fields::Unit => crate::punctuated::empty_punctuated_iter_mut(),
+ Fields::Named(f) => f.named.iter_mut(),
+ Fields::Unnamed(f) => f.unnamed.iter_mut(),
+ }
+ }
++
++ /// Returns the number of fields.
++ pub fn len(&self) -> usize {
++ match self {
++ Fields::Unit => 0,
++ Fields::Named(f) => f.named.len(),
++ Fields::Unnamed(f) => f.unnamed.len(),
++ }
++ }
++
++ /// Returns `true` if there are zero fields.
++ pub fn is_empty(&self) -> bool {
++ match self {
++ Fields::Unit => true,
++ Fields::Named(f) => f.named.is_empty(),
++ Fields::Unnamed(f) => f.unnamed.is_empty(),
++ }
++ }
+ }
+
+ impl IntoIterator for Fields {
+ type Item = Field;
+ type IntoIter = punctuated::IntoIter<Field>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ match self {
+@@ -124,17 +142,17 @@ impl<'a> IntoIterator for &'a mut Fields
+ fn into_iter(self) -> Self::IntoIter {
+ self.iter_mut()
+ }
+ }
+
+ ast_struct! {
+ /// A field of a struct or enum variant.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub struct Field {
+ /// Attributes tagged on the field.
+ pub attrs: Vec<Attribute>,
+
+ /// Visibility of the field.
+ pub vis: Visibility,
+
+@@ -149,17 +167,17 @@ ast_struct! {
+ pub ty: Type,
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// The visibility level of an item: inherited or `pub` or
+ /// `pub(restricted)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+@@ -179,58 +197,61 @@ ast_enum_of_structs! {
+ /// An inherited visibility, which usually means private.
+ Inherited,
+ }
+ }
+
+ ast_struct! {
+ /// A public visibility level: `pub`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisPublic {
+ pub pub_token: Token![pub],
+ }
+ }
+
+ ast_struct! {
+ /// A crate-level visibility: `crate`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisCrate {
+ pub crate_token: Token![crate],
+ }
+ }
+
+ ast_struct! {
+ /// A visibility level restricted to some path: `pub(self)` or
+ /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct VisRestricted {
+ pub pub_token: Token![pub],
+ pub paren_token: token::Paren,
+ pub in_token: Option<Token![in]>,
+ pub path: Box<Path>,
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+ pub mod parsing {
+ use super::*;
+
+ use crate::ext::IdentExt;
++ use crate::parse::discouraged::Speculative;
+ use crate::parse::{Parse, ParseStream, Result};
+
+ impl Parse for Variant {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
++ let _visibility: Visibility = input.parse()?;
+ Ok(Variant {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs,
+ ident: input.parse()?,
+ fields: {
+ if input.peek(token::Brace) {
+ Fields::Named(input.parse()?)
+ } else if input.peek(token::Paren) {
+ Fields::Unnamed(input.parse()?)
+ } else {
+ Fields::Unit
+@@ -290,68 +311,99 @@ pub mod parsing {
+ colon_token: None,
+ ty: input.parse()?,
+ })
+ }
+ }
+
+ impl Parse for Visibility {
+ fn parse(input: ParseStream) -> Result<Self> {
++ // Recognize an empty None-delimited group, as produced by a $:vis
++ // matcher that matched no tokens.
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if group.content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Inherited);
++ }
++ }
++
+ if input.peek(Token![pub]) {
+ Self::parse_pub(input)
+ } else if input.peek(Token![crate]) {
+ Self::parse_crate(input)
+ } else {
+ Ok(Visibility::Inherited)
+ }
+ }
+ }
+
+ impl Visibility {
+ fn parse_pub(input: ParseStream) -> Result<Self> {
+ let pub_token = input.parse::<Token![pub]>()?;
+
+ if input.peek(token::Paren) {
+- // TODO: optimize using advance_to
+ let ahead = input.fork();
+- let mut content;
+- parenthesized!(content in ahead);
+
++ let content;
++ let paren_token = parenthesized!(content in ahead);
+ if content.peek(Token![crate])
+ || content.peek(Token![self])
+ || content.peek(Token![super])
+ {
++ let path = content.call(Ident::parse_any)?;
++
++ // Ensure there are no additional tokens within `content`.
++ // Without explicitly checking, we may misinterpret a tuple
++ // field as a restricted visibility, causing a parse error.
++ // e.g. `pub (crate::A, crate::B)` (Issue #720).
++ if content.is_empty() {
++ input.advance_to(&ahead);
++ return Ok(Visibility::Restricted(VisRestricted {
++ pub_token,
++ paren_token,
++ in_token: None,
++ path: Box::new(Path::from(path)),
++ }));
++ }
++ } else if content.peek(Token![in]) {
++ let in_token: Token![in] = content.parse()?;
++ let path = content.call(Path::parse_mod_style)?;
++
++ input.advance_to(&ahead);
+ return Ok(Visibility::Restricted(VisRestricted {
+ pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: None,
+- path: Box::new(Path::from(content.call(Ident::parse_any)?)),
+- }));
+- } else if content.peek(Token![in]) {
+- return Ok(Visibility::Restricted(VisRestricted {
+- pub_token,
+- paren_token: parenthesized!(content in input),
+- in_token: Some(content.parse()?),
+- path: Box::new(content.call(Path::parse_mod_style)?),
++ paren_token,
++ in_token: Some(in_token),
++ path: Box::new(path),
+ }));
+ }
+ }
+
+ Ok(Visibility::Public(VisPublic { pub_token }))
+ }
+
+ fn parse_crate(input: ParseStream) -> Result<Self> {
+ if input.peek2(Token![::]) {
+ Ok(Visibility::Inherited)
+ } else {
+ Ok(Visibility::Crate(VisCrate {
+ crate_token: input.parse()?,
+ }))
+ }
+ }
++
++ #[cfg(feature = "full")]
++ pub(crate) fn is_some(&self) -> bool {
++ match self {
++ Visibility::Inherited => false,
++ _ => true,
++ }
++ }
+ }
+ }
+
+ #[cfg(feature = "printing")]
+ mod printing {
+ use super::*;
+
+ use proc_macro2::TokenStream;
+diff --git a/third_party/rust/syn/src/derive.rs b/third_party/rust/syn/src/derive.rs
+--- a/third_party/rust/syn/src/derive.rs
++++ b/third_party/rust/syn/src/derive.rs
+@@ -1,15 +1,15 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+
+ ast_struct! {
+ /// Data structure sent to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ pub struct DeriveInput {
+ /// Attributes tagged on the whole struct or enum.
+ pub attrs: Vec<Attribute>,
+
+ /// Visibility of the struct or enum.
+ pub vis: Visibility,
+
+ /// Name of the struct or enum.
+@@ -21,17 +21,17 @@ ast_struct! {
+ /// Data within the struct or enum.
+ pub data: Data,
+ }
+ }
+
+ ast_enum_of_structs! {
+ /// The storage of a struct, enum or union data structure.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` feature.*
++ /// *This type is available only if Syn is built with the `"derive"` feature.*
+ ///
+ /// # Syntax tree enum
+ ///
+ /// This type is a [syntax tree enum].
+ ///
+ /// [syntax tree enum]: enum.Expr.html#syntax-tree-enums
+ //
+ // TODO: change syntax-tree-enum link to an intra rustdoc link, currently
+@@ -48,41 +48,41 @@ ast_enum_of_structs! {
+ }
+
+ do_not_generate_to_tokens
+ }
+
+ ast_struct! {
+ /// A struct input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataStruct {
+ pub struct_token: Token![struct],
+ pub fields: Fields,
+ pub semi_token: Option<Token![;]>,
+ }
+ }
+
+ ast_struct! {
+ /// An enum input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataEnum {
+ pub enum_token: Token![enum],
+ pub brace_token: token::Brace,
+ pub variants: Punctuated<Variant, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// An untagged union input to a `proc_macro_derive` macro.
+ ///
+- /// *This type is available if Syn is built with the `"derive"`
++ /// *This type is available only if Syn is built with the `"derive"`
+ /// feature.*
+ pub struct DataUnion {
+ pub union_token: Token![union],
+ pub fields: FieldsNamed,
+ }
+ }
+
+ #[cfg(feature = "parsing")]
+diff --git a/third_party/rust/syn/src/discouraged.rs b/third_party/rust/syn/src/discouraged.rs
+--- a/third_party/rust/syn/src/discouraged.rs
++++ b/third_party/rust/syn/src/discouraged.rs
+@@ -11,17 +11,17 @@ pub trait Speculative {
+ /// stream to the fork to "commit" the parsing from the fork to the main
+ /// stream.
+ ///
+ /// If you can avoid doing this, you should, as it limits the ability to
+ /// generate useful errors. That said, it is often the only way to parse
+ /// syntax of the form `A* B*` for arbitrary syntax `A` and `B`. The problem
+ /// is that when the fork fails to parse an `A`, it's impossible to tell
+ /// whether that was because of a syntax error and the user meant to provide
+- /// an `A`, or that the `A`s are finished and its time to start parsing
++ /// an `A`, or that the `A`s are finished and it's time to start parsing
+ /// `B`s. Use with care.
+ ///
+ /// Also note that if `A` is a subset of `B`, `A* B*` can be parsed by
+ /// parsing `B*` and removing the leading members of `A` from the
+ /// repetition, bypassing the need to involve the downsides associated with
+ /// speculative parsing.
+ ///
+ /// [`ParseStream::fork`]: ParseBuffer::fork
+@@ -67,17 +67,16 @@ pub trait Speculative {
+ /// # }
+ ///
+ /// impl Parse for PathSegment {
+ /// fn parse(input: ParseStream) -> Result<Self> {
+ /// if input.peek(Token![super])
+ /// || input.peek(Token![self])
+ /// || input.peek(Token![Self])
+ /// || input.peek(Token![crate])
+- /// || input.peek(Token![extern])
+ /// {
+ /// let ident = input.call(Ident::parse_any)?;
+ /// return Ok(PathSegment::from(ident));
+ /// }
+ ///
+ /// let ident = input.parse()?;
+ /// if input.peek(Token![::]) && input.peek3(Token![<]) {
+ /// return Ok(PathSegment {
+@@ -159,13 +158,37 @@ pub trait Speculative {
+ }
+
+ impl<'a> Speculative for ParseBuffer<'a> {
+ fn advance_to(&self, fork: &Self) {
+ if !crate::buffer::same_scope(self.cursor(), fork.cursor()) {
+ panic!("Fork was not derived from the advancing parse stream");
+ }
+
++ let (self_unexp, self_sp) = inner_unexpected(self);
++ let (fork_unexp, fork_sp) = inner_unexpected(fork);
++ if !Rc::ptr_eq(&self_unexp, &fork_unexp) {
++ match (fork_sp, self_sp) {
++ // Unexpected set on the fork, but not on `self`, copy it over.
++ (Some(span), None) => {
++ self_unexp.set(Unexpected::Some(span));
++ }
++ // Unexpected unset. Use chain to propagate errors from fork.
++ (None, None) => {
++ fork_unexp.set(Unexpected::Chain(self_unexp));
++
++ // Ensure toplevel 'unexpected' tokens from the fork don't
++ // bubble up the chain by replacing the root `unexpected`
++ // pointer, only 'unexpected' tokens from existing group
++ // parsers should bubble.
++ fork.unexpected
++ .set(Some(Rc::new(Cell::new(Unexpected::None))));
++ }
++ // Unexpected has been set on `self`. No changes needed.
++ (_, Some(_)) => {}
++ }
++ }
++
+ // See comment on `cell` in the struct definition.
+ self.cell
+ .set(unsafe { mem::transmute::<Cursor, Cursor<'static>>(fork.cursor()) })
+ }
+ }
+diff --git a/third_party/rust/syn/src/error.rs b/third_party/rust/syn/src/error.rs
+--- a/third_party/rust/syn/src/error.rs
++++ b/third_party/rust/syn/src/error.rs
+@@ -1,9 +1,8 @@
+-use std;
+ use std::fmt::{self, Debug, Display};
+ use std::iter::FromIterator;
+ use std::slice;
+ use std::vec;
+
+ use proc_macro2::{
+ Delimiter, Group, Ident, LexError, Literal, Punct, Spacing, Span, TokenStream, TokenTree,
+ };
+@@ -27,18 +26,18 @@ pub type Result<T> = std::result::Result
+ /// message than simply panicking the macro.
+ ///
+ /// [`compile_error!`]: https://doc.rust-lang.org/std/macro.compile_error.html
+ ///
+ /// When parsing macro input, the [`parse_macro_input!`] macro handles the
+ /// conversion to `compile_error!` automatically.
+ ///
+ /// ```
+-/// extern crate proc_macro;
+-///
++/// # extern crate proc_macro;
++/// #
+ /// use proc_macro::TokenStream;
+ /// use syn::{parse_macro_input, AttributeArgs, ItemFn};
+ ///
+ /// # const IGNORE: &str = stringify! {
+ /// #[proc_macro_attribute]
+ /// # };
+ /// pub fn my_attr(args: TokenStream, input: TokenStream) -> TokenStream {
+ /// let args = parse_macro_input!(args as AttributeArgs);
+@@ -77,17 +76,16 @@ pub type Result<T> = std::result::Result
+ /// # use proc_macro2::TokenStream;
+ /// # use syn::{DeriveInput, Result};
+ /// #
+ /// # pub fn my_derive(input: DeriveInput) -> Result<TokenStream> {
+ /// # unimplemented!()
+ /// # }
+ /// # }
+ /// ```
+-#[derive(Clone)]
+ pub struct Error {
+ messages: Vec<ErrorMessage>,
+ }
+
+ struct ErrorMessage {
+ // Span is implemented as an index into a thread-local interner to keep the
+ // size small. It is not safe to access from a different thread. We want
+ // errors to be Send and Sync to play nicely with the Failure crate, so pin
+@@ -245,16 +243,27 @@ pub fn new_at<T: Display>(scope: Span, c
+ if cursor.eof() {
+ Error::new(scope, format!("unexpected end of input, {}", message))
+ } else {
+ let span = crate::buffer::open_span_of_group(cursor);
+ Error::new(span, message)
+ }
+ }
+
++#[cfg(all(feature = "parsing", any(feature = "full", feature = "derive")))]
++pub fn new2<T: Display>(start: Span, end: Span, message: T) -> Error {
++ Error {
++ messages: vec![ErrorMessage {
++ start_span: ThreadBound::new(start),
++ end_span: ThreadBound::new(end),
++ message: message.to_string(),
++ }],
++ }
++}
++
+ impl Debug for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ if self.messages.len() == 1 {
+ formatter
+ .debug_tuple("Error")
+ .field(&self.messages[0])
+ .finish()
+ } else {
+@@ -273,16 +282,24 @@ impl Debug for ErrorMessage {
+ }
+
+ impl Display for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str(&self.messages[0].message)
+ }
+ }
+
++impl Clone for Error {
++ fn clone(&self) -> Self {
++ Error {
++ messages: self.messages.clone(),
++ }
++ }
++}
++
+ impl Clone for ErrorMessage {
+ fn clone(&self) -> Self {
+ let start = self
+ .start_span
+ .get()
+ .cloned()
+ .unwrap_or_else(Span::call_site);
+ let end = self.end_span.get().cloned().unwrap_or_else(Span::call_site);
+@@ -350,8 +367,16 @@ impl<'a> Iterator for Iter<'a> {
+ type Item = Error;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ Some(Error {
+ messages: vec![self.messages.next()?.clone()],
+ })
+ }
+ }
++
++impl Extend<Error> for Error {
++ fn extend<T: IntoIterator<Item = Error>>(&mut self, iter: T) {
++ for err in iter {
++ self.combine(err);
++ }
++ }
++}
+diff --git a/third_party/rust/syn/src/expr.rs b/third_party/rust/syn/src/expr.rs
+--- a/third_party/rust/syn/src/expr.rs
++++ b/third_party/rust/syn/src/expr.rs
+@@ -1,23 +1,26 @@
+ use super::*;
+ use crate::punctuated::Punctuated;
+-#[cfg(feature = "extra-traits")]
+-use crate::tt::TokenStreamHelper;
++#[cfg(feature = "full")]
++use crate::reserved::Reserved;
+ use proc_macro2::{Span, TokenStream};
+-#[cfg(feature = "extra-traits")]
++#[cfg(feature = "printing")]
++use quote::IdentFragment;
++#[cfg(feature = "printing")]
++use std::fmt::{self, Display};
+ use std::hash::{Hash, Hasher};
+-#[cfg(all(feature = "parsing", feature = "full"))]
++#[cfg(feature = "parsing")]
+ use std::mem;
+
+ ast_enum_of_structs! {
+ /// A Rust expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
+- /// feature.*
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
++ /// feature, but most of the variants are not available unless "full" is enabled.*
+ ///
+ /// # Syntax tree enums
+ ///
+ /// This type is a syntax tree enum. In Syn this and other syntax tree enums
+ /// are designed to be traversed using the following rebinding idiom.
+ ///
+ /// ```
+ /// # use syn::Expr;
+@@ -78,17 +81,17 @@ ast_enum_of_structs! {
+ /// if let Expr::Tuple(base) = *discriminant.base {
+ /// # }
+ /// # }
+ /// ```
+ ///
+ /// A sign that you may not be choosing the right variable names is if you
+ /// see names getting repeated in your code, like accessing
+ /// `receiver.receiver` or `pat.pat` or `cond.cond`.
+- pub enum Expr #manual_extra_traits {
++ pub enum Expr {
+ /// A slice literal expression: `[a, b, c, d]`.
+ Array(ExprArray),
+
+ /// An assignment expression: `a = compute()`.
+ Assign(ExprAssign),
+
+ /// A compound assignment expression: `counter += 1`.
+ AssignOp(ExprAssignOp),
+@@ -223,191 +226,191 @@ ast_enum_of_structs! {
+ #[doc(hidden)]
+ __Nonexhaustive,
+ }
+ }
+
+ ast_struct! {
+ /// A slice literal expression: `[a, b, c, d]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprArray #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+ pub elems: Punctuated<Expr, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// An assignment expression: `a = compute()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssign #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+ pub eq_token: Token![=],
+ pub right: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A compound assignment expression: `counter += 1`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAssignOp #full {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+ pub op: BinOp,
+ pub right: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// An async block: `async { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAsync #full {
+ pub attrs: Vec<Attribute>,
+ pub async_token: Token![async],
+ pub capture: Option<Token![move]>,
+ pub block: Block,
+ }
+ }
+
+ ast_struct! {
+ /// An await expression: `fut.await`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprAwait #full {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+ pub dot_token: Token![.],
+ pub await_token: token::Await,
+ }
+ }
+
+ ast_struct! {
+ /// A binary operation: `a + b`, `a * b`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprBinary {
+ pub attrs: Vec<Attribute>,
+ pub left: Box<Expr>,
+ pub op: BinOp,
+ pub right: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A blocked scope: `{ ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub block: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A box expression: `box f`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBox #full {
+ pub attrs: Vec<Attribute>,
+ pub box_token: Token![box],
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A `break`, with an optional label to break and an optional
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprBreak #full {
+ pub attrs: Vec<Attribute>,
+ pub break_token: Token![break],
+ pub label: Option<Lifetime>,
+ pub expr: Option<Box<Expr>>,
+ }
+ }
+
+ ast_struct! {
+ /// A function call expression: `invoke(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCall {
+ pub attrs: Vec<Attribute>,
+ pub func: Box<Expr>,
+ pub paren_token: token::Paren,
+ pub args: Punctuated<Expr, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A cast expression: `foo as f64`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprCast {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub as_token: Token![as],
+ pub ty: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A closure expression: `|a, b| a + b`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprClosure #full {
+ pub attrs: Vec<Attribute>,
+ pub asyncness: Option<Token![async]>,
+ pub movability: Option<Token![static]>,
+ pub capture: Option<Token![move]>,
+ pub or1_token: Token![|],
+ pub inputs: Punctuated<Pat, Token![,]>,
+ pub or2_token: Token![|],
+ pub output: ReturnType,
+ pub body: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A `continue`, with an optional label.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprContinue #full {
+ pub attrs: Vec<Attribute>,
+ pub continue_token: Token![continue],
+ pub label: Option<Lifetime>,
+ }
+ }
+
+ ast_struct! {
+ /// Access of a named struct field (`obj.k`) or unnamed tuple struct
+ /// field (`obj.0`).
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprField {
+ pub attrs: Vec<Attribute>,
+ pub base: Box<Expr>,
+ pub dot_token: Token![.],
+ pub member: Member,
+ }
+ }
+
+ ast_struct! {
+ /// A for loop: `for pat in expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprForLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub for_token: Token![for],
+ pub pat: Pat,
+ pub in_token: Token![in],
+ pub expr: Box<Expr>,
+ pub body: Block,
+@@ -416,538 +419,312 @@ ast_struct! {
+
+ ast_struct! {
+ /// An expression contained within invisible delimiters.
+ ///
+ /// This variant is important for faithfully representing the precedence
+ /// of expressions and is related to `None`-delimited spans in a
+ /// `TokenStream`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprGroup #full {
+ pub attrs: Vec<Attribute>,
+ pub group_token: token::Group,
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// An `if` expression with an optional `else` block: `if expr { ... }
+ /// else { ... }`.
+ ///
+ /// The `else` branch expression may only be an `If` or `Block`
+ /// expression, not any of the other types of expression.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprIf #full {
+ pub attrs: Vec<Attribute>,
+ pub if_token: Token![if],
+ pub cond: Box<Expr>,
+ pub then_branch: Block,
+ pub else_branch: Option<(Token![else], Box<Expr>)>,
+ }
+ }
+
+ ast_struct! {
+ /// A square bracketed indexing expression: `vector[2]`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprIndex {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub bracket_token: token::Bracket,
+ pub index: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A `let` guard: `let Some(x) = opt`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLet #full {
+ pub attrs: Vec<Attribute>,
+ pub let_token: Token![let],
+ pub pat: Pat,
+ pub eq_token: Token![=],
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A literal in place of an expression: `1`, `"foo"`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprLit {
+ pub attrs: Vec<Attribute>,
+ pub lit: Lit,
+ }
+ }
+
+ ast_struct! {
+ /// Conditionless loop: `loop { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprLoop #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub loop_token: Token![loop],
+ pub body: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A macro invocation expression: `format!("{}", q)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMacro #full {
+ pub attrs: Vec<Attribute>,
+ pub mac: Macro,
+ }
+ }
+
+ ast_struct! {
+ /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMatch #full {
+ pub attrs: Vec<Attribute>,
+ pub match_token: Token![match],
+ pub expr: Box<Expr>,
+ pub brace_token: token::Brace,
+ pub arms: Vec<Arm>,
+ }
+ }
+
+ ast_struct! {
+ /// A method call expression: `x.foo::<T>(a, b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprMethodCall #full {
+ pub attrs: Vec<Attribute>,
+ pub receiver: Box<Expr>,
+ pub dot_token: Token![.],
+ pub method: Ident,
+ pub turbofish: Option<MethodTurbofish>,
+ pub paren_token: token::Paren,
+ pub args: Punctuated<Expr, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A parenthesized expression: `(a + b)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprParen {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A path like `std::mem::replace` possibly containing generic
+ /// parameters and a qualified self-type.
+ ///
+ /// A plain identifier like `x` is a path of length 1.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprPath {
+ pub attrs: Vec<Attribute>,
+ pub qself: Option<QSelf>,
+ pub path: Path,
+ }
+ }
+
+ ast_struct! {
+ /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRange #full {
+ pub attrs: Vec<Attribute>,
+ pub from: Option<Box<Expr>>,
+ pub limits: RangeLimits,
+ pub to: Option<Box<Expr>>,
+ }
+ }
+
+ ast_struct! {
+ /// A referencing operation: `&a` or `&mut a`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReference #full {
+ pub attrs: Vec<Attribute>,
+ pub and_token: Token![&],
+ pub raw: Reserved,
+ pub mutability: Option<Token![mut]>,
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// An array literal constructed from one repeated element: `[0u8; N]`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprRepeat #full {
+ pub attrs: Vec<Attribute>,
+ pub bracket_token: token::Bracket,
+ pub expr: Box<Expr>,
+ pub semi_token: Token![;],
+ pub len: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// A `return`, with an optional value to be returned.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprReturn #full {
+ pub attrs: Vec<Attribute>,
+ pub return_token: Token![return],
+ pub expr: Option<Box<Expr>>,
+ }
+ }
+
+ ast_struct! {
+ /// A struct literal expression: `Point { x: 1, y: 1 }`.
+ ///
+ /// The `rest` provides the value of the remaining fields as in `S { a:
+ /// 1, b: 1, ..rest }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprStruct #full {
+ pub attrs: Vec<Attribute>,
+ pub path: Path,
+ pub brace_token: token::Brace,
+ pub fields: Punctuated<FieldValue, Token![,]>,
+ pub dot2_token: Option<Token![..]>,
+ pub rest: Option<Box<Expr>>,
+ }
+ }
+
+ ast_struct! {
+ /// A try-expression: `expr?`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTry #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub question_token: Token![?],
+ }
+ }
+
+ ast_struct! {
+ /// A try block: `try { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTryBlock #full {
+ pub attrs: Vec<Attribute>,
+ pub try_token: Token![try],
+ pub block: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A tuple expression: `(a, b, c, d)`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprTuple #full {
+ pub attrs: Vec<Attribute>,
+ pub paren_token: token::Paren,
+ pub elems: Punctuated<Expr, Token![,]>,
+ }
+ }
+
+ ast_struct! {
+ /// A type ascription expression: `foo: f64`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprType #full {
+ pub attrs: Vec<Attribute>,
+ pub expr: Box<Expr>,
+ pub colon_token: Token![:],
+ pub ty: Box<Type>,
+ }
+ }
+
+ ast_struct! {
+ /// A unary operation: `!x`, `*x`.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or
++ /// *This type is available only if Syn is built with the `"derive"` or
+ /// `"full"` feature.*
+ pub struct ExprUnary {
+ pub attrs: Vec<Attribute>,
+ pub op: UnOp,
+ pub expr: Box<Expr>,
+ }
+ }
+
+ ast_struct! {
+ /// An unsafe block: `unsafe { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprUnsafe #full {
+ pub attrs: Vec<Attribute>,
+ pub unsafe_token: Token![unsafe],
+ pub block: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A while loop: `while expr { ... }`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprWhile #full {
+ pub attrs: Vec<Attribute>,
+ pub label: Option<Label>,
+ pub while_token: Token![while],
+ pub cond: Box<Expr>,
+ pub body: Block,
+ }
+ }
+
+ ast_struct! {
+ /// A yield expression: `yield expr`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct ExprYield #full {
+ pub attrs: Vec<Attribute>,
+ pub yield_token: Token![yield],
+ pub expr: Option<Box<Expr>>,
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+-impl Eq for Expr {}
+-
+-#[cfg(feature = "extra-traits")]
+-impl PartialEq for Expr {
+- fn eq(&self, other: &Self) -> bool {
+- match (self, other) {
+- (Expr::Array(this), Expr::Array(other)) => this == other,
+- (Expr::Assign(this), Expr::Assign(other)) => this == other,
+- (Expr::AssignOp(this), Expr::AssignOp(other)) => this == other,
+- (Expr::Async(this), Expr::Async(other)) => this == other,
+- (Expr::Await(this), Expr::Await(other)) => this == other,
+- (Expr::Binary(this), Expr::Binary(other)) => this == other,
+- (Expr::Block(this), Expr::Block(other)) => this == other,
+- (Expr::Box(this), Expr::Box(other)) => this == other,
+- (Expr::Break(this), Expr::Break(other)) => this == other,
+- (Expr::Call(this), Expr::Call(other)) => this == other,
+- (Expr::Cast(this), Expr::Cast(other)) => this == other,
+- (Expr::Closure(this), Expr::Closure(other)) => this == other,
+- (Expr::Continue(this), Expr::Continue(other)) => this == other,
+- (Expr::Field(this), Expr::Field(other)) => this == other,
+- (Expr::ForLoop(this), Expr::ForLoop(other)) => this == other,
+- (Expr::Group(this), Expr::Group(other)) => this == other,
+- (Expr::If(this), Expr::If(other)) => this == other,
+- (Expr::Index(this), Expr::Index(other)) => this == other,
+- (Expr::Let(this), Expr::Let(other)) => this == other,
+- (Expr::Lit(this), Expr::Lit(other)) => this == other,
+- (Expr::Loop(this), Expr::Loop(other)) => this == other,
+- (Expr::Macro(this), Expr::Macro(other)) => this == other,
+- (Expr::Match(this), Expr::Match(other)) => this == other,
+- (Expr::MethodCall(this), Expr::MethodCall(other)) => this == other,
+- (Expr::Paren(this), Expr::Paren(other)) => this == other,
+- (Expr::Path(this), Expr::Path(other)) => this == other,
+- (Expr::Range(this), Expr::Range(other)) => this == other,
+- (Expr::Reference(this), Expr::Reference(other)) => this == other,
+- (Expr::Repeat(this), Expr::Repeat(other)) => this == other,
+- (Expr::Return(this), Expr::Return(other)) => this == other,
+- (Expr::Struct(this), Expr::Struct(other)) => this == other,
+- (Expr::Try(this), Expr::Try(other)) => this == other,
+- (Expr::TryBlock(this), Expr::TryBlock(other)) => this == other,
+- (Expr::Tuple(this), Expr::Tuple(other)) => this == other,
+- (Expr::Type(this), Expr::Type(other)) => this == other,
+- (Expr::Unary(this), Expr::Unary(other)) => this == other,
+- (Expr::Unsafe(this), Expr::Unsafe(other)) => this == other,
+- (Expr::Verbatim(this), Expr::Verbatim(other)) => {
+- TokenStreamHelper(this) == TokenStreamHelper(other)
+- }
+- (Expr::While(this), Expr::While(other)) => this == other,
+- (Expr::Yield(this), Expr::Yield(other)) => this == other,
+- _ => false,
+- }
+- }
+-}
+-
+-#[cfg(feature = "extra-traits")]
+-impl Hash for Expr {
+- fn hash<H>(&self, hash: &mut H)
+- where
+- H: Hasher,
+- {
+- match self {
+- Expr::Array(expr) => {
+- hash.write_u8(0);
+- expr.hash(hash);
+- }
+- Expr::Assign(expr) => {
+- hash.write_u8(1);
+- expr.hash(hash);
+- }
+- Expr::AssignOp(expr) => {
+- hash.write_u8(2);
+- expr.hash(hash);
+- }
+- Expr::Async(expr) => {
+- hash.write_u8(3);
+- expr.hash(hash);
+- }
+- Expr::Await(expr) => {
+- hash.write_u8(4);
+- expr.hash(hash);
+- }
+- Expr::Binary(expr) => {
+- hash.write_u8(5);
+- expr.hash(hash);
+- }
+- Expr::Block(expr) => {
+- hash.write_u8(6);
+- expr.hash(hash);
+- }
+- Expr::Box(expr) => {
+- hash.write_u8(7);
+- expr.hash(hash);
+- }
+- Expr::Break(expr) => {
+- hash.write_u8(8);
+- expr.hash(hash);
+- }
+- Expr::Call(expr) => {
+- hash.write_u8(9);
+- expr.hash(hash);
+- }
+- Expr::Cast(expr) => {
+- hash.write_u8(10);
+- expr.hash(hash);
+- }
+- Expr::Closure(expr) => {
+- hash.write_u8(11);
+- expr.hash(hash);
+- }
+- Expr::Continue(expr) => {
+- hash.write_u8(12);
+- expr.hash(hash);
+- }
+- Expr::Field(expr) => {
+- hash.write_u8(13);
+- expr.hash(hash);
+- }
+- Expr::ForLoop(expr) => {
+- hash.write_u8(14);
+- expr.hash(hash);
+- }
+- Expr::Group(expr) => {
+- hash.write_u8(15);
+- expr.hash(hash);
+- }
+- Expr::If(expr) => {
+- hash.write_u8(16);
+- expr.hash(hash);
+- }
+- Expr::Index(expr) => {
+- hash.write_u8(17);
+- expr.hash(hash);
+- }
+- Expr::Let(expr) => {
+- hash.write_u8(18);
+- expr.hash(hash);
+- }
+- Expr::Lit(expr) => {
+- hash.write_u8(19);
+- expr.hash(hash);
+- }
+- Expr::Loop(expr) => {
+- hash.write_u8(20);
+- expr.hash(hash);
+- }
+- Expr::Macro(expr) => {
+- hash.write_u8(21);
+- expr.hash(hash);
+- }
+- Expr::Match(expr) => {
+- hash.write_u8(22);
+- expr.hash(hash);
+- }
+- Expr::MethodCall(expr) => {
+- hash.write_u8(23);
+- expr.hash(hash);
+- }
+- Expr::Paren(expr) => {
+- hash.write_u8(24);
+- expr.hash(hash);
+- }
+- Expr::Path(expr) => {
+- hash.write_u8(25);
+- expr.hash(hash);
+- }
+- Expr::Range(expr) => {
+- hash.write_u8(26);
+- expr.hash(hash);
+- }
+- Expr::Reference(expr) => {
+- hash.write_u8(27);
+- expr.hash(hash);
+- }
+- Expr::Repeat(expr) => {
+- hash.write_u8(28);
+- expr.hash(hash);
+- }
+- Expr::Return(expr) => {
+- hash.write_u8(29);
+- expr.hash(hash);
+- }
+- Expr::Struct(expr) => {
+- hash.write_u8(30);
+- expr.hash(hash);
+- }
+- Expr::Try(expr) => {
+- hash.write_u8(31);
+- expr.hash(hash);
+- }
+- Expr::TryBlock(expr) => {
+- hash.write_u8(32);
+- expr.hash(hash);
+- }
+- Expr::Tuple(expr) => {
+- hash.write_u8(33);
+- expr.hash(hash);
+- }
+- Expr::Type(expr) => {
+- hash.write_u8(34);
+- expr.hash(hash);
+- }
+- Expr::Unary(expr) => {
+- hash.write_u8(35);
+- expr.hash(hash);
+- }
+- Expr::Unsafe(expr) => {
+- hash.write_u8(36);
+- expr.hash(hash);
+- }
+- Expr::Verbatim(expr) => {
+- hash.write_u8(37);
+- TokenStreamHelper(expr).hash(hash);
+- }
+- Expr::While(expr) => {
+- hash.write_u8(38);
+- expr.hash(hash);
+- }
+- Expr::Yield(expr) => {
+- hash.write_u8(39);
+- expr.hash(hash);
+- }
+- Expr::__Nonexhaustive => unreachable!(),
+- }
+- }
+-}
+-
+ impl Expr {
+ #[cfg(all(feature = "parsing", feature = "full"))]
+ pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
+ match self {
+ Expr::Box(ExprBox { attrs, .. })
+ | Expr::Array(ExprArray { attrs, .. })
+ | Expr::Call(ExprCall { attrs, .. })
+ | Expr::MethodCall(ExprMethodCall { attrs, .. })
+@@ -991,107 +768,145 @@ impl Expr {
+ }
+ }
+ }
+
+ ast_enum! {
+ /// A struct or tuple struct field accessed in a struct literal or field
+ /// expression.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+ pub enum Member {
+ /// A named field like `self.x`.
+ Named(Ident),
+ /// An unnamed field like `self.0`.
+ Unnamed(Index),
+ }
+ }
+
++impl Eq for Member {}
++
++impl PartialEq for Member {
++ fn eq(&self, other: &Self) -> bool {
++ match (self, other) {
++ (Member::Named(this), Member::Named(other)) => this == other,
++ (Member::Unnamed(this), Member::Unnamed(other)) => this == other,
++ _ => false,
++ }
++ }
++}
++
++impl Hash for Member {
++ fn hash<H: Hasher>(&self, state: &mut H) {
++ match self {
++ Member::Named(m) => m.hash(state),
++ Member::Unnamed(m) => m.hash(state),
++ }
++ }
++}
++
++#[cfg(feature = "printing")]
++impl IdentFragment for Member {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ match self {
++ Member::Named(m) => Display::fmt(m, formatter),
++ Member::Unnamed(m) => Display::fmt(&m.index, formatter),
++ }
++ }
++
++ fn span(&self) -> Option<Span> {
++ match self {
++ Member::Named(m) => Some(m.span()),
++ Member::Unnamed(m) => Some(m.span),
++ }
++ }
++}
++
+ ast_struct! {
+ /// The index of an unnamed tuple struct field.
+ ///
+- /// *This type is available if Syn is built with the `"derive"` or `"full"`
++ /// *This type is available only if Syn is built with the `"derive"` or `"full"`
+ /// feature.*
+- pub struct Index #manual_extra_traits {
++ pub struct Index {
+ pub index: u32,
+ pub span: Span,
+ }
+ }
+
+ impl From<usize> for Index {
+ fn from(index: usize) -> Index {
+ assert!(index < u32::max_value() as usize);
+ Index {
+ index: index as u32,
+ span: Span::call_site(),
+ }
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Eq for Index {}
+
+-#[cfg(feature = "extra-traits")]
+ impl PartialEq for Index {
+ fn eq(&self, other: &Self) -> bool {
+ self.index == other.index
+ }
+ }
+
+-#[cfg(feature = "extra-traits")]
+ impl Hash for Index {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state);
+ }
+ }
+
+-#[cfg(feature = "full")]
+-ast_struct! {
+- #[derive(Default)]
+- pub struct Reserved {
+- private: (),
++#[cfg(feature = "printing")]
++impl IdentFragment for Index {
++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
++ Display::fmt(&self.index, formatter)
++ }
++
++ fn span(&self) -> Option<Span> {
++ Some(self.span)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_struct! {
+ /// The `::<>` explicit type parameters passed to a method call:
+ /// `parse::<u64>()`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct MethodTurbofish {
+ pub colon2_token: Token![::],
+ pub lt_token: Token![<],
+ pub args: Punctuated<GenericMethodArgument, Token![,]>,
+ pub gt_token: Token![>],
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_enum! {
+ /// An individual generic argument to a method, like `T`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum GenericMethodArgument {
+ /// A type argument.
+ Type(Type),
+ /// A const expression. Must be inside of a block.
+ ///
+ /// NOTE: Identity expressions are represented as Type arguments, as
+ /// they are indistinguishable syntactically.
+ Const(Expr),
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_struct! {
+ /// A field-value pair in a struct literal.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct FieldValue {
+ /// Attributes tagged on the field.
+ pub attrs: Vec<Attribute>,
+
+ /// Name or index of the field.
+ pub member: Member,
+
+ /// The colon in `Struct { x: x }`. If written in shorthand like
+@@ -1102,17 +917,17 @@ ast_struct! {
+ pub expr: Expr,
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_struct! {
+ /// A lifetime labeling a `for`, `while`, or `loop`.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Label {
+ pub name: Lifetime,
+ pub colon_token: Token![:],
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_struct! {
+@@ -1129,45 +944,44 @@ ast_struct! {
+ /// }
+ /// // ...
+ /// # _ => {}
+ /// }
+ /// # false
+ /// # }
+ /// ```
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub struct Arm {
+ pub attrs: Vec<Attribute>,
+ pub pat: Pat,
+ pub guard: Option<(Token![if], Box<Expr>)>,
+ pub fat_arrow_token: Token![=>],
+ pub body: Box<Expr>,
+ pub comma: Option<Token![,]>,
+ }
+ }
+
+ #[cfg(feature = "full")]
+ ast_enum! {
+ /// Limit types of a range, inclusive or exclusive.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
+- #[cfg_attr(feature = "clone-impls", derive(Copy))]
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ pub enum RangeLimits {
+ /// Inclusive at the beginning, exclusive at the end.
+ HalfOpen(Token![..]),
+ /// Inclusive at the beginning and end.
+ Closed(Token![..=]),
+ }
+ }
+
+ #[cfg(any(feature = "parsing", feature = "printing"))]
+ #[cfg(feature = "full")]
+ pub(crate) fn requires_terminator(expr: &Expr) -> bool {
+- // see https://github.com/rust-lang/rust/blob/eb8f2586e/src/libsyntax/parse/classify.rs#L17-L37
++ // see https://github.com/rust-lang/rust/blob/2679c38fc/src/librustc_ast/util/classify.rs#L7-L25
+ match *expr {
+ Expr::Unsafe(..)
+ | Expr::Block(..)
+ | Expr::If(..)
+ | Expr::Match(..)
+ | Expr::While(..)
+ | Expr::Loop(..)
+ | Expr::ForLoop(..)
+@@ -1178,26 +992,27 @@ pub(crate) fn requires_terminator(expr:
+ }
+
+ #[cfg(feature = "parsing")]
+ pub(crate) mod parsing {
+ use super::*;
+
+ use crate::parse::{Parse, ParseStream, Result};
+ use crate::path;
++ use std::cmp::Ordering;
++
++ crate::custom_keyword!(raw);
+
+ // When we're parsing expressions which occur before blocks, like in an if
+ // statement's condition, we cannot parse a struct literal.
+ //
+ // Struct literals are ambiguous in certain positions
+ // https://github.com/rust-lang/rfcs/pull/92
+- #[derive(Copy, Clone)]
+ pub struct AllowStruct(bool);
+
+- #[derive(Copy, Clone, PartialEq, PartialOrd)]
+ enum Precedence {
+ Any,
+ Assign,
+ Range,
+ Or,
+ And,
+ Compare,
+ BitOr,
+@@ -1241,19 +1056,131 @@ pub(crate) mod parsing {
+ }
+
+ impl Parse for Expr {
+ fn parse(input: ParseStream) -> Result<Self> {
+ ambiguous_expr(input, AllowStruct(true))
+ }
+ }
+
+- #[cfg(feature = "full")]
+- fn expr_no_struct(input: ParseStream) -> Result<Expr> {
+- ambiguous_expr(input, AllowStruct(false))
++ impl Expr {
++ /// An alternative to the primary `Expr::parse` parser (from the
++ /// [`Parse`] trait) for ambiguous syntactic positions in which a
++ /// trailing brace should not be taken as part of the expression.
++ ///
++ /// Rust grammar has an ambiguity where braces sometimes turn a path
++ /// expression into a struct initialization and sometimes do not. In the
++ /// following code, the expression `S {}` is one expression. Presumably
++ /// there is an empty struct `struct S {}` defined somewhere which it is
++ /// instantiating.
++ ///
++ /// ```
++ /// # struct S;
++ /// # impl std::ops::Deref for S {
++ /// # type Target = bool;
++ /// # fn deref(&self) -> &Self::Target {
++ /// # &true
++ /// # }
++ /// # }
++ /// let _ = *S {};
++ ///
++ /// // parsed by rustc as: `*(S {})`
++ /// ```
++ ///
++ /// We would want to parse the above using `Expr::parse` after the `=`
++ /// token.
++ ///
++ /// But in the following, `S {}` is *not* a struct init expression.
++ ///
++ /// ```
++ /// # const S: &bool = &true;
++ /// if *S {} {}
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (*S) {
++ /// // /* empty block */
++ /// // }
++ /// // {
++ /// // /* another empty block */
++ /// // }
++ /// ```
++ ///
++ /// For that reason we would want to parse if-conditions using
++ /// `Expr::parse_without_eager_brace` after the `if` token. Same for
++ /// similar syntactic positions such as the condition expr after a
++ /// `while` token or the expr at the top of a `match`.
++ ///
++ /// The Rust grammar's choices around which way this ambiguity is
++ /// resolved at various syntactic positions is fairly arbitrary. Really
++ /// either parse behavior could work in most positions, and language
++ /// designers just decide each case based on which is more likely to be
++ /// what the programmer had in mind most of the time.
++ ///
++ /// ```
++ /// # struct S;
++ /// # fn doc() -> S {
++ /// if return S {} {}
++ /// # unreachable!()
++ /// # }
++ ///
++ /// // parsed by rustc as:
++ /// //
++ /// // if (return (S {})) {
++ /// // }
++ /// //
++ /// // but could equally well have been this other arbitrary choice:
++ /// //
++ /// // if (return S) {
++ /// // }
++ /// // {}
++ /// ```
++ ///
++ /// Note the grammar ambiguity on trailing braces is distinct from
++ /// precedence and is not captured by assigning a precedence level to
++ /// the braced struct init expr in relation to other operators. This can
++ /// be illustrated by `return 0..S {}` vs `match 0..S {}`. The former
++ /// parses as `return (0..(S {}))` implying tighter precedence for
++ /// struct init than `..`, while the latter parses as `match (0..S) {}`
++ /// implying tighter precedence for `..` than struct init, a
++ /// contradiction.
++ #[cfg(feature = "full")]
++ pub fn parse_without_eager_brace(input: ParseStream) -> Result<Expr> {
++ ambiguous_expr(input, AllowStruct(false))
++ }
++ }
++
++ impl Copy for AllowStruct {}
++
++ impl Clone for AllowStruct {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl Copy for Precedence {}
++
++ impl Clone for Precedence {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
++
++ impl PartialEq for Precedence {
++ fn eq(&self, other: &Self) -> bool {
++ *self as u8 == *other as u8
++ }
++ }
++
++ impl PartialOrd for Precedence {
++ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
++ let this = *self as u8;
++ let other = *other as u8;
++ Some(this.cmp(&other))
++ }
+ }
+
+ #[cfg(feature = "full")]
+ fn parse_expr(
+ input: ParseStream,
+ mut lhs: Expr,
+ allow_struct: AllowStruct,
+ base: Precedence,
+@@ -1425,88 +1352,114 @@ pub(crate) mod parsing {
+ }
+
+ // Parse an arbitrary expression.
+ fn ambiguous_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+ let lhs = unary_expr(input, allow_struct)?;
+ parse_expr(input, lhs, allow_struct, Precedence::Any)
+ }
+
++ #[cfg(feature = "full")]
++ fn expr_attrs(input: ParseStream) -> Result<Vec<Attribute>> {
++ let mut attrs = Vec::new();
++ loop {
++ if input.peek(token::Group) {
++ let ahead = input.fork();
++ let group = crate::group::parse_group(&ahead)?;
++ if !group.content.peek(Token![#]) || group.content.peek2(Token![!]) {
++ break;
++ }
++ let attr = group.content.call(attr::parsing::single_parse_outer)?;
++ if !group.content.is_empty() {
++ break;
++ }
++ attrs.push(attr);
++ } else if input.peek(Token![#]) {
++ attrs.push(input.call(attr::parsing::single_parse_outer)?);
++ } else {
++ break;
++ }
++ }
++ Ok(attrs)
++ }
++
+ // <UnOp> <trailer>
+ // & <trailer>
+ // &mut <trailer>
+ // box <trailer>
+ #[cfg(feature = "full")]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![&])
+- || ahead.peek(Token![box])
+- || ahead.peek(Token![*])
+- || ahead.peek(Token![!])
+- || ahead.peek(Token![-])
+- {
+- let attrs = input.call(Attribute::parse_outer)?;
+- if input.peek(Token![&]) {
++ let begin = input.fork();
++ let attrs = input.call(expr_attrs)?;
++ if input.peek(Token![&]) {
++ let and_token: Token![&] = input.parse()?;
++ let raw: Option<raw> =
++ if input.peek(raw) && (input.peek2(Token![mut]) || input.peek2(Token![const])) {
++ Some(input.parse()?)
++ } else {
++ None
++ };
++ let mutability: Option<Token![mut]> = input.parse()?;
++ if raw.is_some() && mutability.is_none() {
++ input.parse::<Token![const]>()?;
++ }
++ let expr = Box::new(unary_expr(input, allow_struct)?);
++ if raw.is_some() {
++ Ok(Expr::Verbatim(verbatim::between(begin, input)))
++ } else {
+ Ok(Expr::Reference(ExprReference {
+ attrs,
+- and_token: input.parse()?,
++ and_token,
+ raw: Reserved::default(),
+- mutability: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else if input.peek(Token![box]) {
+- Ok(Expr::Box(ExprBox {
+- attrs,
+- box_token: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
+- }))
+- } else {
+- Ok(Expr::Unary(ExprUnary {
+- attrs,
+- op: input.parse()?,
+- expr: Box::new(unary_expr(input, allow_struct)?),
++ mutability,
++ expr,
+ }))
+ }
++ } else if input.peek(Token![box]) {
++ Ok(Expr::Box(ExprBox {
++ attrs,
++ box_token: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
++ } else if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
++ Ok(Expr::Unary(ExprUnary {
++ attrs,
++ op: input.parse()?,
++ expr: Box::new(unary_expr(input, allow_struct)?),
++ }))
+ } else {
+- trailer_expr(input, allow_struct)
++ trailer_expr(attrs, input, allow_struct)
+ }
+ }
+
+ #[cfg(not(feature = "full"))]
+ fn unary_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- // TODO: optimize using advance_to
+- let ahead = input.fork();
+- ahead.call(Attribute::parse_outer)?;
+- if ahead.peek(Token![*]) || ahead.peek(Token![!]) || ahead.peek(Token![-]) {
++ if input.peek(Token![*]) || input.peek(Token![!]) || input.peek(Token![-]) {
+ Ok(Expr::Unary(ExprUnary {
+- attrs: input.call(Attribute::parse_outer)?,
++ attrs: Vec::new(),
+ op: input.parse()?,
+ expr: Box::new(unary_expr(input, allow_struct)?),
+ }))
+ } else {
+ trailer_expr(input, allow_struct)
+ }
+ }
+
+ // <atom> (..<args>) ...
+ // <atom> . <ident> (..<args>) ...
+ // <atom> . <ident> ...
+ // <atom> . <lit> ...
+ // <atom> [ <expr> ] ...
+ // <atom> ? ...
+ #[cfg(feature = "full")]
+- fn trailer_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
+- return input.call(expr_group).map(Expr::Group);
+- }
+-
+- let outer_attrs = input.call(Attribute::parse_outer)?;
+-
++ fn trailer_expr(
++ outer_attrs: Vec<Attribute>,
++ input: ParseStream,
++ allow_struct: AllowStruct,
++ ) -> Result<Expr> {
+ let atom = atom_expr(input, allow_struct)?;
+ let mut e = trailer_helper(input, atom)?;
+
+ let inner_attrs = e.replace_attrs(Vec::new());
+ let attrs = private::attrs(outer_attrs, inner_attrs);
+ e.replace_attrs(attrs);
+ Ok(e)
+ }
+@@ -1518,28 +1471,36 @@ pub(crate) mod parsing {
+ let content;
+ e = Expr::Call(ExprCall {
+ attrs: Vec::new(),
+ func: Box::new(e),
+ paren_token: parenthesized!(content in input),
+ args: content.parse_terminated(Expr::parse)?,
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) {
+- let dot_token: Token![.] = input.parse()?;
++ let mut dot_token: Token![.] = input.parse()?;
+
+- if input.peek(token::Await) {
++ let await_token: Option<token::Await> = input.parse()?;
++ if let Some(await_token) = await_token {
+ e = Expr::Await(ExprAwait {
+ attrs: Vec::new(),
+ base: Box::new(e),
+ dot_token,
+- await_token: input.parse()?,
++ await_token,
+ });
+ continue;
+ }
+
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
++
+ let member: Member = input.parse()?;
+ let turbofish = if member.is_named() && input.peek(Token![::]) {
+ Some(MethodTurbofish {
+ colon2_token: input.parse()?,
+ lt_token: input.parse()?,
+ args: {
+ let mut args = Punctuated::new();
+ loop {
+@@ -1615,20 +1576,27 @@ pub(crate) mod parsing {
+ e = Expr::Call(ExprCall {
+ attrs: Vec::new(),
+ func: Box::new(e),
+ paren_token: parenthesized!(content in input),
+ args: content.parse_terminated(Expr::parse)?,
+ });
+ } else if input.peek(Token![.]) && !input.peek(Token![..]) && !input.peek2(token::Await)
+ {
++ let mut dot_token: Token![.] = input.parse()?;
++ let float_token: Option<LitFloat> = input.parse()?;
++ if let Some(float_token) = float_token {
++ if multi_index(&mut e, &mut dot_token, float_token)? {
++ continue;
++ }
++ }
+ e = Expr::Field(ExprField {
+ attrs: Vec::new(),
+ base: Box::new(e),
+- dot_token: input.parse()?,
++ dot_token,
+ member: input.parse()?,
+ });
+ } else if input.peek(token::Bracket) {
+ let content;
+ e = Expr::Index(ExprIndex {
+ attrs: Vec::new(),
+ expr: Box::new(e),
+ bracket_token: bracketed!(content in input),
+@@ -1641,17 +1609,21 @@ pub(crate) mod parsing {
+
+ Ok(e)
+ }
+
+ // Parse all atomic expressions which don't have to worry about precedence
+ // interactions, as they are fully contained.
+ #[cfg(feature = "full")]
+ fn atom_expr(input: ParseStream, allow_struct: AllowStruct) -> Result<Expr> {
+- if input.peek(token::Group) {
++ if input.peek(token::Group)
++ && !input.peek2(Token![::])
++ && !input.peek2(Token![!])
++ && !input.peek2(token::Brace)
++ {
+ input.call(expr_group).map(Expr::Group)
+ } else if input.peek(Lit) {
+ input.parse().map(Expr::Lit)
+ } else if input.peek(Token![async])
+ && (input.peek2(token::Brace) || input.peek2(Token![move]) && input.peek3(token::Brace))
+ {
+ input.call(expr_async).map(Expr::Async)
+ } else if input.peek(Token![try]) && input.peek2(token::Brace) {
+@@ -1663,17 +1635,16 @@ pub(crate) mod parsing {
+ {
+ expr_closure(input, allow_struct).map(Expr::Closure)
+ } else if input.peek(Ident)
+ || input.peek(Token![::])
+ || input.peek(Token![<])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ path_or_macro_or_struct(input, allow_struct)
+ } else if input.peek(token::Paren) {
+ paren_or_tuple(input)
+ } else if input.peek(Token![break]) {
+ expr_break(input, allow_struct).map(Expr::Break)
+ } else if input.peek(Token![continue]) {
+@@ -1735,17 +1706,16 @@ pub(crate) mod parsing {
+ } else if input.peek(token::Paren) {
+ input.call(expr_paren).map(Expr::Paren)
+ } else if input.peek(Ident)
+ || input.peek(Token![::])
+ || input.peek(Token![<])
+ || input.peek(Token![self])
+ || input.peek(Token![Self])
+ || input.peek(Token![super])
+- || input.peek(Token![extern])
+ || input.peek(Token![crate])
+ {
+ input.parse().map(Expr::Path)
+ } else {
+ Err(input.error("unsupported expression; enable syn's features=[\"full\"]"))
+ }
+ }
+
+@@ -1873,17 +1843,17 @@ pub(crate) mod parsing {
+ }))
+ } else {
+ Err(content.error("expected `,` or `;`"))
+ }
+ }
+
+ #[cfg(feature = "full")]
+ pub(crate) fn expr_early(input: ParseStream) -> Result<Expr> {
+- let mut attrs = input.call(Attribute::parse_outer)?;
++ let mut attrs = input.call(expr_attrs)?;
+ let mut expr = if input.peek(Token![if]) {
+ Expr::If(input.parse()?)
+ } else if input.peek(Token![while]) {
+ Expr::While(input.parse()?)
+ } else if input.peek(Token![for]) {
+ Expr::ForLoop(input.parse()?)
+ } else if input.peek(Token![loop]) {
+ Expr::Loop(input.parse()?)
+@@ -1900,17 +1870,17 @@ pub(crate) mod parsing {
+ let mut expr = unary_expr(input, allow_struct)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+ expr.replace_attrs(attrs);
+
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ };
+
+- if input.peek(Token![.]) || input.peek(Token![?]) {
++ if input.peek(Token![.]) && !input.peek(Token![..]) || input.peek(Token![?]) {
+ expr = trailer_helper(input, expr)?;
+
+ attrs.extend(expr.replace_attrs(Vec::new()));
+ expr.replace_attrs(attrs);
+
+ let allow_struct = AllowStruct(true);
+ return parse_expr(input, expr, allow_struct, Precedence::Any);
+ }
+@@ -1946,63 +1916,48 @@ pub(crate) mod parsing {
+ attrs: Vec::new(),
+ paren_token: parenthesized!(content in input),
+ expr: content.parse()?,
+ })
+ }
+
+ #[cfg(feature = "full")]
+ fn generic_method_argument(input: ParseStream) -> Result<GenericMethodArgument> {
+- // TODO parse const generics as well
++ if input.peek(Lit) {
++ let lit = input.parse()?;
++ return Ok(GenericMethodArgument::Const(Expr::Lit(lit)));
++ }
++
++ if input.peek(token::Brace) {
++ let block = input.call(expr::parsing::expr_block)?;
++ return Ok(GenericMethodArgument::Const(Expr::Block(block)));
++ }
++
+ input.parse().map(GenericMethodArgument::Type)
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_let(input: ParseStream) -> Result<ExprLet> {
+ Ok(ExprLet {
+ attrs: Vec::new(),
+ let_token: input.parse()?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some()
+- || input.peek(Token![|]) && !input.peek(Token![||]) && !input.peek(Token![|=])
+- {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|])
+- && !input.peek(Token![||])
+- && !input.peek(Token![|=])
+- {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ eq_token: input.parse()?,
+- expr: Box::new(input.call(expr_no_struct)?),
++ expr: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ })
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprIf {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ Ok(ExprIf {
+- attrs: Vec::new(),
++ attrs,
+ if_token: input.parse()?,
+- cond: Box::new(input.call(expr_no_struct)?),
++ cond: Box::new(input.call(Expr::parse_without_eager_brace)?),
+ then_branch: input.parse()?,
+ else_branch: {
+ if input.peek(Token![else]) {
+ Some(input.call(else_block)?)
+ } else {
+ None
+ }
+ },
+@@ -2028,94 +1983,81 @@ pub(crate) mod parsing {
+ };
+
+ Ok((else_token, Box::new(else_branch)))
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprForLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let for_token: Token![for] = input.parse()?;
+
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let mut pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- pat = Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- });
+- }
++ let pat = pat::parsing::multi_pat_with_leading_vert(input)?;
+
+ let in_token: Token![in] = input.parse()?;
+- let expr: Expr = input.call(expr_no_struct)?;
++ let expr: Expr = input.call(Expr::parse_without_eager_brace)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprForLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ for_token,
+ pat,
+ in_token,
+ expr: Box::new(expr),
+ body: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprLoop {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let loop_token: Token![loop] = input.parse()?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprLoop {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ loop_token,
+ body: Block { brace_token, stmts },
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprMatch {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let match_token: Token![match] = input.parse()?;
+- let expr = expr_no_struct(input)?;
++ let expr = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+
+ let mut arms = Vec::new();
+ while !content.is_empty() {
+ arms.push(content.call(Arm::parse)?);
+ }
+
+ Ok(ExprMatch {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ match_token,
+ expr: Box::new(expr),
+ brace_token,
+ arms,
+ })
+ }
+ }
+
+@@ -2300,27 +2242,28 @@ pub(crate) mod parsing {
+ }
+ Ok(pat)
+ }
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for ExprWhile {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let outer_attrs = input.call(Attribute::parse_outer)?;
+ let label: Option<Label> = input.parse()?;
+ let while_token: Token![while] = input.parse()?;
+- let cond = expr_no_struct(input)?;
++ let cond = Expr::parse_without_eager_brace(input)?;
+
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
+ let stmts = content.call(Block::parse_within)?;
+
+ Ok(ExprWhile {
+- attrs: inner_attrs,
++ attrs: private::attrs(outer_attrs, inner_attrs),
+ label,
+ while_token,
+ cond: Box::new(cond),
+ body: Block { brace_token, stmts },
+ })
+ }
+ }
+
+@@ -2394,16 +2337,17 @@ pub(crate) mod parsing {
+ }
+ },
+ })
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for FieldValue {
+ fn parse(input: ParseStream) -> Result<Self> {
++ let attrs = input.call(Attribute::parse_outer)?;
+ let member: Member = input.parse()?;
+ let (colon_token, value) = if input.peek(Token![:]) || !member.is_named() {
+ let colon_token: Token![:] = input.parse()?;
+ let value: Expr = input.parse()?;
+ (Some(colon_token), value)
+ } else if let Member::Named(ident) = &member {
+ let value = Expr::Path(ExprPath {
+ attrs: Vec::new(),
+@@ -2411,73 +2355,63 @@ pub(crate) mod parsing {
+ path: Path::from(ident.clone()),
+ });
+ (None, value)
+ } else {
+ unreachable!()
+ };
+
+ Ok(FieldValue {
+- attrs: Vec::new(),
++ attrs,
+ member,
+ colon_token,
+ expr: value,
+ })
+ }
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_struct_helper(
+ input: ParseStream,
+ outer_attrs: Vec<Attribute>,
+ path: Path,
+ ) -> Result<ExprStruct> {
+ let content;
+ let brace_token = braced!(content in input);
+ let inner_attrs = content.call(Attribute::parse_inner)?;
++ let attrs = private::attrs(outer_attrs, inner_attrs);
+
+ let mut fields = Punctuated::new();
+- loop {
+- let attrs = content.call(Attribute::parse_outer)?;
+- // TODO: optimize using advance_to
+- if content.fork().parse::<Member>().is_err() {
+- if attrs.is_empty() {
+- break;
+- } else {
+- return Err(content.error("expected struct field"));
+- }
++ while !content.is_empty() {
++ if content.peek(Token![..]) {
++ return Ok(ExprStruct {
++ attrs,
++ brace_token,
++ path,
++ fields,
++ dot2_token: Some(content.parse()?),
++ rest: Some(Box::new(content.parse()?)),
++ });
+ }
+
+- fields.push(FieldValue {
+- attrs,
+- ..content.parse()?
+- });
+-
+- if !content.peek(Token![,]) {
++ fields.push(content.parse()?);
++ if content.is_empty() {
+ break;
+ }
+ let punct: Token![,] = content.parse()?;
+ fields.push_punct(punct);
+ }
+
+- let (dot2_token, rest) = if fields.empty_or_trailing() && content.peek(Token![..]) {
+- let dot2_token: Token![..] = content.parse()?;
+- let rest: Expr = content.parse()?;
+- (Some(dot2_token), Some(Box::new(rest)))
+- } else {
+- (None, None)
+- };
+-
+ Ok(ExprStruct {
+- attrs: private::attrs(outer_attrs, inner_attrs),
++ attrs,
+ brace_token,
+ path,
+ fields,
+- dot2_token,
+- rest,
++ dot2_token: None,
++ rest: None,
+ })
+ }
+
+ #[cfg(feature = "full")]
+ fn expr_unsafe(input: ParseStream) -> Result<ExprUnsafe> {
+ let unsafe_token: Token![unsafe] = input.parse()?;
+
+ let content;
+@@ -2572,37 +2506,17 @@ pub(crate) mod parsing {
+ }
+
+ #[cfg(feature = "full")]
+ impl Parse for Arm {
+ fn parse(input: ParseStream) -> Result<Arm> {
+ let requires_comma;
+ Ok(Arm {
+ attrs: input.call(Attribute::parse_outer)?,
+- pat: {
+- let leading_vert: Option<Token![|]> = input.parse()?;
+- let pat: Pat = input.parse()?;
+- if leading_vert.is_some() || input.peek(Token![|]) {
+- let mut cases = Punctuated::new();
+- cases.push_value(pat);
+- while input.peek(Token![|]) {
+- let punct = input.parse()?;
+- cases.push_punct(punct);
+- let pat: Pat = input.parse()?;
+- cases.push_value(pat);
+- }
+- Pat::Or(PatOr {
+- attrs: Vec::new(),
+- leading_vert,
+- cases,
+- })
+- } else {
+- pat
+- }
+- },
++ pat: pat::parsing::multi_pat_with_leading_vert(input)?,
+ guard: {
+ if input.peek(Token![if]) {
+ let if_token: Token![if] = input.parse()?;
+ let guard: Expr = input.parse()?;
+ Some((if_token, Box::new(guard)))
+ } else {
+ None
+ }
+@@ -2636,16 +2550,36 @@ pub(crate) mod parsing {
+ span: lit.span(),
+ })
+ } else {
+ Err(Error::new(lit.span(), "expected unsuffixed integer"))
+ }
+ }
+ }
+
++ fn multi_index(e: &mut Expr, dot_token: &mut Token![.], float: LitFloat) -> Result<bool> {
++ let mut float_repr = float.to_string();
++ let trailing_dot = float_repr.ends_with('.');
++ if trailing_dot {
++ float_repr.truncate(float_repr.len() - 1);
++ }
++ for part in float_repr.split('.') {
++ let index = crate::parse_str(part).map_err(|err| Error::new(float.span(), err))?;
++ let base = mem::replace(e, Expr::__Nonexhaustive);
++ *e = Expr::Field(ExprField {
++ attrs: Vec::new(),
++ base: Box::new(base),
++ dot_token: Token![.](dot_token.span),
++ member: Member::Unnamed(index),
++ });
++ *dot_token = Token![.](float.span());
++ }
++ Ok(!trailing_dot)
++ }
++
+ #[cfg(feature = "full")]
+ impl Member {
+ fn is_named(&self) -> bool {
+ match *self {
+ Member::Named(_) => true,
+ Member::Unnamed(_) => false,
+ }
+ }
+diff --git a/third_party/rust/syn/src/ext.rs b/third_party/rust/syn/src/ext.rs
+--- a/third_party/rust/syn/src/ext.rs
++++ b/third_party/rust/syn/src/ext.rs
+@@ -1,27 +1,27 @@
+ //! Extension traits to provide parsing methods on foreign types.
+ //!
+-//! *This module is available if Syn is built with the `"parsing"` feature.*
++//! *This module is available only if Syn is built with the `"parsing"` feature.*
+
+ use proc_macro2::Ident;
+
+ use crate::parse::{ParseStream, Result};
+
+ use crate::buffer::Cursor;
+ use crate::parse::Peek;
+ use crate::sealed::lookahead;
+ use crate::token::CustomToken;
+
+ /// Additional methods for `Ident` not provided by proc-macro2 or libproc_macro.
+ ///
+ /// This trait is sealed and cannot be implemented for types outside of Syn. It
+ /// is implemented only for `proc_macro2::Ident`.
+ ///
+-/// *This trait is available if Syn is built with the `"parsing"` feature.*
++/// *This trait is available only if Syn is built with the `"parsing"` feature.*
+ pub trait IdentExt: Sized + private::Sealed {
+ /// Parses any identifier including keywords.
+ ///
+ /// This is useful when parsing macro input which allows Rust keywords as
+ /// identifiers.
+ ///
+ /// # Example
+ ///
+@@ -124,12 +124,18 @@ impl lookahead::Sealed for private::Peek
+
+ mod private {
+ use proc_macro2::Ident;
+
+ pub trait Sealed {}
+
+ impl Sealed for Ident {}
+
+- #[derive(Copy, Clone)]
+ pub struct PeekFn;
+ pub struct IdentAny;
++
++ impl Copy for PeekFn {}
++ impl Clone for PeekFn {
++ fn clone(&self) -> Self {
++ *self
++ }
++ }
+ }
+diff --git a/third_party/rust/syn/src/file.rs b/third_party/rust/syn/src/file.rs
+--- a/third_party/rust/syn/src/file.rs
++++ b/third_party/rust/syn/src/file.rs
+@@ -1,14 +1,14 @@
+ use super::*;
+
+ ast_struct! {
+ /// A complete file of Rust source code.
+ ///
+- /// *This type is available if Syn is built with the `"full"` feature.*
++ /// *This type is available only if Syn is built with the `"full"` feature.*
+ ///
+ /// # Example
+ ///
+ /// Parse a Rust source file into a `syn::File` and print out a debug
+ /// representation of the syntax tree.
+ ///
+ /// ```
+ /// use std::env;
+@@ -32,16 +32,18 @@ ast_struct! {
+ /// };
+ ///
+ /// let mut file = File::open(&filename).expect("Unable to open file");
+ ///
+ /// let mut src = String::new();
+ /// file.read_to_string(&mut src).expect("Unable to read file");
+ ///
+ /// let syntax = syn::parse_file(&src).expect("Unable to parse file");
++ ///
++ /// // Debug impl is available if Syn is built with "extra-traits" feature.
+ /// println!("{:#?}", syntax);
+ /// }
+ /// ```
+ ///
+ /// Running with its own source code as input, this program prints output
+ /// that begins with:
+ ///
+ /// ```text
+diff --git a/third_party/rust/syn/src/gen/clone.rs b/third_party/rust/syn/src/gen/clone.rs
+new file mode 100644
+--- /dev/null
++++ b/third_party/rust/syn/src/gen/clone.rs
+@@ -0,0 +1,2051 @@
++// This file is @generated by syn-internal-codegen.
++// It is not intended for manual editing.
++
++#![allow(clippy::clone_on_copy, clippy::expl_impl_clone_on_copy)]
++use crate::*;
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Abi {
++ fn clone(&self) -> Self {
++ Abi {
++ extern_token: self.extern_token.clone(),
++ name: self.name.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AngleBracketedGenericArguments {
++ fn clone(&self) -> Self {
++ AngleBracketedGenericArguments {
++ colon2_token: self.colon2_token.clone(),
++ lt_token: self.lt_token.clone(),
++ args: self.args.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Arm {
++ fn clone(&self) -> Self {
++ Arm {
++ attrs: self.attrs.clone(),
++ pat: self.pat.clone(),
++ guard: self.guard.clone(),
++ fat_arrow_token: self.fat_arrow_token.clone(),
++ body: self.body.clone(),
++ comma: self.comma.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for AttrStyle {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for AttrStyle {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Attribute {
++ fn clone(&self) -> Self {
++ Attribute {
++ pound_token: self.pound_token.clone(),
++ style: self.style.clone(),
++ bracket_token: self.bracket_token.clone(),
++ path: self.path.clone(),
++ tokens: self.tokens.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BareFnArg {
++ fn clone(&self) -> Self {
++ BareFnArg {
++ attrs: self.attrs.clone(),
++ name: self.name.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Copy for BinOp {}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BinOp {
++ fn clone(&self) -> Self {
++ *self
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Binding {
++ fn clone(&self) -> Self {
++ Binding {
++ ident: self.ident.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Block {
++ fn clone(&self) -> Self {
++ Block {
++ brace_token: self.brace_token.clone(),
++ stmts: self.stmts.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for BoundLifetimes {
++ fn clone(&self) -> Self {
++ BoundLifetimes {
++ for_token: self.for_token.clone(),
++ lt_token: self.lt_token.clone(),
++ lifetimes: self.lifetimes.clone(),
++ gt_token: self.gt_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ConstParam {
++ fn clone(&self) -> Self {
++ ConstParam {
++ attrs: self.attrs.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ default: self.default.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Constraint {
++ fn clone(&self) -> Self {
++ Constraint {
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ bounds: self.bounds.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for Data {
++ fn clone(&self) -> Self {
++ match self {
++ Data::Struct(v0) => Data::Struct(v0.clone()),
++ Data::Enum(v0) => Data::Enum(v0.clone()),
++ Data::Union(v0) => Data::Union(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataEnum {
++ fn clone(&self) -> Self {
++ DataEnum {
++ enum_token: self.enum_token.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataStruct {
++ fn clone(&self) -> Self {
++ DataStruct {
++ struct_token: self.struct_token.clone(),
++ fields: self.fields.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DataUnion {
++ fn clone(&self) -> Self {
++ DataUnion {
++ union_token: self.union_token.clone(),
++ fields: self.fields.clone(),
++ }
++ }
++}
++#[cfg(feature = "derive")]
++impl Clone for DeriveInput {
++ fn clone(&self) -> Self {
++ DeriveInput {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ data: self.data.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Expr {
++ fn clone(&self) -> Self {
++ match self {
++ #[cfg(feature = "full")]
++ Expr::Array(v0) => Expr::Array(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Assign(v0) => Expr::Assign(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::AssignOp(v0) => Expr::AssignOp(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Async(v0) => Expr::Async(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Await(v0) => Expr::Await(v0.clone()),
++ Expr::Binary(v0) => Expr::Binary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Block(v0) => Expr::Block(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Box(v0) => Expr::Box(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Break(v0) => Expr::Break(v0.clone()),
++ Expr::Call(v0) => Expr::Call(v0.clone()),
++ Expr::Cast(v0) => Expr::Cast(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Closure(v0) => Expr::Closure(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Continue(v0) => Expr::Continue(v0.clone()),
++ Expr::Field(v0) => Expr::Field(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::ForLoop(v0) => Expr::ForLoop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Group(v0) => Expr::Group(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::If(v0) => Expr::If(v0.clone()),
++ Expr::Index(v0) => Expr::Index(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Let(v0) => Expr::Let(v0.clone()),
++ Expr::Lit(v0) => Expr::Lit(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Loop(v0) => Expr::Loop(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Macro(v0) => Expr::Macro(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Match(v0) => Expr::Match(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::MethodCall(v0) => Expr::MethodCall(v0.clone()),
++ Expr::Paren(v0) => Expr::Paren(v0.clone()),
++ Expr::Path(v0) => Expr::Path(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Range(v0) => Expr::Range(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Reference(v0) => Expr::Reference(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Repeat(v0) => Expr::Repeat(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Return(v0) => Expr::Return(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Struct(v0) => Expr::Struct(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Try(v0) => Expr::Try(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::TryBlock(v0) => Expr::TryBlock(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Tuple(v0) => Expr::Tuple(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Type(v0) => Expr::Type(v0.clone()),
++ Expr::Unary(v0) => Expr::Unary(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Unsafe(v0) => Expr::Unsafe(v0.clone()),
++ Expr::Verbatim(v0) => Expr::Verbatim(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::While(v0) => Expr::While(v0.clone()),
++ #[cfg(feature = "full")]
++ Expr::Yield(v0) => Expr::Yield(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprArray {
++ fn clone(&self) -> Self {
++ ExprArray {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssign {
++ fn clone(&self) -> Self {
++ ExprAssign {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ eq_token: self.eq_token.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAssignOp {
++ fn clone(&self) -> Self {
++ ExprAssignOp {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAsync {
++ fn clone(&self) -> Self {
++ ExprAsync {
++ attrs: self.attrs.clone(),
++ async_token: self.async_token.clone(),
++ capture: self.capture.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprAwait {
++ fn clone(&self) -> Self {
++ ExprAwait {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ await_token: self.await_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprBinary {
++ fn clone(&self) -> Self {
++ ExprBinary {
++ attrs: self.attrs.clone(),
++ left: self.left.clone(),
++ op: self.op.clone(),
++ right: self.right.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBlock {
++ fn clone(&self) -> Self {
++ ExprBlock {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBox {
++ fn clone(&self) -> Self {
++ ExprBox {
++ attrs: self.attrs.clone(),
++ box_token: self.box_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprBreak {
++ fn clone(&self) -> Self {
++ ExprBreak {
++ attrs: self.attrs.clone(),
++ break_token: self.break_token.clone(),
++ label: self.label.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCall {
++ fn clone(&self) -> Self {
++ ExprCall {
++ attrs: self.attrs.clone(),
++ func: self.func.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprCast {
++ fn clone(&self) -> Self {
++ ExprCast {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ as_token: self.as_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprClosure {
++ fn clone(&self) -> Self {
++ ExprClosure {
++ attrs: self.attrs.clone(),
++ asyncness: self.asyncness.clone(),
++ movability: self.movability.clone(),
++ capture: self.capture.clone(),
++ or1_token: self.or1_token.clone(),
++ inputs: self.inputs.clone(),
++ or2_token: self.or2_token.clone(),
++ output: self.output.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprContinue {
++ fn clone(&self) -> Self {
++ ExprContinue {
++ attrs: self.attrs.clone(),
++ continue_token: self.continue_token.clone(),
++ label: self.label.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprField {
++ fn clone(&self) -> Self {
++ ExprField {
++ attrs: self.attrs.clone(),
++ base: self.base.clone(),
++ dot_token: self.dot_token.clone(),
++ member: self.member.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprForLoop {
++ fn clone(&self) -> Self {
++ ExprForLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ for_token: self.for_token.clone(),
++ pat: self.pat.clone(),
++ in_token: self.in_token.clone(),
++ expr: self.expr.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprGroup {
++ fn clone(&self) -> Self {
++ ExprGroup {
++ attrs: self.attrs.clone(),
++ group_token: self.group_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprIf {
++ fn clone(&self) -> Self {
++ ExprIf {
++ attrs: self.attrs.clone(),
++ if_token: self.if_token.clone(),
++ cond: self.cond.clone(),
++ then_branch: self.then_branch.clone(),
++ else_branch: self.else_branch.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprIndex {
++ fn clone(&self) -> Self {
++ ExprIndex {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ bracket_token: self.bracket_token.clone(),
++ index: self.index.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLet {
++ fn clone(&self) -> Self {
++ ExprLet {
++ attrs: self.attrs.clone(),
++ let_token: self.let_token.clone(),
++ pat: self.pat.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprLit {
++ fn clone(&self) -> Self {
++ ExprLit {
++ attrs: self.attrs.clone(),
++ lit: self.lit.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprLoop {
++ fn clone(&self) -> Self {
++ ExprLoop {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ loop_token: self.loop_token.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMacro {
++ fn clone(&self) -> Self {
++ ExprMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMatch {
++ fn clone(&self) -> Self {
++ ExprMatch {
++ attrs: self.attrs.clone(),
++ match_token: self.match_token.clone(),
++ expr: self.expr.clone(),
++ brace_token: self.brace_token.clone(),
++ arms: self.arms.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprMethodCall {
++ fn clone(&self) -> Self {
++ ExprMethodCall {
++ attrs: self.attrs.clone(),
++ receiver: self.receiver.clone(),
++ dot_token: self.dot_token.clone(),
++ method: self.method.clone(),
++ turbofish: self.turbofish.clone(),
++ paren_token: self.paren_token.clone(),
++ args: self.args.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprParen {
++ fn clone(&self) -> Self {
++ ExprParen {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprPath {
++ fn clone(&self) -> Self {
++ ExprPath {
++ attrs: self.attrs.clone(),
++ qself: self.qself.clone(),
++ path: self.path.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRange {
++ fn clone(&self) -> Self {
++ ExprRange {
++ attrs: self.attrs.clone(),
++ from: self.from.clone(),
++ limits: self.limits.clone(),
++ to: self.to.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReference {
++ fn clone(&self) -> Self {
++ ExprReference {
++ attrs: self.attrs.clone(),
++ and_token: self.and_token.clone(),
++ raw: self.raw.clone(),
++ mutability: self.mutability.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprRepeat {
++ fn clone(&self) -> Self {
++ ExprRepeat {
++ attrs: self.attrs.clone(),
++ bracket_token: self.bracket_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ len: self.len.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprReturn {
++ fn clone(&self) -> Self {
++ ExprReturn {
++ attrs: self.attrs.clone(),
++ return_token: self.return_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprStruct {
++ fn clone(&self) -> Self {
++ ExprStruct {
++ attrs: self.attrs.clone(),
++ path: self.path.clone(),
++ brace_token: self.brace_token.clone(),
++ fields: self.fields.clone(),
++ dot2_token: self.dot2_token.clone(),
++ rest: self.rest.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTry {
++ fn clone(&self) -> Self {
++ ExprTry {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ question_token: self.question_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTryBlock {
++ fn clone(&self) -> Self {
++ ExprTryBlock {
++ attrs: self.attrs.clone(),
++ try_token: self.try_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprTuple {
++ fn clone(&self) -> Self {
++ ExprTuple {
++ attrs: self.attrs.clone(),
++ paren_token: self.paren_token.clone(),
++ elems: self.elems.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprType {
++ fn clone(&self) -> Self {
++ ExprType {
++ attrs: self.attrs.clone(),
++ expr: self.expr.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for ExprUnary {
++ fn clone(&self) -> Self {
++ ExprUnary {
++ attrs: self.attrs.clone(),
++ op: self.op.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprUnsafe {
++ fn clone(&self) -> Self {
++ ExprUnsafe {
++ attrs: self.attrs.clone(),
++ unsafe_token: self.unsafe_token.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprWhile {
++ fn clone(&self) -> Self {
++ ExprWhile {
++ attrs: self.attrs.clone(),
++ label: self.label.clone(),
++ while_token: self.while_token.clone(),
++ cond: self.cond.clone(),
++ body: self.body.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ExprYield {
++ fn clone(&self) -> Self {
++ ExprYield {
++ attrs: self.attrs.clone(),
++ yield_token: self.yield_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Field {
++ fn clone(&self) -> Self {
++ Field {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldPat {
++ fn clone(&self) -> Self {
++ FieldPat {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ pat: self.pat.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FieldValue {
++ fn clone(&self) -> Self {
++ FieldValue {
++ attrs: self.attrs.clone(),
++ member: self.member.clone(),
++ colon_token: self.colon_token.clone(),
++ expr: self.expr.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Fields {
++ fn clone(&self) -> Self {
++ match self {
++ Fields::Named(v0) => Fields::Named(v0.clone()),
++ Fields::Unnamed(v0) => Fields::Unnamed(v0.clone()),
++ Fields::Unit => Fields::Unit,
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsNamed {
++ fn clone(&self) -> Self {
++ FieldsNamed {
++ brace_token: self.brace_token.clone(),
++ named: self.named.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for FieldsUnnamed {
++ fn clone(&self) -> Self {
++ FieldsUnnamed {
++ paren_token: self.paren_token.clone(),
++ unnamed: self.unnamed.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for File {
++ fn clone(&self) -> Self {
++ File {
++ shebang: self.shebang.clone(),
++ attrs: self.attrs.clone(),
++ items: self.items.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for FnArg {
++ fn clone(&self) -> Self {
++ match self {
++ FnArg::Receiver(v0) => FnArg::Receiver(v0.clone()),
++ FnArg::Typed(v0) => FnArg::Typed(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItem {
++ fn clone(&self) -> Self {
++ match self {
++ ForeignItem::Fn(v0) => ForeignItem::Fn(v0.clone()),
++ ForeignItem::Static(v0) => ForeignItem::Static(v0.clone()),
++ ForeignItem::Type(v0) => ForeignItem::Type(v0.clone()),
++ ForeignItem::Macro(v0) => ForeignItem::Macro(v0.clone()),
++ ForeignItem::Verbatim(v0) => ForeignItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemFn {
++ fn clone(&self) -> Self {
++ ForeignItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemMacro {
++ fn clone(&self) -> Self {
++ ForeignItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemStatic {
++ fn clone(&self) -> Self {
++ ForeignItemStatic {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ static_token: self.static_token.clone(),
++ mutability: self.mutability.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ForeignItemType {
++ fn clone(&self) -> Self {
++ ForeignItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericArgument::Lifetime(v0) => GenericArgument::Lifetime(v0.clone()),
++ GenericArgument::Type(v0) => GenericArgument::Type(v0.clone()),
++ GenericArgument::Binding(v0) => GenericArgument::Binding(v0.clone()),
++ GenericArgument::Constraint(v0) => GenericArgument::Constraint(v0.clone()),
++ GenericArgument::Const(v0) => GenericArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for GenericMethodArgument {
++ fn clone(&self) -> Self {
++ match self {
++ GenericMethodArgument::Type(v0) => GenericMethodArgument::Type(v0.clone()),
++ GenericMethodArgument::Const(v0) => GenericMethodArgument::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for GenericParam {
++ fn clone(&self) -> Self {
++ match self {
++ GenericParam::Type(v0) => GenericParam::Type(v0.clone()),
++ GenericParam::Lifetime(v0) => GenericParam::Lifetime(v0.clone()),
++ GenericParam::Const(v0) => GenericParam::Const(v0.clone()),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Generics {
++ fn clone(&self) -> Self {
++ Generics {
++ lt_token: self.lt_token.clone(),
++ params: self.params.clone(),
++ gt_token: self.gt_token.clone(),
++ where_clause: self.where_clause.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItem {
++ fn clone(&self) -> Self {
++ match self {
++ ImplItem::Const(v0) => ImplItem::Const(v0.clone()),
++ ImplItem::Method(v0) => ImplItem::Method(v0.clone()),
++ ImplItem::Type(v0) => ImplItem::Type(v0.clone()),
++ ImplItem::Macro(v0) => ImplItem::Macro(v0.clone()),
++ ImplItem::Verbatim(v0) => ImplItem::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemConst {
++ fn clone(&self) -> Self {
++ ImplItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMacro {
++ fn clone(&self) -> Self {
++ ImplItemMacro {
++ attrs: self.attrs.clone(),
++ mac: self.mac.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemMethod {
++ fn clone(&self) -> Self {
++ ImplItemMethod {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ImplItemType {
++ fn clone(&self) -> Self {
++ ImplItemType {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ defaultness: self.defaultness.clone(),
++ type_token: self.type_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ eq_token: self.eq_token.clone(),
++ ty: self.ty.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(any(feature = "derive", feature = "full"))]
++impl Clone for Index {
++ fn clone(&self) -> Self {
++ Index {
++ index: self.index.clone(),
++ span: self.span.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for Item {
++ fn clone(&self) -> Self {
++ match self {
++ Item::Const(v0) => Item::Const(v0.clone()),
++ Item::Enum(v0) => Item::Enum(v0.clone()),
++ Item::ExternCrate(v0) => Item::ExternCrate(v0.clone()),
++ Item::Fn(v0) => Item::Fn(v0.clone()),
++ Item::ForeignMod(v0) => Item::ForeignMod(v0.clone()),
++ Item::Impl(v0) => Item::Impl(v0.clone()),
++ Item::Macro(v0) => Item::Macro(v0.clone()),
++ Item::Macro2(v0) => Item::Macro2(v0.clone()),
++ Item::Mod(v0) => Item::Mod(v0.clone()),
++ Item::Static(v0) => Item::Static(v0.clone()),
++ Item::Struct(v0) => Item::Struct(v0.clone()),
++ Item::Trait(v0) => Item::Trait(v0.clone()),
++ Item::TraitAlias(v0) => Item::TraitAlias(v0.clone()),
++ Item::Type(v0) => Item::Type(v0.clone()),
++ Item::Union(v0) => Item::Union(v0.clone()),
++ Item::Use(v0) => Item::Use(v0.clone()),
++ Item::Verbatim(v0) => Item::Verbatim(v0.clone()),
++ _ => unreachable!(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemConst {
++ fn clone(&self) -> Self {
++ ItemConst {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ const_token: self.const_token.clone(),
++ ident: self.ident.clone(),
++ colon_token: self.colon_token.clone(),
++ ty: self.ty.clone(),
++ eq_token: self.eq_token.clone(),
++ expr: self.expr.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemEnum {
++ fn clone(&self) -> Self {
++ ItemEnum {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ enum_token: self.enum_token.clone(),
++ ident: self.ident.clone(),
++ generics: self.generics.clone(),
++ brace_token: self.brace_token.clone(),
++ variants: self.variants.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemExternCrate {
++ fn clone(&self) -> Self {
++ ItemExternCrate {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ extern_token: self.extern_token.clone(),
++ crate_token: self.crate_token.clone(),
++ ident: self.ident.clone(),
++ rename: self.rename.clone(),
++ semi_token: self.semi_token.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemFn {
++ fn clone(&self) -> Self {
++ ItemFn {
++ attrs: self.attrs.clone(),
++ vis: self.vis.clone(),
++ sig: self.sig.clone(),
++ block: self.block.clone(),
++ }
++ }
++}
++#[cfg(feature = "full")]
++impl Clone for ItemForeignMod {
++ fn clone(&self) -> Self {<