Update to latest proc-macro2-1.0.9

* Fill back missing NOTICE, METADATA, and *LICENSE* files

Bug: 150877376
Test: make
Test: atest --host -c --include-subdirs external/rust/crates
Change-Id: Id0cc16545586486ebf4937ed7f3e013273af59f2
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index 19bdf08..64135fd 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,5 +1,5 @@
 {
   "git": {
-    "sha1": "bdac3732544a3cfb73afe4548f550c369e906856"
+    "sha1": "b7daef7967be1e71a3d5525e3298241c42a19215"
   }
 }
diff --git a/.gitignore b/.gitignore
index 4308d82..6936990 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,3 @@
-target/
+/target
 **/*.rs.bk
 Cargo.lock
diff --git a/Android.bp b/Android.bp
index 3cf41a9..8bba8ff 100644
--- a/Android.bp
+++ b/Android.bp
@@ -10,7 +10,6 @@
         "proc-macro",
     ],
     flags: [
-        "--cfg proc_macro_span",
         "--cfg use_proc_macro",
         "--cfg wrap_proc_macro",
     ],
@@ -36,7 +35,6 @@
         "proc-macro",
     ],
     flags: [
-        "--cfg proc_macro_span",
         "--cfg use_proc_macro",
         "--cfg wrap_proc_macro",
     ],
@@ -60,7 +58,6 @@
         "proc-macro",
     ],
     flags: [
-        "--cfg proc_macro_span",
         "--cfg use_proc_macro",
         "--cfg wrap_proc_macro",
     ],
@@ -69,3 +66,6 @@
         "libunicode_xid",
     ],
 }
+
+// dependent_library ["feature_list"]
+//   unicode-xid-0.2.0 "default"
diff --git a/Cargo.toml b/Cargo.toml
index a6fea91..fd4ee2c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,7 +13,7 @@
 [package]
 edition = "2018"
 name = "proc-macro2"
-version = "1.0.4"
+version = "1.0.9"
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
 description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
 homepage = "https://github.com/alexcrichton/proc-macro2"
@@ -25,9 +25,6 @@
 [package.metadata.docs.rs]
 rustc-args = ["--cfg", "procmacro2_semver_exempt"]
 rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
-
-[lib]
-name = "proc_macro2"
 [dependencies.unicode-xid]
 version = "0.2"
 [dev-dependencies.quote]
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index fd5ee70..7870b16 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,6 +1,6 @@
 [package]
 name = "proc-macro2"
-version = "1.0.4" # remember to update html_root_url
+version = "1.0.9" # remember to update html_root_url
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
 license = "MIT OR Apache-2.0"
 readme = "README.md"
@@ -15,9 +15,6 @@
 unstable API.
 """
 
-[lib]
-name = "proc_macro2"
-
 [package.metadata.docs.rs]
 rustc-args = ["--cfg", "procmacro2_semver_exempt"]
 rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
@@ -42,6 +39,9 @@
 [badges]
 travis-ci = { repository = "alexcrichton/proc-macro2" }
 
+[workspace]
+members = ["benches/bench-libproc-macro"]
+
 [patch.crates-io]
 # Our doc tests depend on quote which depends on proc-macro2. Without this line,
 # the proc-macro2 dependency of quote would be the released version of
diff --git a/LICENSE b/LICENSE
new file mode 120000
index 0000000..6b579aa
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1 @@
+LICENSE-APACHE
\ No newline at end of file
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..993f79e
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,18 @@
+name: "proc-macro2"
+description: "A wrapper around the procedural macro API of the compiler\'s proc_macro crate."
+third_party {
+  url {
+    type: HOMEPAGE
+    value: "https://crates.io/crates/proc-macro2/"
+  }
+  url {
+    type: GIT
+    value: "https://github.com/alexcrichton/proc-macro2"
+  }
+  version: "1.0.9"
+  last_upgrade_date {
+    year: 2020
+    month: 3
+    day: 20
+  }
+}
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_APACHE2
diff --git a/NOTICE b/NOTICE
new file mode 120000
index 0000000..7a694c9
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1 @@
+LICENSE
\ No newline at end of file
diff --git a/src/fallback.rs b/src/fallback.rs
index fe582b3..ac0c6ca 100644
--- a/src/fallback.rs
+++ b/src/fallback.rs
@@ -427,6 +427,32 @@
             })
         })
     }
+
+    #[cfg(not(span_locations))]
+    fn first_byte(self) -> Self {
+        self
+    }
+
+    #[cfg(span_locations)]
+    fn first_byte(self) -> Self {
+        Span {
+            lo: self.lo,
+            hi: cmp::min(self.lo.saturating_add(1), self.hi),
+        }
+    }
+
+    #[cfg(not(span_locations))]
+    fn last_byte(self) -> Self {
+        self
+    }
+
+    #[cfg(span_locations)]
+    fn last_byte(self) -> Self {
+        Span {
+            lo: cmp::max(self.hi.saturating_sub(1), self.lo),
+            hi: self.hi,
+        }
+    }
 }
 
 impl fmt::Debug for Span {
@@ -474,11 +500,11 @@
     }
 
     pub fn span_open(&self) -> Span {
-        self.span
+        self.span.first_byte()
     }
 
     pub fn span_close(&self) -> Span {
-        self.span
+        self.span.last_byte()
     }
 
     pub fn set_span(&mut self, span: Span) {
@@ -549,7 +575,6 @@
     }
 }
 
-#[inline]
 fn is_ident_start(c: char) -> bool {
     ('a' <= c && c <= 'z')
         || ('A' <= c && c <= 'Z')
@@ -557,7 +582,6 @@
         || (c > '\x7f' && UnicodeXID::is_xid_start(c))
 }
 
-#[inline]
 fn is_ident_continue(c: char) -> bool {
     ('a' <= c && c <= 'z')
         || ('A' <= c && c <= 'Z')
@@ -730,10 +754,10 @@
         text.push('"');
         for c in t.chars() {
             if c == '\'' {
-                // escape_default turns this into "\'" which is unnecessary.
+                // escape_debug turns this into "\'" which is unnecessary.
                 text.push(c);
             } else {
-                text.extend(c.escape_default());
+                text.extend(c.escape_debug());
             }
         }
         text.push('"');
@@ -744,10 +768,10 @@
         let mut text = String::new();
         text.push('\'');
         if t == '"' {
-            // escape_default turns this into '\"' which is unnecessary.
+            // escape_debug turns this into '\"' which is unnecessary.
             text.push(t);
         } else {
-            text.extend(t.escape_default());
+            text.extend(t.escape_debug());
         }
         text.push('\'');
         Literal::_new(text)
diff --git a/src/lib.rs b/src/lib.rs
index ad9e301..92a39e9 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -78,7 +78,7 @@
 //! a different thread.
 
 // Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.4")]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/1.0.9")]
 #![cfg_attr(any(proc_macro_span, super_unstable), feature(proc_macro_span))]
 #![cfg_attr(super_unstable, feature(proc_macro_raw_ident, proc_macro_def_site))]
 
diff --git a/src/wrapper.rs b/src/wrapper.rs
index c3b6e3a..d4c6ddf 100644
--- a/src/wrapper.rs
+++ b/src/wrapper.rs
@@ -10,10 +10,20 @@
 
 #[derive(Clone)]
 pub enum TokenStream {
-    Compiler(proc_macro::TokenStream),
+    Compiler(DeferredTokenStream),
     Fallback(fallback::TokenStream),
 }
 
+// Work around https://github.com/rust-lang/rust/issues/65080.
+// In `impl Extend<TokenTree> for TokenStream` which is used heavily by quote,
+// we hold on to the appended tokens and do proc_macro::TokenStream::extend as
+// late as possible to batch together consecutive uses of the Extend impl.
+#[derive(Clone)]
+pub struct DeferredTokenStream {
+    stream: proc_macro::TokenStream,
+    extra: Vec<proc_macro::TokenTree>,
+}
+
 pub enum LexError {
     Compiler(proc_macro::LexError),
     Fallback(fallback::LexError),
@@ -80,10 +90,32 @@
     panic!("stable/nightly mismatch")
 }
 
+impl DeferredTokenStream {
+    fn new(stream: proc_macro::TokenStream) -> Self {
+        DeferredTokenStream {
+            stream,
+            extra: Vec::new(),
+        }
+    }
+
+    fn is_empty(&self) -> bool {
+        self.stream.is_empty() && self.extra.is_empty()
+    }
+
+    fn evaluate_now(&mut self) {
+        self.stream.extend(self.extra.drain(..));
+    }
+
+    fn into_token_stream(mut self) -> proc_macro::TokenStream {
+        self.evaluate_now();
+        self.stream
+    }
+}
+
 impl TokenStream {
     pub fn new() -> TokenStream {
         if nightly_works() {
-            TokenStream::Compiler(proc_macro::TokenStream::new())
+            TokenStream::Compiler(DeferredTokenStream::new(proc_macro::TokenStream::new()))
         } else {
             TokenStream::Fallback(fallback::TokenStream::new())
         }
@@ -98,7 +130,7 @@
 
     fn unwrap_nightly(self) -> proc_macro::TokenStream {
         match self {
-            TokenStream::Compiler(s) => s,
+            TokenStream::Compiler(s) => s.into_token_stream(),
             TokenStream::Fallback(_) => mismatch(),
         }
     }
@@ -116,17 +148,25 @@
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
         if nightly_works() {
-            Ok(TokenStream::Compiler(src.parse()?))
+            Ok(TokenStream::Compiler(DeferredTokenStream::new(
+                proc_macro_parse(src)?,
+            )))
         } else {
             Ok(TokenStream::Fallback(src.parse()?))
         }
     }
 }
 
+// Work around https://github.com/rust-lang/rust/issues/58736.
+fn proc_macro_parse(src: &str) -> Result<proc_macro::TokenStream, LexError> {
+    panic::catch_unwind(|| src.parse().map_err(LexError::Compiler))
+        .unwrap_or(Err(LexError::Fallback(fallback::LexError)))
+}
+
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            TokenStream::Compiler(tts) => tts.fmt(f),
+            TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
             TokenStream::Fallback(tts) => tts.fmt(f),
         }
     }
@@ -134,14 +174,14 @@
 
 impl From<proc_macro::TokenStream> for TokenStream {
     fn from(inner: proc_macro::TokenStream) -> TokenStream {
-        TokenStream::Compiler(inner)
+        TokenStream::Compiler(DeferredTokenStream::new(inner))
     }
 }
 
 impl From<TokenStream> for proc_macro::TokenStream {
     fn from(inner: TokenStream) -> proc_macro::TokenStream {
         match inner {
-            TokenStream::Compiler(inner) => inner,
+            TokenStream::Compiler(inner) => inner.into_token_stream(),
             TokenStream::Fallback(inner) => inner.to_string().parse().unwrap(),
         }
     }
@@ -153,40 +193,40 @@
     }
 }
 
+// Assumes nightly_works().
+fn into_compiler_token(token: TokenTree) -> proc_macro::TokenTree {
+    match token {
+        TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
+        TokenTree::Punct(tt) => {
+            let spacing = match tt.spacing() {
+                Spacing::Joint => proc_macro::Spacing::Joint,
+                Spacing::Alone => proc_macro::Spacing::Alone,
+            };
+            let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
+            op.set_span(tt.span().inner.unwrap_nightly());
+            op.into()
+        }
+        TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+        TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
+    }
+}
+
 impl From<TokenTree> for TokenStream {
     fn from(token: TokenTree) -> TokenStream {
-        if !nightly_works() {
-            return TokenStream::Fallback(token.into());
+        if nightly_works() {
+            TokenStream::Compiler(DeferredTokenStream::new(into_compiler_token(token).into()))
+        } else {
+            TokenStream::Fallback(token.into())
         }
-        let tt: proc_macro::TokenTree = match token {
-            TokenTree::Group(tt) => tt.inner.unwrap_nightly().into(),
-            TokenTree::Punct(tt) => {
-                let spacing = match tt.spacing() {
-                    Spacing::Joint => proc_macro::Spacing::Joint,
-                    Spacing::Alone => proc_macro::Spacing::Alone,
-                };
-                let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
-                op.set_span(tt.span().inner.unwrap_nightly());
-                op.into()
-            }
-            TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
-            TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
-        };
-        TokenStream::Compiler(tt.into())
     }
 }
 
 impl iter::FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
         if nightly_works() {
-            let trees = trees
-                .into_iter()
-                .map(TokenStream::from)
-                .flat_map(|t| match t {
-                    TokenStream::Compiler(s) => s,
-                    TokenStream::Fallback(_) => mismatch(),
-                });
-            TokenStream::Compiler(trees.collect())
+            TokenStream::Compiler(DeferredTokenStream::new(
+                trees.into_iter().map(into_compiler_token).collect(),
+            ))
         } else {
             TokenStream::Fallback(trees.into_iter().collect())
         }
@@ -198,8 +238,9 @@
         let mut streams = streams.into_iter();
         match streams.next() {
             Some(TokenStream::Compiler(mut first)) => {
-                first.extend(streams.map(|s| match s {
-                    TokenStream::Compiler(s) => s,
+                first.evaluate_now();
+                first.stream.extend(streams.map(|s| match s {
+                    TokenStream::Compiler(s) => s.into_token_stream(),
                     TokenStream::Fallback(_) => mismatch(),
                 }));
                 TokenStream::Compiler(first)
@@ -220,11 +261,9 @@
     fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
         match self {
             TokenStream::Compiler(tts) => {
-                tts.extend(
-                    streams
-                        .into_iter()
-                        .map(|t| TokenStream::from(t).unwrap_nightly()),
-                );
+                // Here is the reason for DeferredTokenStream.
+                tts.extra
+                    .extend(streams.into_iter().map(into_compiler_token));
             }
             TokenStream::Fallback(tts) => tts.extend(streams),
         }
@@ -235,24 +274,12 @@
     fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
         match self {
             TokenStream::Compiler(tts) => {
-                #[cfg(not(slow_extend))]
-                {
-                    tts.extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
-                }
-                #[cfg(slow_extend)]
-                {
-                    *tts = tts
-                        .clone()
-                        .into_iter()
-                        .chain(streams.into_iter().flat_map(|t| match t {
-                            TokenStream::Compiler(tts) => tts.into_iter(),
-                            _ => mismatch(),
-                        }))
-                        .collect();
-                }
+                tts.evaluate_now();
+                tts.stream
+                    .extend(streams.into_iter().map(|stream| stream.unwrap_nightly()));
             }
             TokenStream::Fallback(tts) => {
-                tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()))
+                tts.extend(streams.into_iter().map(|stream| stream.unwrap_stable()));
             }
         }
     }
@@ -261,7 +288,7 @@
 impl fmt::Debug for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
-            TokenStream::Compiler(tts) => tts.fmt(f),
+            TokenStream::Compiler(tts) => tts.clone().into_token_stream().fmt(f),
             TokenStream::Fallback(tts) => tts.fmt(f),
         }
     }
@@ -300,7 +327,9 @@
 
     fn into_iter(self) -> TokenTreeIter {
         match self {
-            TokenStream::Compiler(tts) => TokenTreeIter::Compiler(tts.into_iter()),
+            TokenStream::Compiler(tts) => {
+                TokenTreeIter::Compiler(tts.into_token_stream().into_iter())
+            }
             TokenStream::Fallback(tts) => TokenTreeIter::Fallback(tts.into_iter()),
         }
     }
@@ -546,14 +575,14 @@
 impl Group {
     pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
         match stream {
-            TokenStream::Compiler(stream) => {
+            TokenStream::Compiler(tts) => {
                 let delimiter = match delimiter {
                     Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
                     Delimiter::Bracket => proc_macro::Delimiter::Bracket,
                     Delimiter::Brace => proc_macro::Delimiter::Brace,
                     Delimiter::None => proc_macro::Delimiter::None,
                 };
-                Group::Compiler(proc_macro::Group::new(delimiter, stream))
+                Group::Compiler(proc_macro::Group::new(delimiter, tts.into_token_stream()))
             }
             TokenStream::Fallback(stream) => {
                 Group::Fallback(fallback::Group::new(delimiter, stream))
@@ -575,7 +604,7 @@
 
     pub fn stream(&self) -> TokenStream {
         match self {
-            Group::Compiler(g) => TokenStream::Compiler(g.stream()),
+            Group::Compiler(g) => TokenStream::Compiler(DeferredTokenStream::new(g.stream())),
             Group::Fallback(g) => TokenStream::Fallback(g.stream()),
         }
     }