Importing rustc-1.59.0

Test: ./build.py --lto=thin
Change-Id: I6dbef2f2e4acd7832f31c209c452b87d4f354704
Bug: 215232614
diff --git a/compiler/rustc_lexer/src/cursor.rs b/compiler/rustc_lexer/src/cursor.rs
index 297f3d1..0ba6c56 100644
--- a/compiler/rustc_lexer/src/cursor.rs
+++ b/compiler/rustc_lexer/src/cursor.rs
@@ -2,10 +2,11 @@
 
 /// Peekable iterator over a char sequence.
 ///
-/// Next characters can be peeked via `nth_char` method,
+/// Next characters can be peeked via `first` method,
 /// and position can be shifted forward via `bump` method.
 pub(crate) struct Cursor<'a> {
     initial_len: usize,
+    /// Iterator over chars. Slightly faster than a &str.
     chars: Chars<'a>,
     #[cfg(debug_assertions)]
     prev: char,
@@ -37,22 +38,21 @@
         }
     }
 
-    /// Returns nth character relative to the current cursor position.
+    /// Peeks the next symbol from the input stream without consuming it.
     /// If requested position doesn't exist, `EOF_CHAR` is returned.
     /// However, getting `EOF_CHAR` doesn't always mean actual end of file,
     /// it should be checked with `is_eof` method.
-    fn nth_char(&self, n: usize) -> char {
-        self.chars().nth(n).unwrap_or(EOF_CHAR)
-    }
-
-    /// Peeks the next symbol from the input stream without consuming it.
     pub(crate) fn first(&self) -> char {
-        self.nth_char(0)
+        // `.next()` optimizes better than `.nth(0)`
+        self.chars.clone().next().unwrap_or(EOF_CHAR)
     }
 
     /// Peeks the second symbol from the input stream without consuming it.
     pub(crate) fn second(&self) -> char {
-        self.nth_char(1)
+        // `.next()` optimizes better than `.nth(1)`
+        let mut iter = self.chars.clone();
+        iter.next();
+        iter.next().unwrap_or(EOF_CHAR)
     }
 
     /// Checks if there is nothing more to consume.
@@ -65,9 +65,9 @@
         self.initial_len - self.chars.as_str().len()
     }
 
-    /// Returns a `Chars` iterator over the remaining characters.
-    fn chars(&self) -> Chars<'a> {
-        self.chars.clone()
+    /// Resets the number of bytes consumed to 0.
+    pub(crate) fn reset_len_consumed(&mut self) {
+        self.initial_len = self.chars.as_str().len();
     }
 
     /// Moves to the next character.
@@ -81,4 +81,13 @@
 
         Some(c)
     }
+
+    /// Eats symbols while predicate returns true or until the end of file is reached.
+    pub(crate) fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) {
+        // An optimized version of this was tried for e.g. line comments, but
+        // LLVM can inline all of this and compile it down to fast iteration over bytes.
+        while predicate(self.first()) && !self.is_eof() {
+            self.bump();
+        }
+    }
 }
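
For context, the Cursor refactor above peeks by cloning the underlying Chars iterator and
calling `.next()` on the clone instead of going through an `.nth(n)` helper, and `eat_while`
now lives on Cursor itself. Below is a minimal standalone sketch of that pattern; the
`Peeker` type, its method set, and the `EOF_CHAR` value are illustrative stand-ins, not the
rustc_lexer code itself.

    const EOF_CHAR: char = '\0';

    struct Peeker<'a> {
        chars: std::str::Chars<'a>,
    }

    impl<'a> Peeker<'a> {
        fn new(input: &'a str) -> Peeker<'a> {
            Peeker { chars: input.chars() }
        }

        /// Peek the next char without consuming it. Cloning `Chars` is cheap
        /// (it is just an iterator over a byte slice), and `.next()` on the
        /// clone optimizes better than `.nth(0)`, as the diff notes.
        fn first(&self) -> char {
            self.chars.clone().next().unwrap_or(EOF_CHAR)
        }

        /// Consume and return the next char, if any.
        fn bump(&mut self) -> Option<char> {
            self.chars.next()
        }

        /// True once every char has been consumed.
        fn is_eof(&self) -> bool {
            self.chars.as_str().is_empty()
        }

        /// Skip chars while `predicate` holds, mirroring the new `eat_while`.
        fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) {
            while predicate(self.first()) && !self.is_eof() {
                self.bump();
            }
        }
    }

    fn main() {
        let mut p = Peeker::new("   let x = 1;");
        p.eat_while(|c| c.is_whitespace());
        assert_eq!(p.first(), 'l');
    }
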
diff --git a/compiler/rustc_lexer/src/lib.rs b/compiler/rustc_lexer/src/lib.rs
index 44b002f..5b8300a 100644
--- a/compiler/rustc_lexer/src/lib.rs
+++ b/compiler/rustc_lexer/src/lib.rs
@@ -227,14 +227,15 @@
 }
 
 /// Creates an iterator that produces tokens from the input string.
-pub fn tokenize(mut input: &str) -> impl Iterator<Item = Token> + '_ {
+pub fn tokenize(input: &str) -> impl Iterator<Item = Token> + '_ {
+    let mut cursor = Cursor::new(input);
     std::iter::from_fn(move || {
-        if input.is_empty() {
-            return None;
+        if cursor.is_eof() {
+            None
+        } else {
+            cursor.reset_len_consumed();
+            Some(cursor.advance_token())
         }
-        let token = first_token(input);
-        input = &input[token.len..];
-        Some(token)
     })
 }
 
@@ -832,11 +833,4 @@
 
         self.eat_while(is_id_continue);
     }
-
-    /// Eats symbols while predicate returns true or until the end of file is reached.
-    fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) {
-        while predicate(self.first()) && !self.is_eof() {
-            self.bump();
-        }
-    }
 }
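
The tokenize change above keeps one long-lived Cursor across calls instead of re-slicing the
input string for every token, and drives it from a std::iter::from_fn closure. A rough sketch
of that shape follows; the Tok struct and toy_tokenize function are placeholders for
illustration, not the rustc_lexer API.

    /// Illustrative token: the char it was built from and its length in bytes.
    struct Tok {
        ch: char,
        len: usize,
    }

    /// Build tokens lazily from one mutable iterator captured by the closure,
    /// rather than re-slicing the input string on every call.
    fn toy_tokenize(input: &str) -> impl Iterator<Item = Tok> + '_ {
        let mut chars = input.chars();
        std::iter::from_fn(move || {
            // `None` at end of input ends the iterator, the way `is_eof` does above.
            chars.next().map(|ch| Tok { ch, len: ch.len_utf8() })
        })
    }

    fn main() {
        let toks: Vec<Tok> = toy_tokenize("fn main() {}").collect();
        assert_eq!(toks[0].ch, 'f');
        let total: usize = toks.iter().map(|t| t.len).sum();
        assert_eq!(total, "fn main() {}".len());
    }

In the real lexer the closure additionally calls reset_len_consumed before each
advance_token, so that len_consumed stays relative to the start of the current token.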