Snap for 8189365 from 0969784eddf3ede8b1d22e6550c5a687d3ce0936 to tm-frc-os-statsd-release

Change-Id: Ic9a1261589a066ce40180714c11d31e1284115c5
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index 041c1cf..7aad2fd 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,5 +1,5 @@
 {
   "git": {
-    "sha1": "dd59a74be412e349bf6df528a216a13c2cf57262"
+    "sha1": "4e0e8ec599e92b115c53ed8d760f7c38bf91891f"
   }
 }
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8bdc61a..bc98cce 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -32,7 +32,7 @@
         include:
         - build: pinned
           os: ubuntu-18.04
-          rust: 1.41.1
+          rust: 1.28.0
         - build: stable
           os: ubuntu-18.04
           rust: stable
@@ -82,8 +82,8 @@
         # FIXME: to work around bugs in latest cross release, install master.
         # See: https://github.com/rust-embedded/cross/issues/357
         cargo install --git https://github.com/rust-embedded/cross
-        echo "CARGO=cross" >> $GITHUB_ENV
-        echo "TARGET=--target ${{ matrix.target }}" >> $GITHUB_ENV
+        echo "::set-env name=CARGO::cross"
+        echo "::set-env name=TARGET::--target ${{ matrix.target }}"
 
     - name: Show command used for Cargo
       run: |
diff --git a/Android.bp b/Android.bp
index 19545ce..7e00d34 100644
--- a/Android.bp
+++ b/Android.bp
@@ -1,4 +1,4 @@
-// This file is generated by cargo2android.py --config cargo2android.json.
+// This file is generated by cargo2android.py --run --device --dependencies.
 // Do not modify this file as changes will be overridden on upgrade.
 
 package {
@@ -42,10 +42,9 @@
 
 rust_library {
     name: "libregex_automata",
+    // has rustc warnings
     host_supported: true,
     crate_name: "regex_automata",
-    cargo_env_compat: true,
-    cargo_pkg_version: "0.1.10",
     srcs: ["src/lib.rs"],
     edition: "2015",
     features: [
@@ -58,6 +57,11 @@
         "-C opt-level=3",
     ],
     rustlibs: [
+        "libbyteorder",
         "libregex_syntax",
     ],
 }
+
+// dependent_library ["feature_list"]
+//   byteorder-1.4.2
+//   regex-syntax-0.6.22 "default,unicode,unicode-age,unicode-bool,unicode-case,unicode-gencat,unicode-perl,unicode-script,unicode-segment"
diff --git a/Cargo.toml b/Cargo.toml
index b4fcd7a..7ef891a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,7 +12,7 @@
 
 [package]
 name = "regex-automata"
-version = "0.1.10"
+version = "0.1.9"
 authors = ["Andrew Gallant <[email protected]>"]
 exclude = ["/.travis.yml", "/appveyor.yml", "/ci/*", "/scripts/*", "/regex-automata-debug"]
 autoexamples = false
@@ -45,6 +45,10 @@
 [[test]]
 name = "default"
 path = "tests/tests.rs"
+[dependencies.byteorder]
+version = "1.2.7"
+default-features = false
+
 [dependencies.fst]
 version = "0.4.0"
 optional = true
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index ce1c704..2f5eb41 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,6 +1,6 @@
 [package]
 name = "regex-automata"
-version = "0.1.10"  #:version
+version = "0.1.9"  #:version
 authors = ["Andrew Gallant <[email protected]>"]
 description = "Automata construction and matching using regular expressions."
 documentation = "https://docs.rs/regex-automata"
@@ -40,6 +40,7 @@
 transducer = ["std", "fst"]
 
 [dependencies]
+byteorder = { version = "1.2.7", default-features = false }
 fst = { version = "0.4.0", optional = true }
 regex-syntax = { version = "0.6.16", optional = true }
 
diff --git a/METADATA b/METADATA
index c821873..421e63a 100644
--- a/METADATA
+++ b/METADATA
@@ -7,13 +7,13 @@
   }
   url {
     type: ARCHIVE
-    value: "https://static.crates.io/crates/regex-automata/regex-automata-0.1.10.crate"
+    value: "https://static.crates.io/crates/regex-automata/regex-automata-0.1.9.crate"
   }
-  version: "0.1.10"
+  version: "0.1.9"
   license_type: NOTICE
   last_upgrade_date {
-    year: 2021
-    month: 6
+    year: 2020
+    month: 12
     day: 21
   }
 }
diff --git a/README.md b/README.md
index 8eaf03f..2acf065 100644
--- a/README.md
+++ b/README.md
@@ -6,10 +6,9 @@
 support for cheap deserialization of automata for use in `no_std` environments.
 
 [![Build status](https://github.com/BurntSushi/regex-automata/workflows/ci/badge.svg)](https://github.com/BurntSushi/regex-automata/actions)
-[![on crates.io](https://meritbadge.herokuapp.com/regex-automata)](https://crates.io/crates/regex-automata)
-![Minimum Supported Rust Version 1.41](https://img.shields.io/badge/rustc-1.41-green)
+[![](http://meritbadge.herokuapp.com/regex-automata)](https://crates.io/crates/regex-automata)
 
-Dual-licensed under MIT or the [UNLICENSE](https://unlicense.org/).
+Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org).
 
 
 ### Documentation
@@ -183,10 +182,10 @@
 * Stretch goal: support capturing groups by implementing "tagged" DFA
   (transducers). Laurikari's paper is the usual reference here, but Trofimovich
   has a much more thorough treatment here:
-  https://re2c.org/2017_trofimovich_tagged_deterministic_finite_automata_with_lookahead.pdf
+  http://re2c.org/2017_trofimovich_tagged_deterministic_finite_automata_with_lookahead.pdf
   I've only read the paper once. I suspect it will require at least a few more
   read throughs before I understand it.
-  See also: https://re2c.org
+  See also: http://re2c.org/
 * Possibly less ambitious goal: can we select a portion of Trofimovich's work
   to make small fixed length look-around work? It would be really nice to
   support ^, $ and \b, especially the Unicode variant of \b and CRLF aware $.
@@ -220,4 +219,4 @@
   If we could know whether a regex will exhibit state explosion or not, then
   we could make an intelligent decision about whether to ahead-of-time compile
   a DFA.
-  See: https://www.researchgate.net/profile/Xu-Shutu/publication/229032602_Characterization_of_a_global_germplasm_collection_and_its_potential_utilization_for_analysis_of_complex_quantitative_traits_in_maize/links/02bfe50f914d04c837000000/Characterization-of-a-global-germplasm-collection-and-its-potential-utilization-for-analysis-of-complex-quantitative-traits-in-maize.pdf
+  See: https://www.researchgate.net/profile/XU_Shutu/publication/229032602_Characterization_of_a_global_germplasm_collection_and_its_potential_utilization_for_analysis_of_complex_quantitative_traits_in_maize/links/02bfe50f914d04c837000000.pdf
diff --git a/TEST_MAPPING b/TEST_MAPPING
deleted file mode 100644
index 3cbd48d..0000000
--- a/TEST_MAPPING
+++ /dev/null
@@ -1,17 +0,0 @@
-// Generated by update_crate_tests.py for tests that depend on this crate.
-{
-  "imports": [
-    {
-      "path": "external/rust/crates/base64"
-    },
-    {
-      "path": "external/rust/crates/tinytemplate"
-    },
-    {
-      "path": "external/rust/crates/tinyvec"
-    },
-    {
-      "path": "external/rust/crates/unicode-xid"
-    }
-  ]
-}
diff --git a/cargo2android.json b/cargo2android.json
deleted file mode 100644
index bf78496..0000000
--- a/cargo2android.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "device": true,
-  "run": true
-}
\ No newline at end of file
diff --git a/src/byteorder.rs b/src/byteorder.rs
deleted file mode 100644
index e909f93..0000000
--- a/src/byteorder.rs
+++ /dev/null
@@ -1,76 +0,0 @@
-use core::convert::TryInto;
-
-pub trait ByteOrder {
-    fn read_u16(buf: &[u8]) -> u16;
-    fn read_u32(buf: &[u8]) -> u32;
-    fn read_u64(buf: &[u8]) -> u64;
-    fn read_uint(buf: &[u8], nbytes: usize) -> u64;
-    fn write_u16(buf: &mut [u8], n: u16);
-    fn write_u32(buf: &mut [u8], n: u32);
-    fn write_u64(buf: &mut [u8], n: u64);
-    fn write_uint(buf: &mut [u8], n: u64, nbytes: usize);
-}
-
-pub enum BigEndian {}
-pub enum LittleEndian {}
-pub enum NativeEndian {}
-
-macro_rules! impl_endian {
-    ($t:ty, $from_endian:ident, $to_endian:ident) => {
-        impl ByteOrder for $t {
-            #[inline]
-            fn read_u16(buf: &[u8]) -> u16 {
-                u16::$from_endian(buf[0..2].try_into().unwrap())
-            }
-
-            #[inline]
-            fn read_u32(buf: &[u8]) -> u32 {
-                u32::$from_endian(buf[0..4].try_into().unwrap())
-            }
-
-            #[inline]
-            fn read_u64(buf: &[u8]) -> u64 {
-                u64::$from_endian(buf[0..8].try_into().unwrap())
-            }
-
-            #[inline]
-            fn read_uint(buf: &[u8], nbytes: usize) -> u64 {
-                let mut dst = [0u8; 8];
-                dst[..nbytes].copy_from_slice(&buf[..nbytes]);
-                u64::$from_endian(dst)
-            }
-
-            #[inline]
-            fn write_u16(buf: &mut [u8], n: u16) {
-                buf[0..2].copy_from_slice(&n.$to_endian()[..]);
-            }
-
-            #[inline]
-            fn write_u32(buf: &mut [u8], n: u32) {
-                buf[0..4].copy_from_slice(&n.$to_endian()[..]);
-            }
-
-            #[inline]
-            fn write_u64(buf: &mut [u8], n: u64) {
-                buf[0..8].copy_from_slice(&n.$to_endian()[..]);
-            }
-
-            #[inline]
-            fn write_uint(buf: &mut [u8], n: u64, nbytes: usize) {
-                buf[..nbytes].copy_from_slice(&n.$to_endian()[..nbytes]);
-            }
-        }
-    };
-}
-
-impl_endian! {
-    BigEndian, from_be_bytes, to_be_bytes
-}
-
-impl_endian! {
-    LittleEndian, from_le_bytes, to_le_bytes
-}
-
-impl_endian! {
-    NativeEndian, from_ne_bytes, to_ne_bytes
-}
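
Note (editor, not part of the patch): the deleted src/byteorder.rs is a small shim that exists only in 0.1.10; it reimplements fixed-endian reads and writes on top of u16/u32/u64::from_*_bytes and to_*_bytes. Reverting to 0.1.9 drops the shim and reinstates the external byteorder crate for the same job (see the Cargo.toml and src/lib.rs hunks). A minimal sketch of the equivalent calls through byteorder 1.x, for orientation only; the function name is illustrative, the trait methods are the crate's public ByteOrder API:

    // Reading and writing fixed-width integers with the byteorder crate,
    // mirroring what the deleted shim provided via core::convert::TryInto.
    extern crate byteorder;
    use byteorder::{BigEndian, ByteOrder, LittleEndian};

    fn roundtrip() {
        let mut buf = [0u8; 8];
        // write_u32/read_u32 are associated functions on the ByteOrder trait.
        LittleEndian::write_u32(&mut buf, 0xDEAD_BEEF);
        assert_eq!(LittleEndian::read_u32(&buf), 0xDEAD_BEEF);
        // read_uint/write_uint operate on the low `nbytes` bytes of a u64.
        BigEndian::write_uint(&mut buf, 0x0102_0304, 4);
        assert_eq!(BigEndian::read_uint(&buf, 4), 0x0102_0304);
    }
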
diff --git a/src/determinize.rs b/src/determinize.rs
index cf0c285..f300316 100644
--- a/src/determinize.rs
+++ b/src/determinize.rs
@@ -148,8 +148,7 @@
         if let Some(&cached_id) = self.cache.get(&state) {
             // Since we have a cached state, put the constructed state's
             // memory back into our scratch space, so that it can be reused.
-            let _ =
-                mem::replace(&mut self.scratch_nfa_states, state.nfa_states);
+            mem::replace(&mut self.scratch_nfa_states, state.nfa_states);
             return Ok((cached_id, false));
         }
         // Nothing was in the cache, so add this state to the cache.
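
Note (editor, not part of the patch): mem::replace swaps the new value into place and returns the old one, and newer compilers can warn when that returned value is silently ignored; 0.1.10 added the `let _ =` binding, presumably to silence that warning, and this revert to 0.1.9 drops it. A small illustration of the semantics, using hypothetical names rather than code from the patch:

    use std::mem;

    fn reuse_scratch(scratch: &mut Vec<u32>, built: Vec<u32>) {
        // Swap the freshly built buffer into the scratch slot and take the old
        // scratch buffer back; binding it makes the discard explicit.
        let _old = mem::replace(scratch, built);
        // `_old` is dropped here; without a binding, newer rustc releases may
        // emit an unused-result warning at this call site.
    }
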
diff --git a/src/lib.rs b/src/lib.rs
index 7894ecc..4d3e9c1 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -290,6 +290,7 @@
 
 #[cfg(all(test, feature = "transducer"))]
 extern crate bstr;
+extern crate byteorder;
 #[cfg(feature = "transducer")]
 extern crate fst;
 #[cfg(feature = "std")]
@@ -305,7 +306,6 @@
 pub use sparse::SparseDFA;
 pub use state_id::StateID;
 
-mod byteorder;
 mod classes;
 #[path = "dense.rs"]
 mod dense_imp;
diff --git a/src/sparse_set.rs b/src/sparse_set.rs
index 56743b0..6f145ba 100644
--- a/src/sparse_set.rs
+++ b/src/sparse_set.rs
@@ -6,7 +6,7 @@
 /// entire set can also be done in constant time. Iteration yields elements
 /// in the order in which they were inserted.
 ///
-/// The data structure is based on: https://research.swtch.com/sparse
+/// The data structure is based on: http://research.swtch.com/sparse
 /// Note though that we don't actually use uninitialized memory. We generally
 /// reuse sparse sets, so the initial allocation cost is bareable. However, its
 /// other properties listed above are extremely useful.
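
Note (editor, not part of the patch): the doc comment above describes the classic two-array sparse set from the linked article: a dense array of inserted members plus a sparse array mapping each member to its slot in the dense array, giving constant-time insertion, membership testing, and clearing. A minimal sketch of the technique, independent of the crate's actual SparseSet implementation:

    /// A toy sparse set over ids in 0..capacity, sketching the technique.
    struct SparseSet {
        dense: Vec<usize>,  // members, in insertion order
        sparse: Vec<usize>, // sparse[id] = index of `id` in `dense`, if present
    }

    impl SparseSet {
        fn new(capacity: usize) -> SparseSet {
            // Zero-initialized, so no uninitialized memory is read.
            SparseSet { dense: Vec::with_capacity(capacity), sparse: vec![0; capacity] }
        }

        fn insert(&mut self, id: usize) {
            if !self.contains(id) {
                self.sparse[id] = self.dense.len();
                self.dense.push(id);
            }
        }

        fn contains(&self, id: usize) -> bool {
            let i = self.sparse[id];
            i < self.dense.len() && self.dense[i] == id
        }

        fn clear(&mut self) {
            // O(1): resetting the dense length invalidates all sparse entries.
            self.dense.clear();
        }
    }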