Merge "Update http to 0.2.11" into main
diff --git a/crates/android_bp/.android-checksum.json b/crates/android_bp/.android-checksum.json
new file mode 100644
index 0000000..1b5492f
--- /dev/null
+++ b/crates/android_bp/.android-checksum.json
@@ -0,0 +1 @@
+{"package":null,"files":{"src/string.rs":"7ec29b3403b5620d393f95989f4c7f6352211e29c8bcaddd238a1bfa149b9714",".cargo-checksum.json":"2ae4cf705da6678c5cbfe15fd7d7d61c504f3316c57a46201d7703161dcebdc3","src/test_db.tar.xz":"f4e784bb3052ee48074d63f92055e84e4812c35b4b0630fa744972a5a63f34d4","Android.bp":"52e88adbd944b4568dcba9ac143c0c379a88756222663b3367a490dd071f4ce4","src/tests.rs":"e91ebbfaea2e4dda2a85143864e4fce9e4c16d1f5bc03ddf68ac6188b4257816","cargo_embargo.json":"0be745f01ba4955b20f2cb5011168c4d8cd396af75c007035ec693c67b17bce7","Cargo.lock":"3f20ce94a0d202f4ba9b8f23201d77821ec5b81e41bd3c9ba453fa51e529ff8c","METADATA":"e8f6766c93afec6eac6e8d07a665114beccd49d0739c3a09477ce8f7eb795370","MODULE_LICENSE_MIT":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","LICENSE":"7b364a931da0559e2f1dc81fee3747935fe5e6ce35d0ebd5e72211355fbfca76","examples/parse_all.py":"9fcfb435f2f053f0e3d8596bb39813234052584a0532a8cb05892df40164662f","src/macros.rs":"4230962411072838571efd27f341e2437ce45d54653466e79fc6953b794d694f","src/utils.rs":"0ffba546b2de0a97b2a490be5a7ebd159305a272f6fb8d695dbdeb1143801c3b","Cargo.toml":"6d596d5a713d2156e1a8b48c0f2d36bd419df3c9c017d1341a40e4ae177cca11","src/lib.rs":"52b2735a5625753dc968af9b4be06888d98649d57bf61263710759605d87d48f","Readme.md":"a061e9095e337a5ffb3993ec2f94d7d09175499197cc709fa87add44e9ed60be","patches/remove-android-bp.patch":"7a42d447f8fa7a86f90c269363c87b727e9b5757e18bd75fc4b9fce628289760","examples/parse_all.rs":"ed8672a0f34c7d4b144de67644bd31f94a14dd1757e1eb6939b257c476208485","src/parser.rs":"5ec43d1ce5f149e8fad6b560c3b77d7e418788e592564a198a1215f2e07f207e"}}
\ No newline at end of file
diff --git a/crates/android_bp/.cargo-checksum.json b/crates/android_bp/.cargo-checksum.json
new file mode 100644
index 0000000..b24482c
--- /dev/null
+++ b/crates/android_bp/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"aad558399ed9218f7002bcc8118cd76c7ddb08d25c19c289c564db8ed287ba5a","Cargo.toml":"8cde0ce9b9fb2dee4b7b28148b5158850fa8e901d350840fcd722cda78a999f5","LICENSE":"f71ee94bae0f2d3aaac8c01049c417e026dec7caea1145095ad24c466203799b","Readme.md":"4ce3ab1da588c55a34c7397b2674a8e2ff5e3919351a23bdcf948bf5fcd65bf6","examples/parse_all.py":"c12266ae57f8787444e40cf3f2c4d49f94bb84d8c8b7eebc07b356e312dbf5a9","examples/parse_all.rs":"9325c97bc6e58e0089f9517e4caff69a689bbe506ed642add380703bbe9c039d","fixtures/Android.bp":"5c46302420dbf441cbb0a7c10141650dec4443b66389bbedafb6a99e25d3cf8a","src/lib.rs":"09744ddf0f2596723f451b281eb31adf6d3e997986baf9442991f1394d538754","src/macros.rs":"a6a9d89f83554f3cfc4ddf46dc53a21d20aceccac4c96f453b2fecde1fb78ae0","src/parser.rs":"157256a0afaee27bac782c119e8f3cf8aac66df44ab200b36ffe38b0a3932a9b","src/string.rs":"43b2bcc56dccc4cceca8d4208103e3284512834ee12b1b930532e4a7b2ef6680","src/test_db.tar.xz":"c15ed7810e0ca86946f20849b8d426b7cf2f8f838b70dde07f430a7cd729f667","src/tests.rs":"3c760eb96fa5cde6585531adc39f6de8f3f200601c53e76034645e2ece174a2f","src/utils.rs":"d5db7065fe43ff5e9d4f6c80411cda9d33d3a5223dc01bb07f4c879eeb3a7bbb"},"package":"a9e5389cea0250e70ca9323df3b5b5b31749d4face289c92cd4e99f54b1d6425"}
\ No newline at end of file
diff --git a/crates/android_bp/Android.bp b/crates/android_bp/Android.bp
new file mode 100644
index 0000000..afdba07
--- /dev/null
+++ b/crates/android_bp/Android.bp
@@ -0,0 +1,31 @@
+// This file is generated by cargo_embargo.
+// Do not modify this file because the changes will be overridden on upgrade.
+
+package {
+    default_applicable_licenses: ["external_rust_crates_android_bp_license"],
+    default_team: "trendy_team_android_rust",
+}
+
+license {
+    name: "external_rust_crates_android_bp_license",
+    visibility: [":__subpackages__"],
+    license_kinds: ["SPDX-license-identifier-MIT"],
+    license_text: ["LICENSE"],
+}
+
+rust_library {
+    name: "libandroid_bp",
+    host_supported: true,
+    crate_name: "android_bp",
+    cargo_env_compat: true,
+    cargo_pkg_version: "0.3.0",
+    crate_root: "src/lib.rs",
+    edition: "2021",
+    rustlibs: ["libnom"],
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+    product_available: true,
+    vendor_available: true,
+}
diff --git a/crates/android_bp/Cargo.lock b/crates/android_bp/Cargo.lock
new file mode 100644
index 0000000..9b19e17
--- /dev/null
+++ b/crates/android_bp/Cargo.lock
@@ -0,0 +1,255 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "android_bp"
+version = "0.3.0"
+dependencies = [
+ "liblzma",
+ "nom",
+ "tar",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "bitflags"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
+
+[[package]]
+name = "cc"
+version = "1.0.99"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695"
+dependencies = [
+ "jobserver",
+ "libc",
+ "once_cell",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "errno"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba"
+dependencies = [
+ "libc",
+ "windows-sys",
+]
+
+[[package]]
+name = "filetime"
+version = "0.2.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "windows-sys",
+]
+
+[[package]]
+name = "jobserver"
+version = "0.1.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "libc"
+version = "0.2.155"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
+
+[[package]]
+name = "liblzma"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "243510a5543c358949902b9e76daec3a32d7b03a43abce823e7c62a1a8360172"
+dependencies = [
+ "liblzma-sys",
+]
+
+[[package]]
+name = "liblzma-sys"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "83cf78d20a45b5c0f3c7da2dcac255b230efe7d8684282bd35873164c1491187"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+]
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
+
+[[package]]
+name = "memchr"
+version = "2.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167"
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+
+[[package]]
+name = "pkg-config"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"
+
+[[package]]
+name = "redox_syscall"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
+dependencies = [
+ "bitflags 1.3.2",
+]
+
+[[package]]
+name = "rustix"
+version = "0.38.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f"
+dependencies = [
+ "bitflags 2.5.0",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys",
+]
+
+[[package]]
+name = "tar"
+version = "0.4.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909"
+dependencies = [
+ "filetime",
+ "libc",
+ "xattr",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
+
+[[package]]
+name = "xattr"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f"
+dependencies = [
+ "libc",
+ "linux-raw-sys",
+ "rustix",
+]
diff --git a/crates/android_bp/Cargo.toml b/crates/android_bp/Cargo.toml
new file mode 100644
index 0000000..f50dbdf
--- /dev/null
+++ b/crates/android_bp/Cargo.toml
@@ -0,0 +1,41 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "android_bp"
+version = "0.3.0"
+authors = ["tardyp"]
+description = "Android.bp parser"
+homepage = "https://github.com/tardyp/rs-bp"
+readme = "Readme.md"
+keywords = [
+    "android",
+    "bp",
+    "parser",
+    "blueprint",
+    "soong",
+]
+categories = ["parser-implementations"]
+license = "MIT"
+repository = "https://github.com/tardyp/rs-bp"
+
+[lib]
+name = "android_bp"
+
+[dependencies.nom]
+version = "7.1.3"
+
+[dev-dependencies.liblzma]
+version = "0.3.2"
+
+[dev-dependencies.tar]
+version = "0.4.41"
diff --git a/crates/android_bp/LICENSE b/crates/android_bp/LICENSE
new file mode 100644
index 0000000..13cefc3
--- /dev/null
+++ b/crates/android_bp/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Pierre Tardy
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/crates/android_bp/METADATA b/crates/android_bp/METADATA
new file mode 100644
index 0000000..692ea13
--- /dev/null
+++ b/crates/android_bp/METADATA
@@ -0,0 +1,17 @@
+name: "android_bp"
+description: "Android.bp parser"
+third_party {
+  version: "0.3.0"
+  license_type: NOTICE
+  last_upgrade_date {
+    year: 2024
+    month: 12
+    day: 17
+  }
+  homepage: "https://crates.io/crates/android_bp"
+  identifier {
+    type: "Archive"
+    value: "https://static.crates.io/crates/android_bp/android_bp-0.3.0.crate"
+    version: "0.3.0"
+  }
+}
diff --git a/crates/android_bp/MODULE_LICENSE_MIT b/crates/android_bp/MODULE_LICENSE_MIT
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/crates/android_bp/MODULE_LICENSE_MIT
diff --git a/crates/android_bp/Readme.md b/crates/android_bp/Readme.md
new file mode 100644
index 0000000..6fc58b5
--- /dev/null
+++ b/crates/android_bp/Readme.md
@@ -0,0 +1,28 @@
+## android-bp
+
+a rust crate to parse Android.bp files
+
+### Usage
+
+```rust
+    use android_bp::BluePrint;
+
+    let bp = BluePrint::from_file("fixtures/Android.bp").unwrap();
+    println!("{:#?}", bp);
+
+    // variables are accessible as a rust HashMap
+    println!("{:#?}", bp.variables);
+    for m in &bp.modules {
+        if m.typ == "rust_binary" {
+            println!("{:?}", m.get("name").unwrap());
+        }
+    }
+    // or iter them by type
+    for m in bp.modules_by_type("rust_host_test") {
+        // m.get return an sometime inconvenient Option<&Value>
+        // so some helper methods are provided
+        let name = m.get_string("name").unwrap();
+        let srcs = m.get_array("srcs").unwrap();
+        println!("{:?} {:?}", name, srcs);
+    }
+```
diff --git a/crates/android_bp/cargo_embargo.json b/crates/android_bp/cargo_embargo.json
new file mode 100644
index 0000000..9e26dfe
--- /dev/null
+++ b/crates/android_bp/cargo_embargo.json
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/crates/android_bp/examples/parse_all.py b/crates/android_bp/examples/parse_all.py
new file mode 100644
index 0000000..321e027
--- /dev/null
+++ b/crates/android_bp/examples/parse_all.py
@@ -0,0 +1,15 @@
+from android_bp import BluePrint
+import os, sys
+import time
+t1 = time.time()
+num_files = 0
+for root, dirs, files in os.walk(sys.argv[1] if len(sys.argv) > 1 else "."):
+    for file in files:
+        if file == "Android.bp":
+            b = BluePrint.from_file(os.path.join(root, file))
+            print(b.variables)
+            print([bl.__type__ for bl in b.modules])
+            for bl in b.modules_by_type("rust_binary"):
+                print(bl.name, bl.srcs)
+            num_files += 1
+print("time: ", time.time() - t1, "num_files: ", num_files)
diff --git a/crates/android_bp/examples/parse_all.rs b/crates/android_bp/examples/parse_all.rs
new file mode 100644
index 0000000..717a1d5
--- /dev/null
+++ b/crates/android_bp/examples/parse_all.rs
@@ -0,0 +1,45 @@
+use android_bp::BluePrint;
+use std::path::Path;
+
+fn main() {
+    let arg1 = std::env::args().nth(1).unwrap();
+    let dir_root = Path::new(&arg1);
+    let t1 = std::time::Instant::now();
+    let num_parsed = walk_dir(dir_root);
+    println!("{} files parsed in {:.3}s", num_parsed, t1.elapsed().as_secs_f32());
+}
+fn walk_dir(dir: &Path) -> usize {
+    let mut num_files = 0;
+    for entry in dir.read_dir().unwrap() {
+        let path = entry.unwrap().path();
+        if path.is_dir() {
+            if path.file_name().unwrap().to_str().unwrap() == "out" {
+                continue;
+            }
+            if path.file_name().unwrap().to_str().unwrap().starts_with("."){
+                continue;
+            }
+            
+            num_files += walk_dir(&path);
+        } else {
+            let file_name = path.file_name().unwrap().to_str().unwrap();
+            if file_name == "Android.bp" {
+                let result = BluePrint::from_file(&path);
+                match result {
+                    Ok(blueprint) => {
+                        num_files += 1;
+                        println!("{}", path.to_string_lossy());
+                        for module in blueprint.modules {
+                            println!("{} {:?}", module.typ, module.get("name"));
+                        }
+                    }
+                    Err(e) => {
+                        println!("{}: {}", path.to_string_lossy(), e);
+                        panic!("please report! this file is not parsed correctly");
+                    }
+                }
+            }
+        }
+    }
+    num_files
+}
diff --git a/crates/android_bp/patches/remove-android-bp.patch b/crates/android_bp/patches/remove-android-bp.patch
new file mode 100644
index 0000000..7b97886
--- /dev/null
+++ b/crates/android_bp/patches/remove-android-bp.patch
@@ -0,0 +1,25 @@
+diff --git a/fixtures/Android.bp b/fixtures/Android.bp
+deleted file mode 100644
+index fbfd32ac..00000000
+--- a/fixtures/Android.bp
++++ /dev/null
+@@ -1,18 +0,0 @@
+-cc_library_shared {
+-     name: "libxmlrpc++",
+-
+-     rtti: true,
+-     cppflags: [
+-           "-Wall",
+-           "-Werror",
+-           "-fexceptions",
+-     ],
+-     export_include_dirs: ["src"],
+-     srcs: ["src/**/*.cpp"],
+-
+-     target: {
+-           darwin: {
+-                enabled: false,
+-           },
+-     },
+-}
+\ No newline at end of file
diff --git a/crates/android_bp/src/lib.rs b/crates/android_bp/src/lib.rs
new file mode 100644
index 0000000..e43a8c8
--- /dev/null
+++ b/crates/android_bp/src/lib.rs
@@ -0,0 +1,15 @@
+#![doc = include_str!("../Readme.md")]
+
+
+
+mod parser;
+#[macro_use]
+mod macros;
+mod utils;
+mod tests;
+mod string;
+
+pub use parser::BluePrint;
+pub use parser::Value;
+pub use parser::Module;
+pub use parser::Map;
diff --git a/crates/android_bp/src/macros.rs b/crates/android_bp/src/macros.rs
new file mode 100644
index 0000000..a1d88f2
--- /dev/null
+++ b/crates/android_bp/src/macros.rs
@@ -0,0 +1,26 @@
+// define macro tag with additional context
+
+#[macro_export(local_inner_macros)]
+#[doc(hidden)]
+macro_rules! context_tag {
+    ($tag:expr) => {
+        context(
+            $tag,
+            delimited(space_or_comments, tag($tag), space_or_comments),
+        )
+    };
+}
+// define macro ending delimiter with optional comma
+#[macro_export(local_inner_macros)]
+#[doc(hidden)]
+macro_rules! end_delimiter {
+    ($tag:expr) => {
+        tuple((
+            space_or_comments,
+            opt(char(',')),
+            space_or_comments,
+            cut(tag($tag)),
+            space_or_comments,
+        ))
+    };
+}
diff --git a/crates/android_bp/src/parser.rs b/crates/android_bp/src/parser.rs
new file mode 100644
index 0000000..0608700
--- /dev/null
+++ b/crates/android_bp/src/parser.rs
@@ -0,0 +1,411 @@
+use crate::string::parse_string;
+use crate::{context_tag, end_delimiter, utils::*};
+use nom::combinator::map_res;
+use nom::Err;
+use nom::{
+    branch::alt,
+    bytes::complete::tag,
+    character::complete::char,
+    combinator::{cut, map, opt},
+    error::{context, convert_error, VerboseError},
+    multi::{many0, separated_list0},
+    sequence::{delimited, tuple},
+};
+use std::collections::HashMap;
+use std::ops::{Deref, DerefMut};
+use std::path::Path;
+
+/// a dictionary in a blueprint file
+#[derive(Debug, PartialEq, Clone, Eq)]
+pub struct Map(pub HashMap<String, Value>);
+impl Deref for Map {
+    type Target = HashMap<String, Value>;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+impl DerefMut for Map {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+fn parse_dict(input: &str) -> VerboseResult<Map> {
+    context(
+        "dict",
+        map(
+            delimited(
+                tuple((space_or_comments, context_tag!("{"), space_or_comments)),
+                separated_list0(char(','), parse_module_entry),
+                end_delimiter!("}"),
+            ),
+            |entries| Map(entries.into_iter().collect()),
+        ),
+    )(input)
+}
+#[derive(Debug, PartialEq, Clone, Eq)]
+pub struct Function {
+    pub name: String,
+    pub args: Vec<Value>,
+}
+fn parse_function(input: &str) -> VerboseResult<Function> {
+    context(
+        "function",
+        map(
+            tuple((
+                space_or_comments,
+                identifier,
+                space_or_comments,
+                delimited(
+                    tuple((space_or_comments, context_tag!("("), space_or_comments)),
+                    separated_list0(comma, parse_expr),
+                    end_delimiter!(")"),
+                ),
+            )),
+            |(_, name, _, args)| Function {
+                name: name.to_string(),
+                args,
+            },
+        ),
+    )(input)
+}
+/// a value in a blueprint file
+#[derive(Debug, PartialEq, Clone, Eq)]
+pub enum Value {
+    String(String),
+    Integer(i64),
+    Array(Vec<Value>),
+    Boolean(bool),
+    Map(Map),
+    Ident(String),
+    ConcatExpr(Vec<Value>),
+    Function(Function),
+}
+// convert value from str
+impl From<&str> for Value {
+    fn from(s: &str) -> Self {
+        Value::String(s.to_string())
+    }
+}
+fn parse_value(input: &str) -> VerboseResult<Value> {
+    context(
+        "value",
+        alt((
+            map(parse_array, Value::Array),
+            map(parse_function, Value::Function),
+            map(string_literal, Value::String),
+            map(parse_bool, Value::Boolean),
+            map(parse_dict, Value::Map),
+            map(parse_int, Value::Integer),
+            map(identifier, |x| Value::Ident(x.to_string())),
+        )),
+    )(input)
+}
+fn concat_value_string(values: Vec<Value>) -> Result<Value, &'static str> {
+    let mut result = String::new();
+    for value in values {
+        match value {
+            Value::String(s) => result.push_str(&s),
+            _ => Err("value is not a string")?,
+        }
+    }
+    Ok(Value::String(result))
+}
+fn concat_value_array(values: Vec<Value>) -> Result<Value, &'static str> {
+    let mut result = Vec::new();
+    for value in values {
+        match value {
+            Value::Array(a) => result.extend(a),
+            _ => Err("value is not an array")?,
+        }
+    }
+    Ok(Value::Array(result))
+}
+pub(crate) fn parse_expr(input: &str) -> VerboseResult<Value> {
+    // in bp, value can be combined with '+' operator
+    // this parser parse the expression and combine the values
+    // into a single value, if there is no Ident in the values
+    context(
+        "expr",
+        map_res(
+            separated_list0(
+                tuple((space_or_comments, char('+'), space_or_comments)),
+                parse_value,
+            ),
+            |values| {
+                match values.len() {
+                    0 => Err("no value"),
+                    1 => Ok(values[0].clone()),
+                    _ => {
+                        // if there is one ident we cannot concat
+                        if values
+                            .iter()
+                            .any(|v| matches!(v, Value::Ident(_) | Value::Function(_)))
+                        {
+                            return Ok(Value::ConcatExpr(values));
+                        }
+                        match &values[0] {
+                            Value::String(_) => concat_value_string(values),
+                            Value::Array(_) => concat_value_array(values),
+                            _ => Err("first value is not a string"),
+                        }
+                    }
+                }
+            },
+        ),
+    )(input)
+}
+pub(crate) fn parse_array(input: &str) -> VerboseResult<Vec<Value>> {
+    context(
+        "array",
+        delimited(
+            ws(char('[')),
+            separated_list0(comma, parse_expr),
+            end_delimiter!("]"),
+        ),
+    )(input)
+}
+
+/// a blueprint file
+#[derive(Debug, PartialEq, Clone, Eq)]
+pub struct BluePrint {
+    /// variables in the blueprint file
+    /// found in root of the file in the form of `key = value`
+    pub variables: HashMap<String, Value>,
+    /// all ordered modules in the blueprint file
+    pub modules: Vec<Module>,
+}
+
+/// a module in a blueprint file
+#[derive(Debug, PartialEq, Clone, Eq)]
+pub struct Module {
+    pub typ: String,
+    pub entries: HashMap<String, Value>,
+}
+impl Module {
+    /// get an attribute value from a module
+    pub fn get(&self, key: &str) -> Option<&Value> {
+        self.entries.get(key)
+    }
+    /// get a string attribute value from a module
+    pub fn get_string(&self, key: &str) -> Option<&String> {
+        match self.get(key) {
+            Some(Value::String(s)) => Some(s),
+            _ => None,
+        }
+    }
+    /// get a boolean attribute value from a module
+    pub fn get_bool(&self, key: &str) -> Option<bool> {
+        match self.get(key) {
+            Some(Value::Boolean(b)) => Some(*b),
+            _ => None,
+        }
+    }
+    /// get an array attribute value from a module
+    pub fn get_array(&self, key: &str) -> Option<&Vec<Value>> {
+        match self.get(key) {
+            Some(Value::Array(a)) => Some(a),
+            _ => None,
+        }
+    }
+    /// get a map attribute value from a module
+    pub fn get_map(&self, key: &str) -> Option<&Map> {
+        match self.get(key) {
+            Some(Value::Map(d)) => Some(d),
+            _ => None,
+        }
+    }
+    /// get an identifier attribute value from a module
+    pub fn get_ident(&self, key: &str) -> Option<&String> {
+        match self.get(key) {
+            Some(Value::Ident(i)) => Some(i),
+            _ => None,
+        }
+    }
+    /// get an integer attribute value from a module
+    pub fn get_int(&self, key: &str) -> Option<i64> {
+        match self.get(key) {
+            Some(Value::Integer(i)) => Some(*i),
+            _ => None,
+        }
+    }
+}
+/// parse a module entry, with `:` as delimiter
+pub(crate) fn parse_module_entry(input: &str) -> VerboseResult<(String, Value)> {
+    _parse_module_entry(input, ':')
+}
+/// second form of module entry, with `=` as delimiter
+pub(crate) fn parse_module_entry2(input: &str) -> VerboseResult<(String, Value)> {
+    _parse_module_entry(input, '=')
+}
+pub(crate) fn _parse_module_entry(input: &str, delimiter: char) -> VerboseResult<(String, Value)> {
+    context(
+        "module entry",
+        map(
+            tuple((
+                space_or_comments,
+                alt((
+                    map(identifier, |x| x.to_string()),
+                    parse_string::<VerboseError<&str>>,
+                )),
+                space_or_comments,
+                char(delimiter),
+                space_or_comments,
+                cut(parse_expr),
+                space_or_comments,
+            )),
+            |(_, key, _, _, _, value, _)| (key.to_string(), value),
+        ),
+    )(input)
+}
+
+pub(crate) fn parse_module(input: &str) -> VerboseResult<Module> {
+    // parse a identifier followed by a module of entries
+    let (input, _) = space_or_comments(input)?;
+    let (input, ident) = identifier(input)?;
+    let (input, _) = space_or_comments(input)?;
+    let (input, module) = context(
+        "module",
+        alt((
+            map(
+                delimited(
+                    tuple((space_or_comments, context_tag!("{"), space_or_comments)),
+                    separated_list0(char(','), parse_module_entry),
+                    end_delimiter!("}"),
+                ),
+                |entries| entries.into_iter().collect(),
+            ),
+            map(
+                delimited(
+                    tuple((space_or_comments, context_tag!("("), space_or_comments)),
+                    separated_list0(char(','), parse_module_entry2),
+                    end_delimiter!(")"),
+                ),
+                |entries| entries.into_iter().collect(),
+            ),
+        )),
+    )(input)?;
+    Ok((
+        input,
+        Module {
+            typ: ident.to_string(),
+            entries: module,
+        },
+    ))
+}
+
+pub(crate) fn parse_define(input: &str) -> VerboseResult<(String, String, Value)> {
+    context(
+        "define",
+        map(
+            tuple((
+                space_or_comments,
+                identifier,
+                space_or_comments,
+                alt((tag("="), tag("+="))),
+                space_or_comments,
+                cut(parse_expr),
+                space_or_comments,
+            )),
+            |(_, key, _, op, _, value, _)| (key.to_string(), op.to_string(), value),
+        ),
+    )(input)
+}
+
+pub(crate) fn parse_blueprint(input: &str) -> VerboseResult<BluePrint> {
+    let mut entries = Vec::new();
+    let mut variables = HashMap::new();
+    let (input, _) = context(
+        "blueprint",
+        many0(alt((
+            map(parse_module, |b| {
+                entries.push(b);
+                ()
+            }),
+            map_res(parse_define, |(k, op, v)| match op.as_str() {
+                "=" => {
+                    variables.insert(k, v);
+                    Ok(())
+                }
+                "+=" => {
+                    let e = variables.entry(k);
+                    match e {
+                        std::collections::hash_map::Entry::Occupied(prev) => {
+                            let prev = prev.into_mut();
+                            match prev {
+                                Value::String(s) => {
+                                    match v {
+                                        Value::String(s2) => {
+                                            s.push_str(&s2);
+                                        }
+                                        _ => Err("cannot append value to string")?,
+                                    }
+                                }
+                                Value::Array(a) => {
+                                    match v {
+                                        Value::Array(a2) => {
+                                            a.extend(a2);
+                                        }
+                                        Value::Ident(_) => {
+                                            Err("FIXME in this case, we should turn the Array into ConcatExpr")?
+                                        }
+                                        _ => Err("cannot append value to array")?,
+                                    }
+                                }
+                                Value::Integer(i) => {
+                                    match v {
+                                        Value::Integer(i2) => {
+                                            *i += i2;
+                                        }
+                                        _ => Err("cannot append value to integer")?,
+                                    }
+                                }
+                                _ => Err("cannot append value to this type")?,
+                            }
+                        }
+                        std::collections::hash_map::Entry::Vacant(_) => Err("variable not found")?,
+                    }
+                    Ok(())
+                }
+                _ => Err("unknown operator"),
+            }),
+            space_or_comments1,
+        ))),
+    )(input)?;
+    Ok((
+        input,
+        BluePrint {
+            variables: variables,
+            modules: entries,
+        },
+    ))
+}
+
+pub(crate) fn format_err(input: &str, err: Err<VerboseError<&str>>) -> String {
+    match err {
+        Err::Error(e) | Err::Failure(e) => convert_error(input, e.into()),
+        Err::Incomplete(_) => "Incomplete".to_string(),
+    }
+}
+impl BluePrint {
+    /// parse an Android.bp file from a string
+    pub fn parse(input: &str) -> Result<Self, String> {
+        match parse_blueprint(input) {
+            Ok((rest, result)) => {
+                if rest.len() > 0 {
+                    return Err(format!("Unexpected left input: {}", rest));
+                }
+                Ok(result)
+            }
+            Err(err) => Err(format_err(input, err)),
+        }
+    }
+    /// parse an Android.bp file from a file path
+    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, String> {
+        let input = std::fs::read_to_string(path).map_err(|e| e.to_string())?;
+        Self::parse(&input)
+    }
+    /// get all modules of a specific type
+    pub fn modules_by_type<'a>(&'a self, typ: &'static str) -> impl Iterator<Item = &'a Module> {
+        self.modules.iter().filter(move |b| b.typ == typ)
+    }
+}
diff --git a/crates/android_bp/src/string.rs b/crates/android_bp/src/string.rs
new file mode 100644
index 0000000..1c9e125
--- /dev/null
+++ b/crates/android_bp/src/string.rs
@@ -0,0 +1,238 @@
+//! modified from nom example https://github.com/rust-bakery/nom/blob/7.1.3/examples/string.rs
+//! License MIT:
+//! from @0x7FFFFFFFFFFFFFFF and @Geal
+//! This example shows an example of how to parse an escaped string. The
+//! rules for the string are similar to JSON and rust. A string is:
+//!
+//! - Enclosed by double quotes
+//! - Can contain any raw unescaped code point besides \ and "
+//! - Matches the following escape sequences: \b, \f, \n, \r, \t, \", \\, \/
+//! - Matches code points like Rust: \u{XXXX}, where XXXX can be up to 6
+//!   hex characters
+//! - an escape followed by whitespace consumes all whitespace between the
+//!   escape and the next non-whitespace character
+
+use nom::branch::alt;
+use nom::bytes::streaming::{is_not, take_while_m_n};
+use nom::character::streaming::{char, multispace1};
+use nom::combinator::{map, map_opt, map_res, value, verify};
+use nom::error::{FromExternalError, ParseError};
+use nom::multi::fold_many0;
+use nom::sequence::{delimited, preceded, tuple};
+use nom::IResult;
+
+// parser combinators are constructed from the bottom up:
+// first we write parsers for the smallest elements (escaped characters),
+// then combine them into larger parsers.
+
+/// Parse a unicode sequence, of the form u{XXXX}, where XXXX is 1 to 6
+/// hexadecimal numerals. We will combine this later with parse_escaped_char
+/// to parse sequences like \u{00AC}.
+fn parse_unicode<'a, E>(input: &'a str) -> IResult<&'a str, char, E>
+where
+    E: ParseError<&'a str> + FromExternalError<&'a str, std::num::ParseIntError>,
+{
+    // `take_while_m_n` parses between `m` and `n` bytes (inclusive) that match
+    // a predicate. `parse_hex` here parses between 1 and 6 hexadecimal numerals.
+    let parse_hex = take_while_m_n(1, 6, |c: char| c.is_ascii_hexdigit());
+
+    // `preceded` takes a prefix parser, and if it succeeds, returns the result
+    // of the body parser. In this case, it parses u{XXXX}.
+    let parse_delimited_hex = preceded(
+        char('u'),
+        // `delimited` is like `preceded`, but it parses both a prefix and a suffix.
+        // It returns the result of the middle parser. In this case, it parses
+        // {XXXX}, where XXXX is 1 to 6 hex numerals, and returns XXXX
+        delimited(char('{'), parse_hex, char('}')),
+    );
+
+    // `map_res` takes the result of a parser and applies a function that returns
+    // a Result. In this case we take the hex bytes from parse_hex and attempt to
+    // convert them to a u32.
+    let parse_u32 = map_res(parse_delimited_hex, move |hex| u32::from_str_radix(hex, 16));
+
+    // map_opt is like map_res, but it takes an Option instead of a Result. If
+    // the function returns None, map_opt returns an error. In this case, because
+    // not all u32 values are valid unicode code points, we have to fallibly
+    // convert to char with from_u32.
+    map_opt(parse_u32, |value| std::char::from_u32(value))(input)
+}
+
+/// Parse a hex sequence, of the form xXX, where XX is 2 hexadecimal numerals.
+/// We will combine this later with parse_escaped_char
+/// to parse sequences like \x1b.
+fn parse_hex<'a, E>(input: &'a str) -> IResult<&'a str, char, E>
+where
+    E: ParseError<&'a str> + FromExternalError<&'a str, std::num::ParseIntError>,
+{
+    map_res(
+        tuple((
+            char('x'),
+            // `take_while_m_n` parses between `m` and `n` bytes (inclusive) that match
+            // a predicate. `parse_hex` here parses 2 hexadecimal numerals.
+            take_while_m_n(2, 2, |c: char| c.is_ascii_hexdigit()),
+        )),
+        |(_, hex)| {
+            let value = u8::from_str_radix(hex, 16)?;
+            Ok(value as char)
+        },
+    )(input)
+}
+
+/// Parse a oct sequence, of the form 0XX, where XX is 2 octal numerals.
+/// We will combine this later with parse_escaped_char
+/// to parse sequences like \033.
+fn parse_oct<'a, E>(input: &'a str) -> IResult<&'a str, char, E>
+where
+    E: ParseError<&'a str> + FromExternalError<&'a str, std::num::ParseIntError>,
+{
+    map_res(
+        tuple((
+            char('0'),
+            // `take_while_m_n` parses between `m` and `n` bytes (inclusive) that match
+            // a predicate. `parse_hex` here parses 2 hexadecimal numerals.
+            take_while_m_n(2, 2, |c: char| c.is_ascii_hexdigit()),
+        )),
+        |(_, hex)| {
+            let value = u8::from_str_radix(hex, 8)?;
+            Ok(value as char)
+        },
+    )(input)
+}
+
+/// Parse an escaped character: \n, \t, \r, \u{00AC}, etc.
+fn parse_escaped_char<'a, E>(input: &'a str) -> IResult<&'a str, char, E>
+where
+    E: ParseError<&'a str> + FromExternalError<&'a str, std::num::ParseIntError>,
+{
+    preceded(
+        char('\\'),
+        // `alt` tries each parser in sequence, returning the result of
+        // the first successful match
+        alt((
+            parse_unicode,
+            parse_hex,
+            parse_oct,
+            // The `value` parser returns a fixed value (the first argument) if its
+            // parser (the second argument) succeeds. In these cases, it looks for
+            // the marker characters (n, r, t, etc) and returns the matching
+            // character (\n, \r, \t, etc).
+            value('\n', char('n')),
+            value('\r', char('r')),
+            value('\t', char('t')),
+            value('\u{08}', char('b')),
+            value('\u{0C}', char('f')),
+            value('\\', char('\\')),
+            value('/', char('/')),
+            value('"', char('"')),
+        )),
+    )(input)
+}
+
+/// Parse a backslash, followed by any amount of whitespace. This is used later
+/// to discard any escaped whitespace.
+fn parse_escaped_whitespace<'a, E: ParseError<&'a str>>(
+    input: &'a str,
+) -> IResult<&'a str, &'a str, E> {
+    preceded(char('\\'), multispace1)(input)
+}
+
+/// Parse a non-empty block of text that doesn't include \ or "
+fn parse_literal<'a, E: ParseError<&'a str>>(input: &'a str) -> IResult<&'a str, &'a str, E> {
+    // `is_not` parses a string of 0 or more characters that aren't one of the
+    // given characters.
+    let not_quote_slash = is_not("\"\\");
+
+    // `verify` runs a parser, then runs a verification function on the output of
+    // the parser. The verification function accepts out output only if it
+    // returns true. In this case, we want to ensure that the output of is_not
+    // is non-empty.
+    verify(not_quote_slash, |s: &str| !s.is_empty())(input)
+}
+
+/// A string fragment contains a fragment of a string being parsed: either
+/// a non-empty Literal (a series of non-escaped characters), a single
+/// parsed escaped character, or a block of escaped whitespace.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum StringFragment<'a> {
+    Literal(&'a str),
+    EscapedChar(char),
+    EscapedWS,
+}
+
+/// Combine parse_literal, parse_escaped_whitespace, and parse_escaped_char
+/// into a StringFragment.
+fn parse_fragment<'a, E>(input: &'a str) -> IResult<&'a str, StringFragment<'a>, E>
+where
+    E: ParseError<&'a str> + FromExternalError<&'a str, std::num::ParseIntError>,
+{
+    alt((
+        // The `map` combinator runs a parser, then applies a function to the output
+        // of that parser.
+        map(parse_literal, StringFragment::Literal),
+        map(parse_escaped_char, StringFragment::EscapedChar),
+        value(StringFragment::EscapedWS, parse_escaped_whitespace),
+    ))(input)
+}
+
+/// Parse a string. Use a loop of parse_fragment and push all of the fragments
+/// into an output string.
+pub(crate) fn parse_string<'a, E>(input: &'a str) -> IResult<&'a str, String, E>
+where
+    E: ParseError<&'a str> + FromExternalError<&'a str, std::num::ParseIntError>,
+{
+    // fold_many0 is the equivalent of iterator::fold. It runs a parser in a loop,
+    // and for each output value, calls a folding function on each output value.
+    let build_string = fold_many0(
+        // Our parser function– parses a single string fragment
+        parse_fragment,
+        // Our init value, an empty string
+        String::new,
+        // Our folding function. For each fragment, append the fragment to the
+        // string.
+        |mut string, fragment| {
+            match fragment {
+                StringFragment::Literal(s) => string.push_str(s),
+                StringFragment::EscapedChar(c) => string.push(c),
+                StringFragment::EscapedWS => {}
+            }
+            string
+        },
+    );
+
+    // Finally, parse the string. Note that, if `build_string` could accept a raw
+    // " character, the closing delimiter " would never match. When using
+    // `delimited` with a looping parser (like fold_many0), be sure that the
+    // loop won't accidentally match your closing delimiter!
+    delimited(char('"'), build_string, char('"'))(input)
+}
+#[cfg(test)]
+mod tests {
+    use super::parse_string;
+    use nom::error::VerboseError;
+
+    #[test]
+    fn test_parse_string() {
+        let input = r#""Hello, world!""#;
+        let expected_output = Ok(("", "Hello, world!".to_string()));
+        assert_eq!(parse_string::<VerboseError<&str>>(input), expected_output);
+    }
+    #[test]
+    fn test_parse_escaped() {
+        let input = r#""Hello, \"world\"!""#;
+        let expected_output = Ok(("", "Hello, \"world\"!".to_string()));
+        assert_eq!(parse_string::<VerboseError<&str>>(input), expected_output);
+    }
+    #[test]
+    fn test_parse_escaped_x1b() {
+        let input = r#""echo \"\x1b""#;
+        let expected_output = Ok(("", "echo \"\x1b".to_string()));
+        assert_eq!(parse_string::<VerboseError<&str>>(input), expected_output);
+    }
+    #[test]
+    fn test_parse_escaped_033() {
+        let input = r#""echo \"\033""#;
+        let expected_output = Ok(("", "echo \"\x1b".to_string()));
+        assert_eq!(parse_string::<VerboseError<&str>>(input), expected_output);
+    }
+}
diff --git a/crates/android_bp/src/test_db.tar.xz b/crates/android_bp/src/test_db.tar.xz
new file mode 100644
index 0000000..6cd1575
--- /dev/null
+++ b/crates/android_bp/src/test_db.tar.xz
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:faa09385f32d8b3e4518c14384b52ef94f6430da5a838307a2e0af2940b89608
+size 1803816
diff --git a/crates/android_bp/src/tests.rs b/crates/android_bp/src/tests.rs
new file mode 100644
index 0000000..82c4873
--- /dev/null
+++ b/crates/android_bp/src/tests.rs
@@ -0,0 +1,393 @@
+#[cfg(test)]
+mod tests {
+
+    use std::io::Read;
+
+    use crate::parser::*;
+    use nom::error::VerboseError;
+    use nom::Err;
+
+    #[test]
+    fn test_parse_array() {
+        // Test case 1: Valid input
+        let input = r#"[ "value1", "value2", "value3" ]"#;
+        let expected_output = Ok(("", vec!["value1".into(), "value2".into(), "value3".into()]));
+        assert_eq!(parse_array(input), expected_output);
+
+        // Test case 2: Empty array
+        let input = r#"[]"#;
+        let expected_output = Ok(("", vec![]));
+        assert_eq!(parse_array(input), expected_output);
+
+        // Test case 3: Array with whitespace
+        let input = r#"[ "value1" , "value2" , "value3" ]"#;
+        let expected_output = Ok(("", vec!["value1".into(), "value2".into(), "value3".into()]));
+        assert_eq!(parse_array(input), expected_output);
+
+        // Test case 4: Array with empty values
+        let input = r#"[ "", "", "" ]"#;
+        let expected_output = Ok(("", vec!["".into(), "".into(), "".into()]));
+        assert_eq!(parse_array(input), expected_output);
+
+        // Test case 5: Invalid input - missing closing bracket
+        let input = r#"[ "value1", "value2", "value3""#;
+        assert!(parse_array(input).is_err());
+
+        // Test case 5: Invalid input - missing opening bracket
+        let input = r#""value1", "value2", "value3" ]"#;
+        assert!(parse_array(input).is_err());
+
+        // Test case 6: Array with trailing comma is not an error
+        let input = r#"[ "value1", "value2", "value3", ]"#;
+        let expected_output = Ok(("", vec!["value1".into(), "value2".into(), "value3".into()]));
+        assert_eq!(parse_array(input), expected_output);
+    }
+    #[test]
+    fn test_parse_entry() {
+        // Test case 1: Valid input
+        let input = r#"key: "value""#;
+        let expected_output = Ok(("", ("key".to_string(), Value::String("value".to_string()))));
+        assert_eq!(parse_module_entry(input), expected_output);
+
+        // Test case 2: Valid input with whitespace
+        let input = r#"  key  :   "value"  "#;
+        let expected_output = Ok(("", ("key".to_string(), Value::String("value".to_string()))));
+        assert_eq!(parse_module_entry(input), expected_output);
+
+        // Test case 3: Valid input with array value
+        let input = r#"key: [ "value1", "value2", "value3" ]"#;
+        let expected_output = Ok((
+            "",
+            (
+                "key".to_string(),
+                Value::Array(vec!["value1".into(), "value2".into(), "value3".into()]),
+            ),
+        ));
+        assert_eq!(parse_module_entry(input), expected_output);
+
+        // Test case 4: Invalid input - missing colon
+        let input = r#"key "value""#;
+        assert!(parse_module_entry(input).is_err());
+
+        // Test case 5: Invalid input - missing value
+        let input = r#"key:"#;
+        assert!(parse_module_entry(input).is_err());
+
+        // Test case 6: Invalid input - missing key
+        let input = r#":"value""#;
+        assert!(parse_module_entry(input).is_err());
+
+        // Test case 7: Invalid input - missing key and value
+        let input = r#":"#;
+        assert!(parse_module_entry(input).is_err());
+    }
+    #[test]
+    fn test_parse_module() {
+        let input = r#"
+            module_name {
+                key1: "value1",
+                key2: true,
+                key3: [ "value2", "value3" ],
+            }
+        "#;
+
+        let expected_output = Module {
+            typ: "module_name".to_string(),
+            entries: vec![
+                ("key1".to_string(), Value::String("value1".to_string())),
+                ("key2".to_string(), Value::Boolean(true)),
+                (
+                    "key3".to_string(),
+                    Value::Array(vec!["value2".into(), "value3".into()]),
+                ),
+            ]
+            .into_iter()
+            .collect(),
+        };
+
+        assert_eq!(parse_module(input), Ok(("", expected_output)));
+    }
+    #[test]
+    fn test_parse_blueprint() {
+        let input = r#"
+            module_name {
+                key1: "value1",
+                key2: true,
+                key3: [ "value2", "value3" ],
+            }
+            module_name2 {
+                key1: "value1",
+                key2: true,
+                key3: [ "value2", "value3" ],
+            }"#;
+        let output = BluePrint::parse(input).unwrap();
+        assert_eq!(output.modules.len(), 2);
+        assert_eq!(output.modules[0].typ, "module_name");
+        assert_eq!(output.modules[1].typ, "module_name2");
+        let mut keys = output.modules[0]
+            .entries
+            .keys()
+            .map(|x| x.to_owned())
+            .collect::<Vec<_>>();
+        keys.sort();
+        assert_eq!(
+            keys,
+            vec!["key1".to_string(), "key2".to_string(), "key3".to_string()]
+        );
+    }
+    #[test]
+    fn test_nested_dict() {
+        let input = r#"
+        rust_test_host {
+            name: "ss",
+            srcs: ["src/ss.rs"],
+            test_options: {
+                unit_test: true,
+            },
+        }        "#;
+        let output: Result<(&str, Module), Err<VerboseError<&str>>> = parse_module(input);
+        assert!(output.is_ok());
+    }
+
+    #[test]
+    fn test_comment() {
+        let input = r#"
+        rust_test_host {
+        //     name: "ss",
+        //
+        srcs: ["src/ss.rs"],
+        test_options: {
+            unit_test: true,
+        },
+        }        "#;
+        let output = parse_module(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+    }
+    #[test]
+    fn test_all_comment() {
+        let input = r#"/*
+        rust_test_host {
+        //     name: "ss",
+        //
+        srcs: ["src/ss.rs"],
+        test_options: {
+            unit_test: true,
+        },
+        }        
+        */"#;
+        let output = BluePrint::parse(input);
+        if output.is_err() {
+            println!("Error: {}", output.unwrap_err());
+            panic!("Error in parsing");
+        }
+    }
+
+    #[test]
+    fn test_issue_1() {
+        let input = r#"
+        aidl_interface {
+            name: "android.hardware.tetheroffload",
+            vendor_available: true,
+            srcs: ["android/hardware/tetheroffload/*.aidl"],
+            stability: "vintf",
+            backend: {
+                cpp: {
+                    enabled: false,
+                },
+                java: {
+                    sdk_version: "module_current",
+                    apex_available: [
+                        "com.android.tethering",
+                    ],
+                    min_sdk_version: "30",
+                    enabled: true,
+                },
+                ndk: {
+                    apps_enabled: false,
+                },
+            },
+            versions_with_info: [
+                {
+                    version: "1",
+                    imports: [],
+                },
+            ],
+            frozen: true,
+        
+        }
+        "#;
+        let output = parse_module(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+    }
+    #[test]
+    fn test_issue_2() {
+        let input = r#"
+        aidl_interface {
+            name: "android.hardw\"are.tetheroffload",
+        }
+        "#;
+        let output = parse_module(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+    }
+    #[test]
+    fn test_module_second_form() {
+        let input = r#"
+        aidl_interface(name = "android.hardware.tetheroffload")
+        "#;
+        let output = parse_module(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+    }
+    fn display_error<T>(input: &str, output: &Result<(&str, T), Err<VerboseError<&str>>>) -> () {
+        if let Err(e) = output {
+            println!("Error: {}", format_err(input, e.clone()));
+        }
+    }
+    #[test]
+    fn test_expr() {
+        let input = r#""abc" + "def""#;
+        let output = parse_expr(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+        assert!(output.as_ref().unwrap().0.is_empty());
+        assert!(output.unwrap().1 == Value::String("abcdef".to_string()));
+    }
+    #[test]
+    fn test_expr_array() {
+        let input = r#"["abc", "def"] + [ "ghi" ]"#;
+        let output = parse_expr(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+        assert!(output.as_ref().unwrap().0.is_empty());
+        assert!(output.unwrap().1 == Value::Array(vec!["abc".into(), "def".into(), "ghi".into()]));
+    }
+    #[test]
+    fn test_expr_ident() {
+        let input = r#"ident + [ "ghi" ]"#;
+        let output = parse_expr(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+        assert!(output.as_ref().unwrap().0.is_empty());
+        assert!(
+            output.unwrap().1
+                == Value::ConcatExpr([
+                    Value::Ident("ident".to_string()),
+                    Value::Array(["ghi".into()].into())
+                ].into())
+        );
+    }
+    #[test]
+    fn test_expr_value() {
+        let input = r#"123"#;
+        let output = parse_expr(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+        assert!(output.as_ref().unwrap().0.is_empty());
+        assert!(
+            output.unwrap().1
+                == Value::Integer(123));
+    }
+    // found in platform_testing/tests/health/scenarios/tests/Android.bp
+    #[test]
+    fn test_complicated_concat() {
+        let input = r#""out_dir=$$(dirname $(out)) && assets_dir=\"assets\" " +
+        "&& mkdir -p $$out_dir/$$assets_dir && src_protos=($(locations assets/*.textpb)) " +
+        "&& for file in $${src_protos[@]} ; do fname=$$(basename $$file) " +
+        "&& if ! ($(location aprotoc) --encode=longevity.profile.Configuration " +
+        "$(location :profile-proto-def) < $$file > " +
+        "$$out_dir/$$assets_dir/$${fname//.textpb/.pb}) ; then " +
+        "echo \"\x1b[0;31mFailed to parse profile $$file. See above for errors.\x1b[0m\" " +
+        "&& exit 1 ; fi ; done && jar cf $(out) -C $$(dirname $(out)) $$assets_dir""#;
+        let output = parse_expr(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+        assert!(output.as_ref().unwrap().0.is_empty());
+
+    }
+    #[test]
+    fn test_linecomment_wo_eol() {
+        let input = r#"// foo"#;
+        let output = BluePrint::parse(input);
+        assert!(output.is_ok());
+
+    }
+    #[test]
+    fn test_defines_extends(){
+        let input = r#"
+        var = ["a", "b"]
+        var2 = 12
+        var += ["c"]
+        var2 += 1
+        var3 = "abc"
+        var3 += "def"
+        "#;
+        let output = BluePrint::parse(input);
+        assert!(output.is_ok());
+        let bp = output.unwrap();
+        assert_eq!(bp.variables.get("var").unwrap(), &Value::Array(vec!["a".into(), "b".into(), "c".into()]));
+        assert_eq!(bp.variables.get("var2").unwrap(), &Value::Integer(13));
+        assert_eq!(bp.variables.get("var3").unwrap(), &Value::String("abcdef".to_string()));
+    }
+
+    #[test]
+    fn test_defines_extends_error(){
+        let input = r#"
+        var = ["a", "b"]
+        var2 = 12
+        var += 1
+        var2 += "a"
+        "#;
+        let output = BluePrint::parse(input);
+        println!("Error: {}", output.unwrap_err());
+        // assert!(output.is_err());
+    }
+    #[test]
+    fn test_function() {
+        let input = r#"method("ss")"#;
+        let output = parse_expr(input);
+        display_error(input, &output);
+        assert!(output.is_ok());
+        assert_eq!(output.unwrap().1, Value::Function(Function {
+            name: "method".to_string(),
+            args: vec![Value::String("ss".to_string())]
+        }));
+
+    }
+    #[test]
+    fn test_aosp_db() {
+        // generate tarball from aosp tree
+        // fd -g Android.bp | tar cJf ../rs-bp/src/test_db.tar.xz -T -
+        let data = include_bytes!("test_db.tar.xz");
+        let mut archive = tar::Archive::new(liblzma::read::XzDecoder::new(&data[..]));
+        let mut count = 0;
+        let mut bytes = 0;
+        let mut num_errors = 0;
+        let mut all_bp = Vec::new();
+        // first decompress in memory to avoid disk IO for measuring performance
+        for entry in archive.entries().unwrap() {
+            let entry = entry.unwrap();
+            let mut entry_data = std::io::BufReader::new(entry);
+            let mut contents = String::new();
+            entry_data.read_to_string(&mut contents).unwrap();
+            bytes += contents.len();
+            all_bp.push((format!("{:?}", entry_data.into_inner().path().unwrap()), contents));
+        }
+        let now = std::time::Instant::now();
+        for (path, contents) in all_bp {
+            let output = BluePrint::parse(&contents);
+            if output.is_err() {
+                println!("Error for file: {:?}", path);
+                println!("File content: {}", contents);
+                println!("Error: {}", output.unwrap_err());
+                num_errors += 1;
+            }
+            count += 1;
+        }
+        let elapsed = now.elapsed().as_secs_f32();
+        println!("{} files ({} bytes) parsed in {:.3}s {}MB/s", count, bytes, elapsed, bytes as f32 / elapsed / 1024.0 / 1024.0);
+        assert_eq!(num_errors, 0);
+    }
+}
diff --git a/crates/android_bp/src/utils.rs b/crates/android_bp/src/utils.rs
new file mode 100644
index 0000000..ebedd30
--- /dev/null
+++ b/crates/android_bp/src/utils.rs
@@ -0,0 +1,107 @@
+use nom::{
+    branch::alt,
+    bytes::complete::{tag, take_until, take_while},
+    character::complete::{alpha1, alphanumeric1, digit1, multispace1},
+    combinator::{map, map_res, opt, recognize, value},
+    error::{context, VerboseError},
+    multi::{many0, many0_count, many1},
+    sequence::{delimited, pair, tuple},
+    IResult, Parser,
+};
+use crate::string::parse_string;
+
+/// Result type with verbose error
+pub(crate) type VerboseResult<'a, T> = IResult<&'a str, T, VerboseError<&'a str>>;
+
+pub(crate) fn comment(input: &str) -> VerboseResult<()> {
+    context(
+        "comment",
+        value(
+            (),
+            tuple((tag("//"), take_while(|c|c!='\n'))),
+        ),
+    )(input)
+}
+
+pub(crate) fn multiline_comment(input: &str) -> VerboseResult<()> {
+    context(
+        "multiline comment",
+        value((), delimited(tag("/*"), take_until("*/"), tag("*/"))),
+    )(input)
+}
+
+pub(crate) fn space_or_comments(input: &str) -> VerboseResult<()> {
+    value(
+        (),
+        many0(alt((value((), multispace1), comment, multiline_comment))),
+    )(input)
+}
+pub(crate) fn space_or_comments1(input: &str) -> VerboseResult<()> {
+    value(
+        (),
+        many1(alt((value((), multispace1), comment, multiline_comment))),
+    )(input)
+}
+
+pub(crate)fn ws<'a, F, O>(inner: F) -> impl Parser<&'a str, O, VerboseError<&'a str>>
+    where
+    F: Parser<&'a str, O, VerboseError<&'a str>>,
+{
+    delimited(
+        space_or_comments,
+        inner,
+        space_or_comments
+    )
+}
+pub(crate) fn identifier(input: &str) -> VerboseResult<&str> {
+    recognize(pair(
+        alt((alpha1, tag("_"))),
+        many0_count(alt((alphanumeric1, tag("_")))),
+    ))(input)
+}
+
+pub(crate) fn string_literal(input: &str) -> VerboseResult<String> {
+    context(
+        "string",
+        parse_string
+    )(input)
+}
+
+pub(crate) fn comma(input: &str) -> VerboseResult<&str> {
+    ws(tag(",")).parse(input)
+}
+
+pub(crate) fn parse_bool(input: &str) -> VerboseResult<bool> {
+    alt((map(tag("true"), |_| true), map(tag("false"), |_| false)))(input)
+}
+
+pub(crate) fn parse_int(input: &str) -> VerboseResult<i64> {
+    map_res(
+        recognize(pair(opt(tag("-")), digit1)),
+        |x| i64::from_str_radix(x, 10),
+    )(input)
+}
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_parse_bool() {
+        let input = "true";
+        let expected_output = Ok(("", true));
+        assert_eq!(parse_bool(input), expected_output);
+    }
+    #[test]
+    fn test_parse_int() {
+        let input = "123";
+        let expected_output = Ok(("", 123));
+        assert_eq!(parse_int(input), expected_output);
+    }
+    #[test]
+    fn test_parse_nint() {
+        let input: &str = "-123";
+        let expected_output = Ok(("", -123));
+        assert_eq!(parse_int(input), expected_output);
+    }
+}
+
diff --git a/crates/drm-ffi/.android-checksum.json b/crates/drm-ffi/.android-checksum.json
index 599e7d2..521c67a 100644
--- a/crates/drm-ffi/.android-checksum.json
+++ b/crates/drm-ffi/.android-checksum.json
@@ -1 +1 @@
-{"package":null,"files":{".cargo-checksum.json":"a1a12763d844e46ffef777f5b83fe35a06bb13bf1a1a6ade5ffbd3444f44df9b","Android.bp":"028dbe76be9cce9f94799e465ca5fa8624dd833d1290a2b8565468d3b69323ad","Cargo.toml":"7b33decfdf0874ee3826b7e351e5d8c27291f3f08d406a07178d3f0ddfed8916","LICENSE":"38620a3cfaeec97a9197e8c39e436ea7f0bc86699b1f1c35f1aa41785b6d4eac","METADATA":"77158c13fc92f4dbfb313b228ea22060f2ae5b5d2b87689b5ca69075b91dc797","MODULE_LICENSE_MIT":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","cargo_embargo.json":"aa45a963da01d3f018be316cd5b7646a5b413ce2611c5218f2914d2e8a9efd0e","src/gem.rs":"a6b414ac00651970e46031626dce021182f9f9bb9a3a10646410de306ad0939e","src/ioctl.rs":"09c6226e1644f17147e2be052ca8597b5ccc52918a2b76512fbbc79dcc468cb0","src/lib.rs":"06d99efdecd912957491411fc72f7f52c5e05dfb055a111f17bf833e69514067","src/mode.rs":"7b3c0af41a1e4a87207f392ff9fba06fe4d2f47daa40c3c6c5647d71df1f8ad2","src/syncobj.rs":"f676f0ab659e71dfe9d5bb1010c91a6a2d48fabcca0118a54e275c534ac418af","src/utils.rs":"3770cbcbe656e828d3e6cb2673a83396d744498f5a9ff2975126eee70f2b3d08"}}
\ No newline at end of file
+{"package":null,"files":{".cargo-checksum.json":"7bdc2ccd0ecd95a020630341e64f01502ede18371e808efa9a755d22885eb186","Android.bp":"9248c7e06c43d3c3d2c92741a0e3e7add3b647b964a0d384a4f371a941d3b3bf","Cargo.toml":"440e17072ab09e910da4b8d510c1a9cb55924d14ec1d24f468f4912a8d5bcd5d","LICENSE":"38620a3cfaeec97a9197e8c39e436ea7f0bc86699b1f1c35f1aa41785b6d4eac","METADATA":"f67e2294b8d77a5eddd99302604a87df0ec514266ab2c80a24cf68ea4929fa15","MODULE_LICENSE_MIT":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","cargo_embargo.json":"aa45a963da01d3f018be316cd5b7646a5b413ce2611c5218f2914d2e8a9efd0e","src/gem.rs":"a6b414ac00651970e46031626dce021182f9f9bb9a3a10646410de306ad0939e","src/ioctl.rs":"09c6226e1644f17147e2be052ca8597b5ccc52918a2b76512fbbc79dcc468cb0","src/lib.rs":"97bd32903a1a71eb8cf996a44520eb246951b6e633c46e42467d271a90df99ac","src/mode.rs":"868793162bcf14ba04ff945afde81776a1f764e85ffe2dcbc10f39e3724417a5","src/syncobj.rs":"f676f0ab659e71dfe9d5bb1010c91a6a2d48fabcca0118a54e275c534ac418af","src/utils.rs":"305285d984cb58180b481546319d2c4714b6a9769897a2a522b5b725ddfe435a"}}
\ No newline at end of file
diff --git a/crates/drm-ffi/.cargo-checksum.json b/crates/drm-ffi/.cargo-checksum.json
index 97cf1cf..bb9adf5 100644
--- a/crates/drm-ffi/.cargo-checksum.json
+++ b/crates/drm-ffi/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.toml":"1439b5e87cdc3749f53871374eb4f764d648bb086e0a60950e44a92d71fdf82f","LICENSE":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","src/gem.rs":"d72e891d65a984a6e8d6f94c8064db16fa357de818a0a9663a9afd8b0a142123","src/ioctl.rs":"81ca595aec29f159b9b27829a15bf118ca1594828a975af3efb8dbdbaed5f279","src/lib.rs":"530fe681491369d541abef623be3486d974f890d9afb6379eeffeaf6b488edb4","src/mode.rs":"7030f036abf987a0b72164a28f7d62109df2823d1fcb7f13b4f656b993f9c35a","src/syncobj.rs":"01a78f587456f6ea2d92c01e7ab7bc146ec9857349185fd5c7e035a5f8bc53e9","src/utils.rs":"229b8dc510e459241f373e543a22ab4102b0426bfa3d0c3db4e4cd590dc0aa11"},"package":"97c98727e48b7ccb4f4aea8cfe881e5b07f702d17b7875991881b41af7278d53"}
\ No newline at end of file
+{"files":{"Cargo.toml":"37194494d53ff287a2559c39cd16a33d9bc8e0a36d9acc771e95abdd226fd68b","LICENSE":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","src/gem.rs":"d72e891d65a984a6e8d6f94c8064db16fa357de818a0a9663a9afd8b0a142123","src/ioctl.rs":"81ca595aec29f159b9b27829a15bf118ca1594828a975af3efb8dbdbaed5f279","src/lib.rs":"df8e259cd8fc9861bd9ec3554236ead3b3c47122336547053bba88393bc461d2","src/mode.rs":"d6e12a30a18082e49f005f528d9fd0e0acf52ab4afdd5a1446787d292e1b2d06","src/syncobj.rs":"01a78f587456f6ea2d92c01e7ab7bc146ec9857349185fd5c7e035a5f8bc53e9","src/utils.rs":"9064848a7ff818785c789c78ca7edb06484fec700b63fb82e9e3258fae4f64e2"},"package":"d8e41459d99a9b529845f6d2c909eb9adf3b6d2f82635ae40be8de0601726e8b"}
\ No newline at end of file
diff --git a/crates/drm-ffi/Android.bp b/crates/drm-ffi/Android.bp
index 6851ce2..03a5509 100644
--- a/crates/drm-ffi/Android.bp
+++ b/crates/drm-ffi/Android.bp
@@ -18,7 +18,7 @@
     host_supported: true,
     crate_name: "drm_ffi",
     cargo_env_compat: true,
-    cargo_pkg_version: "0.8.0",
+    cargo_pkg_version: "0.9.0",
     crate_root: "src/lib.rs",
     edition: "2021",
     rustlibs: [
diff --git a/crates/drm-ffi/Cargo.toml b/crates/drm-ffi/Cargo.toml
index 455c416..20baf09 100644
--- a/crates/drm-ffi/Cargo.toml
+++ b/crates/drm-ffi/Cargo.toml
@@ -11,16 +11,26 @@
 
 [package]
 edition = "2021"
-rust-version = "1.65"
+rust-version = "1.66"
 name = "drm-ffi"
-version = "0.8.0"
+version = "0.9.0"
 authors = ["Tyler Slabinski <[email protected]>"]
+build = false
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
 description = "Safe, low-level bindings to the Direct Rendering Manager API"
+readme = false
 license = "MIT"
 repository = "https://github.com/Smithay/drm-rs"
 
+[lib]
+name = "drm_ffi"
+path = "src/lib.rs"
+
 [dependencies.drm-sys]
-version = "0.7.0"
+version = "0.8.0"
 
 [dependencies.rustix]
 version = "0.38.22"
diff --git a/crates/drm-ffi/METADATA b/crates/drm-ffi/METADATA
index 75981de..46ddec9 100644
--- a/crates/drm-ffi/METADATA
+++ b/crates/drm-ffi/METADATA
@@ -1,17 +1,17 @@
 name: "drm-ffi"
 description: "Safe, low-level bindings to the Direct Rendering Manager API"
 third_party {
-  version: "0.8.0"
+  version: "0.9.0"
   license_type: NOTICE
   last_upgrade_date {
-    year: 2024
-    month: 8
-    day: 8
+    year: 2025
+    month: 1
+    day: 22
   }
   homepage: "https://crates.io/crates/drm-ffi"
   identifier {
     type: "Archive"
-    value: "https://static.crates.io/crates/drm-ffi/drm-ffi-0.8.0.crate"
-    version: "0.8.0"
+    value: "https://static.crates.io/crates/drm-ffi/drm-ffi-0.9.0.crate"
+    version: "0.9.0"
   }
 }
diff --git a/crates/drm-ffi/src/lib.rs b/crates/drm-ffi/src/lib.rs
index 4be9614..4565507 100644
--- a/crates/drm-ffi/src/lib.rs
+++ b/crates/drm-ffi/src/lib.rs
@@ -160,9 +160,7 @@
     mut desc_buf: Option<&mut Vec<i8>>,
 ) -> io::Result<drm_version> {
     let mut sizes = drm_version::default();
-    unsafe {
-        ioctl::get_version(fd, &mut sizes)?;
-    }
+    unsafe { ioctl::get_version(fd, &mut sizes) }?;
 
     map_reserve!(name_buf, sizes.name_len as usize);
     map_reserve!(date_buf, sizes.date_len as usize);
@@ -178,9 +176,7 @@
         ..Default::default()
     };
 
-    unsafe {
-        ioctl::get_version(fd, &mut version)?;
-    }
+    unsafe { ioctl::get_version(fd, &mut version) }?;
 
     map_set!(name_buf, version.name_len as usize);
     map_set!(date_buf, version.date_len as usize);
diff --git a/crates/drm-ffi/src/mode.rs b/crates/drm-ffi/src/mode.rs
index 1b30b8f..2fe66d3 100644
--- a/crates/drm-ffi/src/mode.rs
+++ b/crates/drm-ffi/src/mode.rs
@@ -396,7 +396,7 @@
         } else {
             &tmp_mode as *const _ as _
         },
-        count_modes: if force_probe { 0 } else { 1 },
+        count_modes: u32::from(!force_probe),
         ..Default::default()
     };
 
@@ -427,13 +427,7 @@
             prop_values_ptr: map_ptr!(&prop_values),
             count_modes: match &modes {
                 Some(b) => b.capacity() as _,
-                None => {
-                    if force_probe {
-                        0
-                    } else {
-                        1
-                    }
-                }
+                None => u32::from(!force_probe),
             },
             count_props: map_len!(&props),
             count_encoders: map_len!(&encoders),
diff --git a/crates/drm-ffi/src/utils.rs b/crates/drm-ffi/src/utils.rs
index 4055365..8788880 100644
--- a/crates/drm-ffi/src/utils.rs
+++ b/crates/drm-ffi/src/utils.rs
@@ -18,15 +18,30 @@
     };
 }
 
-/// Takes an `Option<&mut Vec<T>>` style buffer and shrinks it.
+/// Takes an `Option<&mut Vec<T>>` style buffer and reserves space.
 macro_rules! map_reserve {
     ($buffer:expr, $size:expr) => {
         match $buffer {
-            Some(ref mut b) => b.reserve_exact($size - b.len()),
+            Some(ref mut b) => crate::utils::map_reserve_inner(b, $size),
             _ => (),
         }
     };
 }
+
+pub(crate) fn map_reserve_inner<T>(b: &mut Vec<T>, size: usize) {
+    let old_len = b.len();
+    if size <= old_len {
+        return;
+    }
+    b.reserve_exact(size - old_len);
+
+    // `memset` to 0, at least so Valgrind doesn't complain
+    unsafe {
+        let ptr = b.as_mut_ptr().add(old_len) as *mut u8;
+        ptr.write_bytes(0, (size - old_len) * std::mem::size_of::<T>());
+    }
+}
+
 /// Takes an `Option<&mut Vec<T>>` style buffer and shrinks it.
 macro_rules! map_set {
     ($buffer:expr, $min:expr) => {
diff --git a/crates/drm/.android-checksum.json b/crates/drm/.android-checksum.json
index 2605a97..6dc6ffc 100644
--- a/crates/drm/.android-checksum.json
+++ b/crates/drm/.android-checksum.json
@@ -1 +1 @@
-{"package":null,"files":{".cargo-checksum.json":"7622227cc70233ba27b110587fa9622f932e4bbf28383cd625de20ce915bc87f","Android.bp":"d39b980ff4467eadeb0b72637cb4cac54807c3f5c8e42fb418d9646a2dcafa2a","Cargo.lock":"2603fc36c65f289b85b27ddd87919f6dd85677ae03c40643bfd0266a70d05498","Cargo.toml":"2c67da58119afcfbc8b575c8d0b83c130df8004ef08ce7c0045fbe3ef78e44ae","LICENSE":"38620a3cfaeec97a9197e8c39e436ea7f0bc86699b1f1c35f1aa41785b6d4eac","METADATA":"142e9ca06ce4c8a24d8e0b55b3c21d307fabb1b9415fdbc9681919872a78537e","MODULE_LICENSE_MIT":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","README.md":"ce8181d5a89663eef91ba286e847aae22ae3a934ec5a211f4d084682f6e111df","cargo_embargo.json":"c998c44184a9f0f60dd35cc33905e5f5363f971e4b251ff70a5212c354d445ca","examples/atomic_modeset.rs":"7ecc010bf0383c2a222832bb4a4276b74f548db8aa6920bd311bf97b951a078e","examples/basic.rs":"7d79b293b7890bf1217ce750fd0c380bdcf1c7ecff215ffbd3dbc3ee22a30321","examples/ffi.rs":"aa3877e27804f805e03f5c045832968871fccfb0c00de1e9094e0432849da5f5","examples/images/1.png":"1ea0debbb1085fe79af82aa0f51fe9e7bd876f68b72d96281ca79791be20c2d9","examples/images/2.png":"4a2921f480988b5b410368df43154e48a8bbaeb7f2956812886021d14319fae9","examples/images/3.png":"705233c323b62344eea668684cf2bc595f05756a8b1cf20ed095f812f132eff2","examples/images/4.png":"873d3289821740df8ef4ed4a0cbd37119f7a5abbb343952e4bfdfb52a0575d3d","examples/kms_interactive.rs":"73fea8a6311b27f01d1ec9b7e51964c803be7e748fbdda3cecd0b141682dfcc1","examples/legacy_modeset.rs":"fd2a88d751ab7e6a3023a0bdc7d44251418aaff89fbbe0d06d263df107b73ba7","examples/list_modes.rs":"10c92dee79ea9c5bf7a967cfdbc268a1a7b1391f3910b63a28de17ed696aca23","examples/properties.rs":"0ee487a01d851b8c70e05c7f5542af93042806816b2791af6070cd938f95aa24","examples/resources.rs":"3f23e8c628dc23be1b355280ce60c9e61379a0b559ec682fe2b7d4e3f9f8d6d7","examples/syncobj.rs":"55dc39a8c80f7d5083817b4f163a0e64cfe4d7da82742c47275d6a3ee215385f","examples/utils/mod.rs":"bca0faf411b133a8d3af
541b7205f28419d0f38e7626a7af046378526a0df423","src/buffer/mod.rs":"da74c088967eed89c6829daf4f192139c3a385d13bc36e32e9b13c60aa950b11","src/control/atomic.rs":"0143bdbd1e6921553723df31455c1618c9a4212c50bc83a157af32ad2b371f06","src/control/connector.rs":"47a1218fbef113492f0b63ba170491dc8f0387211dfdf4558d6b4f80974316bd","src/control/crtc.rs":"3a14646b5a3c2bb9e9a1b160e3e4017297f8fd3ab2e0c1f6bda3cf802fde7fc9","src/control/dumbbuffer.rs":"78fc429e203c5e5688c10927b9d395e064fd398c39d8d3433e94329a7642f753","src/control/encoder.rs":"0ab2eb3b367c7703e7cfddb3aac0b5abba4909fcbd9f4ff67a4ff5fc3931ef05","src/control/framebuffer.rs":"b4e90ff1af2561348f3cfaeeacb86f74b985e5747cd6e13e7cb6c6dd064568a2","src/control/mod.rs":"305bf50d9e4d4fa4595fadef1cae804da821b40f79256b489beb83595888dc32","src/control/plane.rs":"5e140c604303606ab880f9ecd724bdf401cd679941884c18d6a83ba8f22164ea","src/control/property.rs":"a8d860241d4f66d894a8f0604477b1f3d7af079e937877f0d48d63d1e5219b4d","src/control/syncobj.rs":"90003a84e9221a3c9576272a40930182535dc729f1186aac5abfb027da3b32bd","src/lib.rs":"b5e78aea7e8d6a562914c2d594faad97a4cd5c9f3cde757b227887ded6df23f2","src/util.rs":"4ead0b258e403c16e55beba6576c395aff9330cb0cbcd58c5721d9a03b35c8e7"}}
\ No newline at end of file
+{"package":null,"files":{".cargo-checksum.json":"3c147ef7e5b0a6cbcc43bffbf89ec8646b3f1a85f11cacd90d24020b40818c0a","Android.bp":"7f6df45fb0d98f3ef5cda689db32a648e5a655ea312644e52240bbb881f38fbb","Cargo.lock":"686da571eb9b352c3febcf4980293ab1b360acb139238e8a906523b16d2728a0","Cargo.toml":"dc49384534378f8a1b7e52b49298e6405b68f3d64d87818eb1794e55169ebf82","LICENSE":"38620a3cfaeec97a9197e8c39e436ea7f0bc86699b1f1c35f1aa41785b6d4eac","METADATA":"3bdf876903885100fe78db06cb1cf04c1d0ed0c169e234c93a452390aa9befe0","MODULE_LICENSE_MIT":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","README.md":"ce8181d5a89663eef91ba286e847aae22ae3a934ec5a211f4d084682f6e111df","cargo_embargo.json":"c998c44184a9f0f60dd35cc33905e5f5363f971e4b251ff70a5212c354d445ca","examples/atomic_modeset.rs":"7ecc010bf0383c2a222832bb4a4276b74f548db8aa6920bd311bf97b951a078e","examples/basic.rs":"7d79b293b7890bf1217ce750fd0c380bdcf1c7ecff215ffbd3dbc3ee22a30321","examples/ffi.rs":"aa3877e27804f805e03f5c045832968871fccfb0c00de1e9094e0432849da5f5","examples/images/1.png":"1ea0debbb1085fe79af82aa0f51fe9e7bd876f68b72d96281ca79791be20c2d9","examples/images/2.png":"4a2921f480988b5b410368df43154e48a8bbaeb7f2956812886021d14319fae9","examples/images/3.png":"705233c323b62344eea668684cf2bc595f05756a8b1cf20ed095f812f132eff2","examples/images/4.png":"873d3289821740df8ef4ed4a0cbd37119f7a5abbb343952e4bfdfb52a0575d3d","examples/kms_interactive.rs":"73fea8a6311b27f01d1ec9b7e51964c803be7e748fbdda3cecd0b141682dfcc1","examples/legacy_modeset.rs":"fd2a88d751ab7e6a3023a0bdc7d44251418aaff89fbbe0d06d263df107b73ba7","examples/list_modes.rs":"10c92dee79ea9c5bf7a967cfdbc268a1a7b1391f3910b63a28de17ed696aca23","examples/properties.rs":"0ee487a01d851b8c70e05c7f5542af93042806816b2791af6070cd938f95aa24","examples/resources.rs":"3f23e8c628dc23be1b355280ce60c9e61379a0b559ec682fe2b7d4e3f9f8d6d7","examples/syncobj.rs":"55dc39a8c80f7d5083817b4f163a0e64cfe4d7da82742c47275d6a3ee215385f","examples/utils/mod.rs":"bca0faf411b133a8d3af
541b7205f28419d0f38e7626a7af046378526a0df423","src/buffer/mod.rs":"da74c088967eed89c6829daf4f192139c3a385d13bc36e32e9b13c60aa950b11","src/control/atomic.rs":"0143bdbd1e6921553723df31455c1618c9a4212c50bc83a157af32ad2b371f06","src/control/connector.rs":"47a1218fbef113492f0b63ba170491dc8f0387211dfdf4558d6b4f80974316bd","src/control/crtc.rs":"3a14646b5a3c2bb9e9a1b160e3e4017297f8fd3ab2e0c1f6bda3cf802fde7fc9","src/control/dumbbuffer.rs":"78fc429e203c5e5688c10927b9d395e064fd398c39d8d3433e94329a7642f753","src/control/encoder.rs":"0ab2eb3b367c7703e7cfddb3aac0b5abba4909fcbd9f4ff67a4ff5fc3931ef05","src/control/framebuffer.rs":"b4e90ff1af2561348f3cfaeeacb86f74b985e5747cd6e13e7cb6c6dd064568a2","src/control/mod.rs":"305bf50d9e4d4fa4595fadef1cae804da821b40f79256b489beb83595888dc32","src/control/plane.rs":"5e140c604303606ab880f9ecd724bdf401cd679941884c18d6a83ba8f22164ea","src/control/property.rs":"a8d860241d4f66d894a8f0604477b1f3d7af079e937877f0d48d63d1e5219b4d","src/control/syncobj.rs":"90003a84e9221a3c9576272a40930182535dc729f1186aac5abfb027da3b32bd","src/lib.rs":"899f6574c7f4f9f378b61dcd8a8d8722ac8a4bd1019b1c5f0eebe949d6533ee1","src/node/constants.rs":"31e20faa77b0bb3cb2cb844cb4468260277dce30693b5bd44e2a3b7b86e4d2c9","src/node/mod.rs":"17af9bcfbb397dde2f6a5e129ef30106732a87c49b8c01ff6aa9fbbaee744d9d","src/util.rs":"4ead0b258e403c16e55beba6576c395aff9330cb0cbcd58c5721d9a03b35c8e7"}}
\ No newline at end of file
diff --git a/crates/drm/.cargo-checksum.json b/crates/drm/.cargo-checksum.json
index 296abb6..781b8ed 100644
--- a/crates/drm/.cargo-checksum.json
+++ b/crates/drm/.cargo-checksum.json
@@ -1 +1 @@
-{"files":{"Cargo.lock":"ec20fc41a5ef37bb9b77b00a8d85f914f0df12d7c786f1b03f2889b009b50c61","Cargo.toml":"598dda43b348568b2b241985f22df10849dcb1d53357b0e1ad0dc375bf0d64df","LICENSE":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"10a848728050b068bf4f6671b475aa2e5e046d778a3ebd302280ddec85a8963b","examples/atomic_modeset.rs":"fc9c6a9804946a7fb547309f04b7537ba781e1d65edc8cce4f036f8a9d8e9537","examples/basic.rs":"4503d0430a61440269d6d0c7ca5e710213927a8ba1fa2e361b683df90218c743","examples/ffi.rs":"63d7016f5a565f62195f70094ba7a979fca4b61b40f2adbf86441d3473d7fb9f","examples/images/1.png":"e7f44d5d6c04ff16dad3763f4d279bc5374b0623d4e40d0683bc9f03896f054c","examples/images/2.png":"23b81566fa6b5d1ae205e6b5f67d7cec7a4580d2aff424e2077d3b1e6e5cb479","examples/images/3.png":"1fab7971df22ab6176d032ed5f2c4d7fe480d2e3c19be242d85d156253b4dcad","examples/images/4.png":"e68a20315e0ffcc2d7f124481f69f17a9cb4ecf8642cc6a92ade807c9a351445","examples/kms_interactive.rs":"460dd14ee906c06f4ba11fc7c1c25711f387e5acdecc43b76672b84c0996d86f","examples/legacy_modeset.rs":"dc6f386df8170a7a653319f2b68a136dd72855939b62e44841f429f3d57604dd","examples/list_modes.rs":"5b617cb0b605a13a7c0f1a8750bb41d9c05a681958878ca44b80ddf96b7f3da8","examples/properties.rs":"93f869e30f9416e2e8132d624127cbf11957c5c20dbc0ef5d7f0a70a50c7ccb2","examples/resources.rs":"6f279f8ecf11bbc5ce59b996590ffd1f3b9ae081b63ef47d1b8a77b104e3b8b3","examples/syncobj.rs":"7ce818f6b09cbdcefac727622e3c79dd9b6b03b21a91f968a920ca3fe5d8cab8","examples/utils/mod.rs":"8dc5b662dec331c6c4189f8f9d4b6a0a151fa2340c6b5cb4c6a2723eafb52af7","src/buffer/mod.rs":"1e0a9447eb25164932a871c87d3a643b6221cf26f2e840f2ccda182b4fcf2935","src/control/atomic.rs":"cf5ab004409a95e3f101c0242416945e8d731f95b61994a3a709acb4a97382b1","src/control/connector.rs":"330cc5b395f335f0755ffd0208e7f7c6565bc706990712d718c088afbcc45089","src/control/crtc.rs":"d94bbf07a3fd5edad78978676f57b95ac54a89019ac07afed1b54bdfc7acf25b","src/control/dumbbuffer.rs":"d8e
37866e8f21888994af843714e1e2c8586bb925cbccc11e5d09c234a1a618b","src/control/encoder.rs":"60f268a78230441dad6ce6960a6e929b6438c8c47c70a5864e41d7ff48f680b6","src/control/framebuffer.rs":"d119e9eda632cc2ab088f4efd27a289ce286f6abef1d6a910c6669edeef3c53b","src/control/mod.rs":"25a6f6d540642354cf420a9abde4561fc8751ea7902cf1fd76ceb185de94d56e","src/control/plane.rs":"497d285fd428e2e2698f643922704d82b4aa1573925761df624409beaff1424a","src/control/property.rs":"e5a6e3f07cb74f471cfa652cbd4aa1966883d6b3edbe98d10de5775fcf500174","src/control/syncobj.rs":"20bd8d3a864aca4452b316b0c82fb66ea61d9453f3a581d0c5c168cf2e5c56e3","src/lib.rs":"c8b8d3664149dadf2c03b3fbf00e9a855010d17f7088f0e4d8a5a443bcc22f7f","src/util.rs":"47eb207a03b27fad0892026748ff4cd807a75f98102a4fbe7687cf0236583783"},"package":"98888c4bbd601524c11a7ed63f814b8825f420514f78e96f752c437ae9cbb5d1"}
\ No newline at end of file
+{"files":{"Cargo.lock":"84ffc1a31494e9c3d0870e98313d8010b6b980f1d73a4d247a9396634b285f1e","Cargo.toml":"68ef0ded30971014ba4b8f1efbbe792a5f7c20c7f8fb457de943e292a733ca60","LICENSE":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"10a848728050b068bf4f6671b475aa2e5e046d778a3ebd302280ddec85a8963b","examples/atomic_modeset.rs":"fc9c6a9804946a7fb547309f04b7537ba781e1d65edc8cce4f036f8a9d8e9537","examples/basic.rs":"4503d0430a61440269d6d0c7ca5e710213927a8ba1fa2e361b683df90218c743","examples/ffi.rs":"63d7016f5a565f62195f70094ba7a979fca4b61b40f2adbf86441d3473d7fb9f","examples/images/1.png":"e7f44d5d6c04ff16dad3763f4d279bc5374b0623d4e40d0683bc9f03896f054c","examples/images/2.png":"23b81566fa6b5d1ae205e6b5f67d7cec7a4580d2aff424e2077d3b1e6e5cb479","examples/images/3.png":"1fab7971df22ab6176d032ed5f2c4d7fe480d2e3c19be242d85d156253b4dcad","examples/images/4.png":"e68a20315e0ffcc2d7f124481f69f17a9cb4ecf8642cc6a92ade807c9a351445","examples/kms_interactive.rs":"460dd14ee906c06f4ba11fc7c1c25711f387e5acdecc43b76672b84c0996d86f","examples/legacy_modeset.rs":"dc6f386df8170a7a653319f2b68a136dd72855939b62e44841f429f3d57604dd","examples/list_modes.rs":"5b617cb0b605a13a7c0f1a8750bb41d9c05a681958878ca44b80ddf96b7f3da8","examples/properties.rs":"93f869e30f9416e2e8132d624127cbf11957c5c20dbc0ef5d7f0a70a50c7ccb2","examples/resources.rs":"6f279f8ecf11bbc5ce59b996590ffd1f3b9ae081b63ef47d1b8a77b104e3b8b3","examples/syncobj.rs":"7ce818f6b09cbdcefac727622e3c79dd9b6b03b21a91f968a920ca3fe5d8cab8","examples/utils/mod.rs":"8dc5b662dec331c6c4189f8f9d4b6a0a151fa2340c6b5cb4c6a2723eafb52af7","src/buffer/mod.rs":"1e0a9447eb25164932a871c87d3a643b6221cf26f2e840f2ccda182b4fcf2935","src/control/atomic.rs":"cf5ab004409a95e3f101c0242416945e8d731f95b61994a3a709acb4a97382b1","src/control/connector.rs":"330cc5b395f335f0755ffd0208e7f7c6565bc706990712d718c088afbcc45089","src/control/crtc.rs":"d94bbf07a3fd5edad78978676f57b95ac54a89019ac07afed1b54bdfc7acf25b","src/control/dumbbuffer.rs":"d8e
37866e8f21888994af843714e1e2c8586bb925cbccc11e5d09c234a1a618b","src/control/encoder.rs":"60f268a78230441dad6ce6960a6e929b6438c8c47c70a5864e41d7ff48f680b6","src/control/framebuffer.rs":"d119e9eda632cc2ab088f4efd27a289ce286f6abef1d6a910c6669edeef3c53b","src/control/mod.rs":"25a6f6d540642354cf420a9abde4561fc8751ea7902cf1fd76ceb185de94d56e","src/control/plane.rs":"497d285fd428e2e2698f643922704d82b4aa1573925761df624409beaff1424a","src/control/property.rs":"e5a6e3f07cb74f471cfa652cbd4aa1966883d6b3edbe98d10de5775fcf500174","src/control/syncobj.rs":"20bd8d3a864aca4452b316b0c82fb66ea61d9453f3a581d0c5c168cf2e5c56e3","src/lib.rs":"3a720d2ecbb0af76e80ae8afb197abcad7b51e97233648ed4e8fe95c232a3479","src/node/constants.rs":"32d75f7d3a1f0773341d7b5c52a4ec693098f286b0d362f91240ed330a6cfd81","src/node/mod.rs":"cde4e38c55d5f2143dcd52f217d9fa2b55c6f31a9cb975a3ee4c17984a3bd1ee","src/util.rs":"47eb207a03b27fad0892026748ff4cd807a75f98102a4fbe7687cf0236583783"},"package":"80bc8c5c6c2941f70a55c15f8d9f00f9710ebda3ffda98075f996a0e6c92756f"}
\ No newline at end of file
diff --git a/crates/drm/Android.bp b/crates/drm/Android.bp
index 6fd5666..724ca91 100644
--- a/crates/drm/Android.bp
+++ b/crates/drm/Android.bp
@@ -18,7 +18,7 @@
     host_supported: true,
     crate_name: "drm",
     cargo_env_compat: true,
-    cargo_pkg_version: "0.12.0",
+    cargo_pkg_version: "0.14.1",
     crate_root: "src/lib.rs",
     edition: "2021",
     rustlibs: [
diff --git a/crates/drm/Cargo.lock b/crates/drm/Cargo.lock
index 34771f9..a01c0da 100644
--- a/crates/drm/Cargo.lock
+++ b/crates/drm/Cargo.lock
@@ -3,10 +3,10 @@
 version = 3
 
 [[package]]
-name = "adler"
-version = "1.0.2"
+name = "adler2"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
 
 [[package]]
 name = "aho-corasick"
@@ -19,17 +19,17 @@
 
 [[package]]
 name = "autocfg"
-version = "1.2.0"
+version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80"
+checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
 
 [[package]]
 name = "bindgen"
-version = "0.69.4"
+version = "0.69.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0"
+checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "cexpr",
  "clang-sys",
  "itertools",
@@ -54,24 +54,24 @@
 
 [[package]]
 name = "bitflags"
-version = "2.5.0"
+version = "2.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
+checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
 
 [[package]]
 name = "bytemuck"
-version = "1.15.0"
+version = "1.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15"
+checksum = "8334215b81e418a0a7bdb8ef0849474f40bb10c8b71f1c4ed315cff49f32494d"
 dependencies = [
  "bytemuck_derive",
 ]
 
 [[package]]
 name = "bytemuck_derive"
-version = "1.6.0"
+version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4da9a32f3fed317401fa3c862968128267c3106685286e15d5aaa3d7389c2f60"
+checksum = "bcfcc3cd946cb52f0bbfdbbcfa2f4e24f75ebb6c0e1002f7c25904fada18b9ec"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -101,9 +101,9 @@
 
 [[package]]
 name = "clang-sys"
-version = "1.7.0"
+version = "1.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67523a3b4be3ce1989d607a828d036249522dd9c1c8de7f4dd2dae43a37369d1"
+checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
 dependencies = [
  "glob",
  "libc",
@@ -112,9 +112,9 @@
 
 [[package]]
 name = "clipboard-win"
-version = "5.3.1"
+version = "5.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "79f4473f5144e20d9aceaf2972478f06ddf687831eafeeb434fbaf0acc4144ad"
+checksum = "15efe7a882b08f34e38556b14f2fb3daa98769d06c7f0c1b076dfd0d983bc892"
 dependencies = [
  "error-code",
 ]
@@ -127,31 +127,32 @@
 
 [[package]]
 name = "crc32fast"
-version = "1.4.0"
+version = "1.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa"
+checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3"
 dependencies = [
  "cfg-if",
 ]
 
 [[package]]
 name = "drm"
-version = "0.12.0"
+version = "0.14.1"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "bytemuck",
  "drm-ffi",
  "drm-fourcc",
  "image",
+ "libc",
  "rustix",
  "rustyline",
 ]
 
 [[package]]
 name = "drm-ffi"
-version = "0.8.0"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "97c98727e48b7ccb4f4aea8cfe881e5b07f702d17b7875991881b41af7278d53"
+checksum = "d8e41459d99a9b529845f6d2c909eb9adf3b6d2f82635ae40be8de0601726e8b"
 dependencies = [
  "drm-sys",
  "rustix",
@@ -165,21 +166,21 @@
 
 [[package]]
 name = "drm-sys"
-version = "0.7.0"
+version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd39dde40b6e196c2e8763f23d119ddb1a8714534bf7d77fa97a65b0feda3986"
+checksum = "bafb66c8dbc944d69e15cfcc661df7e703beffbaec8bd63151368b06c5f9858c"
 dependencies = [
  "bindgen",
  "libc",
- "linux-raw-sys 0.6.4",
+ "linux-raw-sys 0.6.5",
  "pkg-config",
 ]
 
 [[package]]
 name = "either"
-version = "1.11.0"
+version = "1.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a47c1c47d2f5964e29c61246e81db715514cd532db6b5116a25ea3c03d6780a2"
+checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
 
 [[package]]
 name = "endian-type"
@@ -189,9 +190,9 @@
 
 [[package]]
 name = "errno"
-version = "0.3.8"
+version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
+checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba"
 dependencies = [
  "libc",
  "windows-sys",
@@ -199,9 +200,9 @@
 
 [[package]]
 name = "error-code"
-version = "3.2.0"
+version = "3.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a0474425d51df81997e2f90a21591180b38eccf27292d755f3e30750225c175b"
+checksum = "a5d9305ccc6942a704f4335694ecd3de2ea531b114ac2d51f5f843750787a92f"
 
 [[package]]
 name = "fd-lock"
@@ -216,18 +217,18 @@
 
 [[package]]
 name = "fdeflate"
-version = "0.3.4"
+version = "0.3.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f9bfee30e4dedf0ab8b422f03af778d9612b63f502710fc500a334ebe2de645"
+checksum = "d8090f921a24b04994d9929e204f50b498a33ea6ba559ffaa05e04f7ee7fb5ab"
 dependencies = [
  "simd-adler32",
 ]
 
 [[package]]
 name = "flate2"
-version = "1.0.28"
+version = "1.0.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e"
+checksum = "a1b589b4dc103969ad3cf85c950899926ec64300a1a46d76c03a6072957036f0"
 dependencies = [
  "crc32fast",
  "miniz_oxide",
@@ -272,9 +273,9 @@
 
 [[package]]
 name = "lazy_static"
-version = "1.4.0"
+version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
 
 [[package]]
 name = "lazycell"
@@ -284,15 +285,15 @@
 
 [[package]]
 name = "libc"
-version = "0.2.153"
+version = "0.2.161"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
+checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1"
 
 [[package]]
 name = "libloading"
-version = "0.8.3"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19"
+checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
 dependencies = [
  "cfg-if",
  "windows-targets",
@@ -300,27 +301,27 @@
 
 [[package]]
 name = "linux-raw-sys"
-version = "0.4.13"
+version = "0.4.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
+checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
 
 [[package]]
 name = "linux-raw-sys"
-version = "0.6.4"
+version = "0.6.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f0b5399f6804fbab912acbd8878ed3532d506b7c951b8f9f164ef90fef39e3f4"
+checksum = "2a385b1be4e5c3e362ad2ffa73c392e53f031eaa5b7d648e64cd87f27f6063d7"
 
 [[package]]
 name = "log"
-version = "0.4.21"
+version = "0.4.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
+checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
 
 [[package]]
 name = "memchr"
-version = "2.7.2"
+version = "2.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
+checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
 
 [[package]]
 name = "minimal-lexical"
@@ -330,11 +331,11 @@
 
 [[package]]
 name = "miniz_oxide"
-version = "0.7.2"
+version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7"
+checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1"
 dependencies = [
- "adler",
+ "adler2",
  "simd-adler32",
 ]
 
@@ -353,7 +354,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "cfg-if",
  "libc",
 ]
@@ -370,30 +371,30 @@
 
 [[package]]
 name = "num-traits"
-version = "0.2.18"
+version = "0.2.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
 dependencies = [
  "autocfg",
 ]
 
 [[package]]
 name = "once_cell"
-version = "1.19.0"
+version = "1.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
 
 [[package]]
 name = "pkg-config"
-version = "0.3.30"
+version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec"
+checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
 
 [[package]]
 name = "png"
-version = "0.17.13"
+version = "0.17.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "06e4b0d3d1312775e782c86c91a111aa1f910cbb65e1337f9975b5f9a554b5e1"
+checksum = "52f9d46a34a05a6a57566bc2bfae066ef07585a6e3fa30fbbdff5936380623f0"
 dependencies = [
  "bitflags 1.3.2",
  "crc32fast",
@@ -404,9 +405,9 @@
 
 [[package]]
 name = "prettyplease"
-version = "0.2.19"
+version = "0.2.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ac2cf0f2e4f42b49f5ffd07dae8d746508ef7526c13940e5f524012ae6c6550"
+checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033"
 dependencies = [
  "proc-macro2",
  "syn",
@@ -414,18 +415,18 @@
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.81"
+version = "1.0.89"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba"
+checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e"
 dependencies = [
  "unicode-ident",
 ]
 
 [[package]]
 name = "quote"
-version = "1.0.36"
+version = "1.0.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
+checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
 dependencies = [
  "proc-macro2",
 ]
@@ -442,9 +443,9 @@
 
 [[package]]
 name = "regex"
-version = "1.10.4"
+version = "1.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c"
+checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -454,9 +455,9 @@
 
 [[package]]
 name = "regex-automata"
-version = "0.4.6"
+version = "0.4.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
+checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -465,9 +466,9 @@
 
 [[package]]
 name = "regex-syntax"
-version = "0.8.3"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56"
+checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
 
 [[package]]
 name = "rustc-hash"
@@ -477,14 +478,14 @@
 
 [[package]]
 name = "rustix"
-version = "0.38.32"
+version = "0.38.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89"
+checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "errno",
  "libc",
- "linux-raw-sys 0.4.13",
+ "linux-raw-sys 0.4.14",
  "windows-sys",
 ]
 
@@ -494,7 +495,7 @@
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "02a2d683a4ac90aeef5b1013933f6d977bd37d51ff3f4dad829d4931a7e6be86"
 dependencies = [
- "bitflags 2.5.0",
+ "bitflags 2.6.0",
  "cfg-if",
  "clipboard-win",
  "fd-lock",
@@ -530,9 +531,9 @@
 
 [[package]]
 name = "syn"
-version = "2.0.60"
+version = "2.0.85"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3"
+checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -541,27 +542,27 @@
 
 [[package]]
 name = "unicode-ident"
-version = "1.0.12"
+version = "1.0.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
 
 [[package]]
 name = "unicode-segmentation"
-version = "1.11.0"
+version = "1.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
+checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
 
 [[package]]
 name = "unicode-width"
-version = "0.1.11"
+version = "0.1.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
+checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
 
 [[package]]
 name = "utf8parse"
-version = "0.2.1"
+version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
 
 [[package]]
 name = "which"
@@ -608,9 +609,9 @@
 
 [[package]]
 name = "windows-targets"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
 dependencies = [
  "windows_aarch64_gnullvm",
  "windows_aarch64_msvc",
@@ -624,48 +625,48 @@
 
 [[package]]
 name = "windows_aarch64_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
 
 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
 
 [[package]]
 name = "windows_i686_gnu"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
 
 [[package]]
 name = "windows_i686_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
 
 [[package]]
 name = "windows_i686_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
 
 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
 
 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.52.5"
+version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
diff --git a/crates/drm/Cargo.toml b/crates/drm/Cargo.toml
index e10b51d..32f2fb5 100644
--- a/crates/drm/Cargo.toml
+++ b/crates/drm/Cargo.toml
@@ -11,22 +11,67 @@
 
 [package]
 edition = "2021"
-rust-version = "1.65"
+rust-version = "1.66"
 name = "drm"
-version = "0.12.0"
+version = "0.14.1"
 authors = [
     "Tyler Slabinski <[email protected]>",
     "Victoria Brekenfeld <[email protected]>",
 ]
+build = false
 exclude = [
     ".gitignore",
     ".github",
 ]
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
 description = "Safe, low-level bindings to the Direct Rendering Manager API"
 readme = "README.md"
 license = "MIT"
 repository = "https://github.com/Smithay/drm-rs"
 
+[lib]
+name = "drm"
+path = "src/lib.rs"
+
+[[example]]
+name = "atomic_modeset"
+path = "examples/atomic_modeset.rs"
+
+[[example]]
+name = "basic"
+path = "examples/basic.rs"
+
+[[example]]
+name = "ffi"
+path = "examples/ffi.rs"
+
+[[example]]
+name = "kms_interactive"
+path = "examples/kms_interactive.rs"
+
+[[example]]
+name = "legacy_modeset"
+path = "examples/legacy_modeset.rs"
+
+[[example]]
+name = "list_modes"
+path = "examples/list_modes.rs"
+
+[[example]]
+name = "properties"
+path = "examples/properties.rs"
+
+[[example]]
+name = "resources"
+path = "examples/resources.rs"
+
+[[example]]
+name = "syncobj"
+path = "examples/syncobj.rs"
+
 [dependencies.bitflags]
 version = "2"
 
@@ -38,7 +83,7 @@
 ]
 
 [dependencies.drm-ffi]
-version = "0.8.0"
+version = "0.9.0"
 
 [dependencies.drm-fourcc]
 version = "^2.2.0"
@@ -67,3 +112,6 @@
 
 [features]
 use_bindgen = ["drm-ffi/use_bindgen"]
+
+[target.'cfg(target_os = "freebsd")'.dependencies.libc]
+version = "0.2"
diff --git a/crates/drm/METADATA b/crates/drm/METADATA
index 59f6233..ea8b9bf 100644
--- a/crates/drm/METADATA
+++ b/crates/drm/METADATA
@@ -1,17 +1,17 @@
 name: "drm"
 description: "Safe, low-level bindings to the Direct Rendering Manager API"
 third_party {
-  version: "0.12.0"
+  version: "0.14.1"
   license_type: NOTICE
   last_upgrade_date {
-    year: 2024
-    month: 10
-    day: 3
+    year: 2025
+    month: 1
+    day: 22
   }
   homepage: "https://crates.io/crates/drm"
   identifier {
     type: "Archive"
-    value: "https://static.crates.io/crates/drm/drm-0.12.0.crate"
-    version: "0.12.0"
+    value: "https://static.crates.io/crates/drm/drm-0.14.1.crate"
+    version: "0.14.1"
   }
 }
diff --git a/crates/drm/src/lib.rs b/crates/drm/src/lib.rs
index f5299a7..1cee3f0 100644
--- a/crates/drm/src/lib.rs
+++ b/crates/drm/src/lib.rs
@@ -32,6 +32,7 @@
 
 pub mod buffer;
 pub mod control;
+pub mod node;
 
 use std::ffi::{OsStr, OsString};
 use std::time::Duration;
@@ -165,18 +166,24 @@
         let mut date = Vec::new();
         let mut desc = Vec::new();
 
-        let _ = drm_ffi::get_version(
+        let v = drm_ffi::get_version(
             self.as_fd(),
             Some(&mut name),
             Some(&mut date),
             Some(&mut desc),
         )?;
 
+        let version = (v.version_major, v.version_minor, v.version_patchlevel);
         let name = OsString::from_vec(unsafe { transmute_vec(name) });
         let date = OsString::from_vec(unsafe { transmute_vec(date) });
         let desc = OsString::from_vec(unsafe { transmute_vec(desc) });
 
-        let driver = Driver { name, date, desc };
+        let driver = Driver {
+            version,
+            name,
+            date,
+            desc,
+        };
 
         Ok(driver)
     }
@@ -238,6 +245,8 @@
 /// Driver version of a device.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Driver {
+    /// Version of the driver in `(major, minor, patchlevel)` format
+    pub version: (i32, i32, i32),
     /// Name of the driver
     pub name: OsString,
     /// Date driver was published
diff --git a/crates/drm/src/node/constants.rs b/crates/drm/src/node/constants.rs
new file mode 100644
index 0000000..4828842
--- /dev/null
+++ b/crates/drm/src/node/constants.rs
@@ -0,0 +1,45 @@
+//! OS-Specific DRM constants.
+
+/// DRM major value.
+#[cfg(target_os = "dragonfly")]
+pub const DRM_MAJOR: u32 = 145;
+
+/// DRM major value.
+#[cfg(target_os = "netbsd")]
+pub const DRM_MAJOR: u32 = 34;
+
+/// DRM major value.
+#[cfg(all(target_os = "openbsd", target_arch = "x86"))]
+pub const DRM_MAJOR: u32 = 88;
+
+/// DRM major value.
+#[cfg(all(target_os = "openbsd", not(target_arch = "x86")))]
+pub const DRM_MAJOR: u32 = 87;
+
+/// DRM major value.
+#[cfg(not(any(target_os = "dragonfly", target_os = "netbsd", target_os = "openbsd")))]
+pub const DRM_MAJOR: u32 = 226;
+
+/// Primary DRM node prefix.
+#[cfg(not(target_os = "openbsd"))]
+pub const PRIMARY_NAME: &str = "card";
+
+/// Primary DRM node prefix.
+#[cfg(target_os = "openbsd")]
+pub const PRIMARY_NAME: &str = "drm";
+
+/// Control DRM node prefix.
+#[cfg(not(target_os = "openbsd"))]
+pub const CONTROL_NAME: &str = "controlD";
+
+/// Control DRM node prefix.
+#[cfg(target_os = "openbsd")]
+pub const CONTROL_NAME: &str = "drmC";
+
+/// Render DRM node prefix.
+#[cfg(not(target_os = "openbsd"))]
+pub const RENDER_NAME: &str = "renderD";
+
+/// Render DRM node prefix.
+#[cfg(target_os = "openbsd")]
+pub const RENDER_NAME: &str = "drmR";
diff --git a/crates/drm/src/node/mod.rs b/crates/drm/src/node/mod.rs
new file mode 100644
index 0000000..5fcecb3
--- /dev/null
+++ b/crates/drm/src/node/mod.rs
@@ -0,0 +1,382 @@
+//! Module for abstractions on drm device nodes.
+
+pub mod constants;
+
+use std::error::Error;
+use std::fmt::{self, Debug, Display, Formatter};
+use std::io;
+use std::os::unix::io::AsFd;
+use std::path::{Path, PathBuf};
+
+use rustix::fs::{fstat, major, minor, stat, Dev as dev_t, Stat};
+
+use crate::node::constants::*;
+
+/// A node which refers to a DRM device.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct DrmNode {
+    dev: dev_t,
+    ty: NodeType,
+}
+
+impl DrmNode {
+    /// Creates a DRM node from an open drm device.
+    pub fn from_file<A: AsFd>(file: A) -> Result<DrmNode, CreateDrmNodeError> {
+        let stat = fstat(file).map_err(Into::<io::Error>::into)?;
+        DrmNode::from_stat(stat)
+    }
+
+    /// Creates a DRM node from path.
+    pub fn from_path<A: AsRef<Path>>(path: A) -> Result<DrmNode, CreateDrmNodeError> {
+        let stat = stat(path.as_ref()).map_err(Into::<io::Error>::into)?;
+        DrmNode::from_stat(stat)
+    }
+
+    /// Creates a DRM node from a file stat.
+    pub fn from_stat(stat: Stat) -> Result<DrmNode, CreateDrmNodeError> {
+        let dev = stat.st_rdev;
+        DrmNode::from_dev_id(dev)
+    }
+
+    /// Creates a DRM node from a [`dev_t`].
+    pub fn from_dev_id(dev: dev_t) -> Result<Self, CreateDrmNodeError> {
+        if !is_device_drm(dev) {
+            return Err(CreateDrmNodeError::NotDrmNode);
+        }
+
+        // The type of the DRM node is determined by the minor number ranges:
+        //   0 -  63 -> Primary
+        //  64 - 127 -> Control
+        // 128 - 255 -> Render
+        let ty = match minor(dev) >> 6 {
+            0 => NodeType::Primary,
+            1 => NodeType::Control,
+            2 => NodeType::Render,
+            _ => return Err(CreateDrmNodeError::NotDrmNode),
+        };
+
+        Ok(DrmNode { dev, ty })
+    }
+
+    /// Returns the type of the DRM node.
+    pub fn ty(&self) -> NodeType {
+        self.ty
+    }
+
+    /// Returns the device_id of the underlying DRM node.
+    pub fn dev_id(&self) -> dev_t {
+        self.dev
+    }
+
+    /// Returns the path of the open device if possible.
+    pub fn dev_path(&self) -> Option<PathBuf> {
+        node_path(self, self.ty).ok()
+    }
+
+    /// Returns the path of the specified node type matching the device, if available.
+    pub fn dev_path_with_type(&self, ty: NodeType) -> Option<PathBuf> {
+        node_path(self, ty).ok()
+    }
+
+    /// Returns a new node of the specified node type matching the device, if available.
+    pub fn node_with_type(&self, ty: NodeType) -> Option<Result<DrmNode, CreateDrmNodeError>> {
+        self.dev_path_with_type(ty).map(DrmNode::from_path)
+    }
+
+    /// Returns the major device number of the DRM device.
+    pub fn major(&self) -> u32 {
+        major(self.dev_id())
+    }
+
+    /// Returns the minor device number of the DRM device.
+    pub fn minor(&self) -> u32 {
+        minor(self.dev_id())
+    }
+
+    /// Returns whether the DRM device has render nodes.
+    pub fn has_render(&self) -> bool {
+        #[cfg(target_os = "linux")]
+        {
+            node_path(self, NodeType::Render).is_ok()
+        }
+
+        // TODO: More robust checks on non-linux.
+
+        #[cfg(target_os = "freebsd")]
+        {
+            false
+        }
+
+        #[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
+        {
+            false
+        }
+    }
+}
+
+impl Display for DrmNode {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        write!(f, "{}{}", self.ty.minor_name_prefix(), minor(self.dev_id()))
+    }
+}
+
+/// A type of node
+#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
+pub enum NodeType {
+    /// A primary node may be used to allocate buffers.
+    ///
+    /// If no other node is present, this may be used to post a buffer to an output with mode-setting.
+    Primary,
+
+    /// A control node may be used for mode-setting.
+    ///
+    /// This is almost never used since no DRM API for control nodes is available yet.
+    Control,
+
+    /// A render node may be used by a client to allocate buffers.
+    ///
+    /// Mode-setting is not possible with a render node.
+    Render,
+}
+
+impl NodeType {
+    /// Returns a string representing the prefix of a minor device's name.
+    ///
+    /// For example, on Linux with a primary node, the returned string would be `card`.
+    pub fn minor_name_prefix(&self) -> &'static str {
+        match self {
+            NodeType::Primary => PRIMARY_NAME,
+            NodeType::Control => CONTROL_NAME,
+            NodeType::Render => RENDER_NAME,
+        }
+    }
+
+    #[cfg(not(target_os = "linux"))]
+    fn minor_base(&self) -> u32 {
+        match self {
+            NodeType::Primary => 0,
+            NodeType::Control => 64,
+            NodeType::Render => 128,
+        }
+    }
+}
+
+impl Display for NodeType {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        Debug::fmt(self, f)
+    }
+}
+
+/// An error that may occur when creating a [`DrmNode`] from a file descriptor.
+#[derive(Debug)]
+pub enum CreateDrmNodeError {
+    /// Some underlying IO error occured while trying to create a DRM node.
+    Io(io::Error),
+
+    /// The provided file descriptor does not refer to a DRM node.
+    NotDrmNode,
+}
+
+impl Display for CreateDrmNodeError {
+    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::Io(err) => Display::fmt(err, f),
+            Self::NotDrmNode => {
+                f.write_str("the provided file descriptor does not refer to a DRM node")
+            }
+        }
+    }
+}
+
+impl Error for CreateDrmNodeError {
+    fn source(&self) -> Option<&(dyn Error + 'static)> {
+        match self {
+            Self::Io(err) => Some(err),
+            Self::NotDrmNode => None,
+        }
+    }
+}
+
+impl From<io::Error> for CreateDrmNodeError {
+    #[inline]
+    fn from(err: io::Error) -> Self {
+        CreateDrmNodeError::Io(err)
+    }
+}
+
+#[cfg(target_os = "freebsd")]
+fn devname(dev: dev_t) -> Option<String> {
+    use std::os::raw::{c_char, c_int};
+
+    // Matching value of SPECNAMELEN in FreeBSD 13+
+    let mut dev_name = vec![0u8; 255];
+
+    let buf: *mut c_char = unsafe {
+        libc::devname_r(
+            dev,
+            libc::S_IFCHR, // Must be S_IFCHR or S_IFBLK
+            dev_name.as_mut_ptr() as *mut c_char,
+            dev_name.len() as c_int,
+        )
+    };
+
+    // Buffer was too small (weird issue with the size of buffer) or the device could not be named.
+    if buf.is_null() {
+        return None;
+    }
+
+    // SAFETY: The buffer written to by devname_r is guaranteed to be NUL terminated.
+    unsafe { dev_name.set_len(libc::strlen(buf)) };
+
+    Some(String::from_utf8(dev_name).expect("Returned device name is not valid utf8"))
+}
+
+/// Returns if the given device by major:minor pair is a DRM device.
+#[cfg(target_os = "linux")]
+pub fn is_device_drm(dev: dev_t) -> bool {
+    // We `stat` the path rather than comparing the major to support dynamic device numbers:
+    //   https://gitlab.freedesktop.org/mesa/drm/-/commit/f8392583418aef5e27bfed9989aeb601e20cc96d
+    let path = format!("/sys/dev/char/{}:{}/device/drm", major(dev), minor(dev));
+    stat(path.as_str()).is_ok()
+}
+
+/// Returns if the given device by major:minor pair is a DRM device.
+#[cfg(target_os = "freebsd")]
+pub fn is_device_drm(dev: dev_t) -> bool {
+    devname(dev).map_or(false, |dev_name| {
+        dev_name.starts_with("drm/")
+            || dev_name.starts_with("dri/card")
+            || dev_name.starts_with("dri/control")
+            || dev_name.starts_with("dri/renderD")
+    })
+}
+
+/// Returns if the given device by major:minor pair is a DRM device.
+#[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
+pub fn is_device_drm(dev: dev_t) -> bool {
+    major(dev) == DRM_MAJOR
+}
+
+/// Returns the path of a specific type of node from the same DRM device as another path of the same node.
+pub fn path_to_type<P: AsRef<Path>>(path: P, ty: NodeType) -> io::Result<PathBuf> {
+    let stat = stat(path.as_ref()).map_err(Into::<io::Error>::into)?;
+    dev_path(stat.st_rdev, ty)
+}
+
+/// Returns the path of a specific type of node from the same DRM device as an existing [`DrmNode`].
+pub fn node_path(node: &DrmNode, ty: NodeType) -> io::Result<PathBuf> {
+    dev_path(node.dev, ty)
+}
+
+/// Returns the path of a specific type of node from the DRM device described by major and minor device numbers.
+#[cfg(target_os = "linux")]
+pub fn dev_path(dev: dev_t, ty: NodeType) -> io::Result<PathBuf> {
+    use std::fs;
+    use std::io::ErrorKind;
+
+    if !is_device_drm(dev) {
+        return Err(io::Error::new(
+            ErrorKind::NotFound,
+            format!("{}:{} is no DRM device", major(dev), minor(dev)),
+        ));
+    }
+
+    let read = fs::read_dir(format!(
+        "/sys/dev/char/{}:{}/device/drm",
+        major(dev),
+        minor(dev)
+    ))?;
+
+    for entry in read.flatten() {
+        let name = entry.file_name();
+        let name = name.to_string_lossy();
+
+        // Only 1 primary, control and render node may exist simultaneously, so the
+        // first occurrence is good enough.
+        if name.starts_with(ty.minor_name_prefix()) {
+            let path = Path::new("/dev/dri").join(&*name);
+            if path.exists() {
+                return Ok(path);
+            }
+        }
+    }
+
+    Err(io::Error::new(
+        ErrorKind::NotFound,
+        format!(
+            "Could not find node of type {} from DRM device {}:{}",
+            ty,
+            major(dev),
+            minor(dev)
+        ),
+    ))
+}
+
+/// Returns the path of a specific type of node from the DRM device described by major and minor device numbers.
+#[cfg(target_os = "freebsd")]
+pub fn dev_path(dev: dev_t, ty: NodeType) -> io::Result<PathBuf> {
+    // Based on libdrm `drmGetMinorNameForFD`. Should be updated if the code
+    // there is replaced with anything more sensible...
+
+    use std::io::ErrorKind;
+
+    if !is_device_drm(dev) {
+        return Err(io::Error::new(
+            ErrorKind::NotFound,
+            format!("{}:{} is no DRM device", major(dev), minor(dev)),
+        ));
+    }
+
+    if let Some(dev_name) = devname(dev) {
+        let suffix = dev_name.trim_start_matches(|c: char| !c.is_numeric());
+        if let Ok(old_id) = suffix.parse::<u32>() {
+            let id_mask = 0b11_1111;
+            let id = old_id & id_mask + ty.minor_base();
+            let path = PathBuf::from(format!("/dev/dri/{}{}", ty.minor_name_prefix(), id));
+            if path.exists() {
+                return Ok(path);
+            }
+        }
+    }
+
+    Err(io::Error::new(
+        ErrorKind::NotFound,
+        format!(
+            "Could not find node of type {} from DRM device {}:{}",
+            ty,
+            major(dev),
+            minor(dev)
+        ),
+    ))
+}
+
+/// Returns the path of a specific type of node from the DRM device described by major and minor device numbers.
+#[cfg(not(any(target_os = "linux", target_os = "freebsd")))]
+pub fn dev_path(dev: dev_t, ty: NodeType) -> io::Result<PathBuf> {
+    use std::io::ErrorKind;
+
+    if !is_device_drm(dev) {
+        return Err(io::Error::new(
+            ErrorKind::NotFound,
+            format!("{}:{} is no DRM device", major(dev), minor(dev)),
+        ));
+    }
+
+    let old_id = minor(dev);
+    let id_mask = 0b11_1111;
+    let id = old_id & id_mask + ty.minor_base();
+    let path = PathBuf::from(format!("/dev/dri/{}{}", ty.minor_name_prefix(), id));
+    if path.exists() {
+        return Ok(path);
+    }
+
+    Err(io::Error::new(
+        ErrorKind::NotFound,
+        format!(
+            "Could not find node of type {} for DRM device {}:{}",
+            ty,
+            major(dev),
+            minor(dev)
+        ),
+    ))
+}
diff --git a/crates/foldhash/.android-checksum.json b/crates/foldhash/.android-checksum.json
new file mode 100644
index 0000000..5d9d33a
--- /dev/null
+++ b/crates/foldhash/.android-checksum.json
@@ -0,0 +1 @@
+{"package":null,"files":{".cargo-checksum.json":"17157d0fd87f28e79025933ea52b87726d6b11bcaba8e174f6e7cd05b8261f21","README.md":"7001e5e97345c350ea56c617a4977a655f9cbe2de0dcdf1d354b4b19b7640607","LICENSE":"8be19fe391cb15f9cfd08f2d3a6b18c531eb3949a5077724f425953a65b33e9f","src/seed.rs":"999b6432c5a5def0aabefce4e9c5724962383f59c4d13ca2e365f6c4a9f0598a","Cargo.toml":"b01184d21495eb88dfcd0bdb3ba995a63f7fe1ad86ec49aa772d866f38afbb99","src/lib.rs":"a65700a106bf354ca4bd9eee25d4eead96871c4ca8b26389d4db0e2ea87d3c2a","src/convenience.rs":"0b8e6016da16d68148d690822c3f438ff8a423e9a88a9d177968fd8363e14df2","MODULE_LICENSE_ZLIB":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","Android.bp":"62b0b823f274f9e3441371e00e4f310074f219aa44961d455282379bfbe529e7","METADATA":"07558e7453064f3a550163bc0482b368b36be905ea8f6647eede6ae3d41fbe81","cargo_embargo.json":"0be745f01ba4955b20f2cb5011168c4d8cd396af75c007035ec693c67b17bce7"}}
\ No newline at end of file
diff --git a/crates/foldhash/.cargo-checksum.json b/crates/foldhash/.cargo-checksum.json
new file mode 100644
index 0000000..9c33333
--- /dev/null
+++ b/crates/foldhash/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"128e508349ef346f424d0092228c47f7f6f6459fc8ceafc509e3918ef31cd717","LICENSE":"b1181a40b2a7b25cf66fd01481713bc1005df082c53ef73e851e55071b102744","README.md":"fe47dcae2123a581799544a577b3a464962f3b51323b5495c53903b3bd3cd4ed","src/convenience.rs":"5e1b3e9fc7e89f35680fc87719ffc4bd8b98e9d5b443ff5abe1749f235a1f601","src/lib.rs":"d0d4fddddaa2353b9cb934535583d170dfe2fe8b3d1816edd667f86218d90e13","src/seed.rs":"90b1bb8f5117a9d46e83d10fc04cc5ad33fe9950727ad5f0633357aa7330e86b"},"package":"f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2"}
\ No newline at end of file
diff --git a/crates/foldhash/Android.bp b/crates/foldhash/Android.bp
new file mode 100644
index 0000000..acf32de
--- /dev/null
+++ b/crates/foldhash/Android.bp
@@ -0,0 +1,34 @@
+// This file is generated by cargo_embargo.
+// Do not modify this file because the changes will be overridden on upgrade.
+
+package {
+    default_applicable_licenses: ["external_rust_crates_foldhash_license"],
+    default_team: "trendy_team_android_rust",
+}
+
+license {
+    name: "external_rust_crates_foldhash_license",
+    visibility: [":__subpackages__"],
+    license_kinds: ["SPDX-license-identifier-Zlib"],
+    license_text: ["LICENSE"],
+}
+
+rust_library {
+    name: "libfoldhash",
+    host_supported: true,
+    crate_name: "foldhash",
+    cargo_env_compat: true,
+    cargo_pkg_version: "0.1.3",
+    crate_root: "src/lib.rs",
+    edition: "2021",
+    features: [
+        "default",
+        "std",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+    product_available: true,
+    vendor_available: true,
+}
diff --git a/crates/foldhash/Cargo.toml b/crates/foldhash/Cargo.toml
new file mode 100644
index 0000000..f7a751e
--- /dev/null
+++ b/crates/foldhash/Cargo.toml
@@ -0,0 +1,74 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "foldhash"
+version = "0.1.3"
+authors = ["Orson Peters <[email protected]>"]
+build = false
+exclude = [
+    "benches",
+    "tools",
+    "assets",
+]
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+description = "A fast, non-cryptographic, minimally DoS-resistant hashing algorithm."
+readme = "README.md"
+keywords = [
+    "hash",
+    "hasher",
+    "no-std",
+]
+categories = [
+    "algorithms",
+    "no-std",
+]
+license = "Zlib"
+repository = "https://github.com/orlp/foldhash"
+
+[profile.release]
+lto = "thin"
+
+[lib]
+name = "foldhash"
+path = "src/lib.rs"
+bench = false
+
+[dependencies]
+
+[dev-dependencies.ahash]
+version = "0.8"
+
+[dev-dependencies.chrono]
+version = "0.4"
+
+[dev-dependencies.criterion]
+version = "0.5"
+
+[dev-dependencies.fxhash]
+version = "0.2"
+
+[dev-dependencies.hashbrown]
+version = "0.14"
+
+[dev-dependencies.rand]
+version = "0.8"
+
+[dev-dependencies.uuid]
+version = "1.8"
+
+[features]
+default = ["std"]
+std = []
diff --git a/crates/foldhash/LICENSE b/crates/foldhash/LICENSE
new file mode 100644
index 0000000..2f65b0a
--- /dev/null
+++ b/crates/foldhash/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2024 Orson Peters
+
+This software is provided 'as-is', without any express or implied warranty. In
+no event will the authors be held liable for any damages arising from the use of
+this software.
+
+Permission is granted to anyone to use this software for any purpose, including
+commercial applications, and to alter it and redistribute it freely, subject to
+the following restrictions:
+
+1. The origin of this software must not be misrepresented; you must not claim
+    that you wrote the original software. If you use this software in a product,
+    an acknowledgment in the product documentation would be appreciated but is
+    not required.
+
+2. Altered source versions must be plainly marked as such, and must not be
+    misrepresented as being the original software.
+
+3. This notice may not be removed or altered from any source distribution.
\ No newline at end of file
diff --git a/crates/foldhash/METADATA b/crates/foldhash/METADATA
new file mode 100644
index 0000000..d8f863f
--- /dev/null
+++ b/crates/foldhash/METADATA
@@ -0,0 +1,17 @@
+name: "foldhash"
+description: "A fast, non-cryptographic, minimally DoS-resistant hashing algorithm."
+third_party {
+  version: "0.1.3"
+  license_type: NOTICE
+  last_upgrade_date {
+    year: 2024
+    month: 12
+    day: 16
+  }
+  homepage: "https://crates.io/crates/foldhash"
+  identifier {
+    type: "Archive"
+    value: "https://static.crates.io/crates/foldhash/foldhash-0.1.3.crate"
+    version: "0.1.3"
+  }
+}
diff --git a/crates/foldhash/MODULE_LICENSE_ZLIB b/crates/foldhash/MODULE_LICENSE_ZLIB
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/crates/foldhash/MODULE_LICENSE_ZLIB
diff --git a/crates/foldhash/README.md b/crates/foldhash/README.md
new file mode 100644
index 0000000..706cd50
--- /dev/null
+++ b/crates/foldhash/README.md
@@ -0,0 +1,277 @@
+# Foldhash
+  
+This repository contains foldhash, a fast, non-cryptographic, minimally
+DoS-resistant hashing algorithm implemented in Rust designed for computational
+uses such as hash maps, bloom filters, count sketching, etc.
+
+When should you **not** use foldhash:
+
+- You are afraid of people studying your long-running program's behavior to
+  reverse engineer its internal random state and using this knowledge to create
+  many colliding inputs for computational complexity attacks. For more details
+  see the section "HashDoS resistance".
+
+- You expect foldhash to have a consistent output across versions or
+  platforms, such as for persistent file formats or communication protocols.
+  
+- You are relying on foldhash's properties for any kind of security.
+  Foldhash is **not appropriate for any cryptographic purpose**.
+
+Foldhash has two variants, one optimized for speed which is ideal for data
+structures such as hash maps and bloom filters, and one optimized for
+statistical quality which is ideal for algorithms such as
+[HyperLogLog](https://en.wikipedia.org/wiki/HyperLogLog) and
+[MinHash](https://en.wikipedia.org/wiki/MinHash).
+
+Foldhash can be used in a `#![no_std]` environment by disabling its default
+`"std"` feature.
+  
+
+## Performance
+
+We evaluated foldhash against three commonly used hashes in Rust:
+[aHash](https://github.com/tkaitchuck/aHash) v0.8.11,
+[fxhash](https://github.com/cbreeden/fxhash) v0.2.1, and
+[SipHash-1-3](https://en.wikipedia.org/wiki/SipHash), the default hash algorithm
+in Rust at the time of writing. We evaluated both variants foldhash provides,
+`foldhash-f` and `foldhash-q`, which correspond to `foldhash::fast` and
+`foldhash::quality` in the crate respectively.
+
+First we note that hashers with random state inflate the size of your `HashMap`,
+which may or may not be important for your performance:
+```rust
+std::mem::size_of::<foldhash::HashMap<u32, u32>>() = 40  // (both variants)
+std::mem::size_of::<ahash::HashMap<u32, u32>>() = 64
+std::mem::size_of::<fxhash::FxHashMap<u32, u32>>() = 32
+std::mem::size_of::<std::collections::HashMap<u32, u32>>() = 48
+```
+
+We tested runtime performance on two machines, one with a 2023 Apple M2 CPU, one
+with a 2023 Intel Xeon Platinum 8481C server CPU, both with stable Rust 1.80.1.
+Since one of our competitors (aHash) is reliant on AES-based instructions for
+optimal performance we have included both a benchmark with and without
+`-C target-cpu=native` for the Intel machine.
+
+We tested across a wide variety of data types we consider representative of
+types / distributions one might hash in the real world, in the context of a hash
+table key:
+
+- `u32` - random 32-bit unsigned integers,
+- `u32pair` - pairs of random 32-bit unsigned integers,
+- `u64` - random 64-bit unsigned integers,
+- `u64pair` - pairs of random 64-bit unsigned integers,
+- `u64lobits` - 64-bit unsigned integers where only the bottom 16 bits vary,
+- `u64hibits` - 64-bit unsigned integers where only the top 16 bits vary,
+- `ipv4` - [`std::net::Ipv4Addr`](https://doc.rust-lang.org/std/net/struct.Ipv4Addr.html), which is equivalent to `[u8; 4]`, 
+- `ipv6` - [`std::net::Ipv6Addr`](https://doc.rust-lang.org/std/net/struct.Ipv6Addr.html), which is equivalent to `[u8; 16]`, 
+- `rgba` - random `(u8, u8, u8, u8)` tuples,
+- `strenglishword` - strings containing words sampled uniformly from the top 10,000 most common English words,
+- `struuid` - random UUIDs, hashed in string representation,
+- `strurl` - strings containing URLs sampled uniformly from a corpus of 10,000 URLs,
+- `strdate` - random `YYYY-MM-DD` date strings,
+- `accesslog` - `(u128, u32, chrono::NaiveDate, bool)`, meant to simulate a typical
+   larger compound type, in this case `(resource_id, user_id, date, success)`
+   for an access log.
+- `kilobyte` - random bytestrings one kilobyte in length,
+- `tenkilobyte` - random bytestrings ten kilobytes in length.
+
+We tested the performance of hashing the above data types in the following four contexts:
+
+- `hashonly` - only the time it takes to hash the value,
+- `lookupmiss` - the time it takes to do a lookup in a 1,000 element hash map
+of random elements, only sampling keys of which we know that are not in the hash map,
+- `lookuphit` - similar to `lookupmiss`, except the keys are sampled from keys
+known to be in the hash map,
+- `setbuild` - the time it takes to construct a `HashSet` of 1,000 elements
+from 1,000 randomly sampled elements each repeated 10 times (so 10,000 inserts,
+with ~90% duplicates).
+
+All times are reported as expected time per operation, so one hash, one lookup,
+or one insert respectively. The full results [can be found
+here](https://gist.github.com/orlp/1271ad5b8b775c651cc55773888858eb). To
+summarize, we will only show the results for `u64` and `strenglishword` here, as
+well as the observed geometric mean and average rank over the full benchmark.
+
+```
+Xeon 8481c
+
+┌────────────────┬────────────┬────────────┬────────────┬─────────┬─────────┬─────────┐
+│              avg_rank       ┆       1.58 ┆       2.66 ┆    2.09 ┆    3.70 ┆    4.97 │
+│        geometric_mean       ┆       6.21 ┆       7.01 ┆    7.56 ┆    8.74 ┆   28.70 │
+╞════════════════╪════════════╪════════════╪════════════╪═════════╪═════════╪═════════╡
+│          distr ┆      bench ┆ foldhash-f ┆ foldhash-q ┆  fxhash ┆   ahash ┆ siphash │
+╞════════════════╪════════════╪════════════╪════════════╪═════════╪═════════╪═════════╡
+│            u64 ┆   hashonly ┆       0.79 ┆       1.03 ┆    0.67 ┆    1.23 ┆    9.09 │
+│            u64 ┆ lookupmiss ┆       2.01 ┆       2.44 ┆    1.73 ┆    2.73 ┆   12.03 │
+│            u64 ┆  lookuphit ┆       3.04 ┆       3.59 ┆    2.64 ┆    3.84 ┆   12.65 │
+│            u64 ┆   setbuild ┆       6.13 ┆       6.52 ┆    4.88 ┆    6.66 ┆   17.80 │
+|            ... ┆        ... ┆        ... ┆        ... ┆     ... ┆     ... ┆     ... |
+│ strenglishword ┆   hashonly ┆       2.63 ┆       2.98 ┆    3.24 ┆    3.57 ┆   11.87 │
+│ strenglishword ┆ lookupmiss ┆       4.63 ┆       5.03 ┆    4.51 ┆    5.86 ┆   15.19 │
+│ strenglishword ┆  lookuphit ┆       8.62 ┆       9.25 ┆    8.28 ┆   10.06 ┆   21.35 │
+│ strenglishword ┆   setbuild ┆      14.77 ┆      15.57 ┆   18.86 ┆   15.72 ┆   35.36 │
+└────────────────┴────────────┴────────────┴────────────┴─────────┴─────────┴─────────┘
+
+Xeon 8481c with RUSTFLAGS="-C target-cpu=native"
+
+┌────────────────┬────────────┬────────────┬────────────┬─────────┬─────────┬─────────┐
+│              avg_rank       ┆       1.89 ┆       3.12 ┆    2.25 ┆    2.77 ┆    4.97 │ 
+│        geometric_mean       ┆       6.00 ┆       6.82 ┆    7.39 ┆    6.94 ┆   29.49 │ 
+╞════════════════╪════════════╪════════════╪════════════╪═════════╪═════════╪═════════╡
+│          distr ┆      bench ┆ foldhash-f ┆ foldhash-q ┆  fxhash ┆   ahash ┆ siphash │
+╞════════════════╪════════════╪════════════╪════════════╪═════════╪═════════╪═════════╡
+│            u64 ┆   hashonly ┆       0.79 ┆       1.01 ┆    0.67 ┆    1.34 ┆    9.24 │
+│            u64 ┆ lookupmiss ┆       1.68 ┆       2.12 ┆    1.62 ┆    1.96 ┆   12.04 │
+│            u64 ┆  lookuphit ┆       2.68 ┆       3.19 ┆    2.28 ┆    3.16 ┆   13.09 │
+│            u64 ┆   setbuild ┆       6.16 ┆       6.42 ┆    4.75 ┆    7.03 ┆   18.88 │
+|            ... ┆        ... ┆        ... ┆        ... ┆     ... ┆     ... ┆     ... |
+│ strenglishword ┆   hashonly ┆       2.60 ┆       2.97 ┆    3.25 ┆    3.04 ┆   11.58 │
+│ strenglishword ┆ lookupmiss ┆       4.41 ┆       4.96 ┆    4.82 ┆    4.79 ┆   32.31 │
+│ strenglishword ┆  lookuphit ┆       8.68 ┆       9.35 ┆    8.46 ┆    8.63 ┆   21.48 │
+│ strenglishword ┆   setbuild ┆      15.01 ┆      16.34 ┆   19.34 ┆   15.37 ┆   35.22 │
+└────────────────┴────────────┴────────────┴────────────┴─────────┴─────────┴─────────┘
+
+Apple M2
+
+┌────────────────┬────────────┬────────────┬────────────┬─────────┬─────────┬─────────┐
+│              avg_rank       ┆       1.62 ┆       2.81 ┆    2.02 ┆    3.58 ┆    4.97 │
+│        geometric_mean       ┆       4.41 ┆       4.86 ┆    5.39 ┆    5.71 ┆   21.94 │
+╞════════════════╪════════════╪════════════╪════════════╪═════════╪═════════╪═════════╡
+│          distr ┆      bench ┆ foldhash-f ┆ foldhash-q ┆  fxhash ┆   ahash ┆ siphash │
+╞════════════════╪════════════╪════════════╪════════════╪═════════╪═════════╪═════════╡
+│            u64 ┆   hashonly ┆       0.60 ┆       0.70 ┆    0.41 ┆    0.78 ┆    6.61 │
+│            u64 ┆ lookupmiss ┆       1.50 ┆       1.61 ┆    1.23 ┆    1.65 ┆    8.28 │
+│            u64 ┆  lookuphit ┆       1.78 ┆       2.10 ┆    1.57 ┆    2.25 ┆    8.53 │
+│            u64 ┆   setbuild ┆       4.74 ┆       5.19 ┆    3.61 ┆    5.38 ┆   15.36 │
+|            ... ┆        ... ┆        ... ┆        ... ┆     ... ┆     ... ┆     ... |
+│ strenglishword ┆   hashonly ┆       1.84 ┆       2.13 ┆    1.85 ┆    2.13 ┆   11.61 │
+│ strenglishword ┆ lookupmiss ┆       2.71 ┆       2.96 ┆    2.47 ┆    2.99 ┆    9.27 │
+│ strenglishword ┆  lookuphit ┆       7.54 ┆       8.77 ┆    7.83 ┆    8.77 ┆   18.65 │
+│ strenglishword ┆   setbuild ┆      16.61 ┆      17.09 ┆   14.83 ┆   16.52 ┆   26.42 │
+└────────────────┴────────────┴────────────┴────────────┴─────────┴─────────┴─────────┘
+```
+
+We note from the above benchmark that for hash table performance the extra
+quality that `foldhash-q` provides is almost never actually worth the small but
+also non-negligible computational overhead it has over `foldhash-f`. This is our
+justification for providing `foldhash::fast` as a default choice for hash
+tables, even though it has measurable biases (see also the "Quality" section).
+
+fxhash generally does fairly well for small inputs on the benchmarks, however it
+has structural weaknesses as a hash which makes it ill-advised to use as a
+general-purpose hash function in our opinion. For example the `lookuphit`
+benchmark on Apple M2 for `u64hibits` takes 1.77 nanoseconds per lookup for
+foldhash, but 67.72 nanoseconds for fxhash (due to everything colliding - the
+effects would be even worse with a larger hash map). In our opinion foldhash-f
+strikes the right balance between quality and performance for hash tables,
+whereas fxhash flies a bit too close to the sun.
+
+aHash is faster than foldhash for medium-long strings when compiled with
+AES instruction support, but is slower in almost every other scenario or when
+AES instructions are unavailable.
+
+
+## Quality
+
+Foldhash-f is a fairly strong hash in terms of collisions *on its full 64-bit
+output*. However, statistical tests such as
+[SMHasher3](https://gitlab.com/fwojcik/smhasher3) can distinguish it from an ideal
+hash function in tests that focus on the relationship between individual
+input/output bits. One such property is avalanching: changing a single bit in
+the input does not flip every other bit with 50% probability when using
+foldhash-f like it should if it behaved like a proper random oracle.
+
+As the benchmarks above show, spending more effort in foldhash-f to improve the
+hash quality does not lead to better hash table performance. However, there are
+also use cases for hash functions where it is important that (each bit of) the
+hash is unbiased and a random function of all bits of the input, such as in
+algorithms such as HyperLogLog or MinHash.
+
+For this purpose we also provide foldhash-q, which is simply a post-processed
+version of foldhash-f to properly avalanche all the bits. Foldhash-q passes the
+[SMHasher3](https://gitlab.com/fwojcik/smhasher3) test suite [without any
+failures](https://github.com/orlp/foldhash_smhasher3). You can also plot the
+worst-case probability (where 50% is ideal) that any particular output bit flips
+if you flip an input bit, which nicely visualizes how fxhash and foldhash-f
+fail this avalanche property but foldhash-q and SipHash-1-3 pass:
+
+
+| FxHash | Foldhash-f | Foldhash-q | SipHash-1-3 |
+|--------|------------|------------|-------------|
+| <img src="assets/avalanche-fxhash.png" width=300> | <img src="assets/avalanche-foldhash-fast.png" width=300> | <img src="assets/avalanche-foldhash-quality.png" width=300> | <img src="assets/avalanche-siphash.png" width=300>
+
+
+## Background
+
+The name foldhash is derived from the *folded multiply*. This technique
+compresses two 64-bit words into a single 64-bit word while simultaneously
+thoroughly mixing the bits. It does this using a 64 x 64 bit -> 128 bit
+multiplication followed by folding the two halves of the 128-bit product
+together using a XOR operation:
+
+```rust
+let full = (x as u128) * (y as u128);
+let lo = full as u64;
+let hi = (full >> 64) as u64;
+let folded = lo ^ hi;
+```
+
+We're not aware of a formal analysis of this operation, but empirically it works
+very well. An informal intuition for why it should work is that multiplication
+can be seen as the sum of many shifted copies of one of the arguments, only
+including those shifted copies where the other argument has set bits, e.g. for
+multiplying 4-bit words `abcd` and `efgh`:
+
+```
+abcd * efgh =
+
+  abcd    * e
+   abcd   * f
+    abcd  * g
+     abcd * h
+--------------- +
+```
+
+Note that the middle bits of the product are a function of many of the input
+bits, whereas the top-most and bottom-most bits are impacted by fewer of the
+input bits. By folding the top half back onto the bottom half these effects
+compensate each other, making all the output bits affected by much of the input.
+
+We did not invent the folded multiply, it was previously used in essentially the
+same way in [aHash](https://github.com/tkaitchuck/aHash),
+[wyhash](https://github.com/wangyi-fudan/wyhash), and
+[xxhash3](https://github.com/Cyan4973/xxHash). The operation was also used
+in [mum-hash](https://github.com/vnmakarov/mum-hash), and probably others.
+We do not know who originally invented it, the earliest reference
+we could find was Steven Fuerst [blogging about it](https://web.archive.org/web/20121213174842/http://locklessinc.com/articles/crypto_hash/)
+in 2012.
+
+
+## HashDoS resistance
+
+The folded multiply has a fairly glaring flaw: if one of the halves is zero, the
+output is zero. This makes it trivial to create a large number of hash
+collisions (even by accident, as zeroes are a common input to hashes). To combat
+this, every folded multiply in foldhash has the following form:
+
+```rust
+folded_multiply(input1 ^ secret1, input2 ^ secret2)
+```
+
+Here `secret1` or `secret2` are either secret random numbers generated by
+foldhash beforehand, or partial hash results influenced by such a secret prior.
+This (plus other careful design throughout the hash function) ensures that it is
+not possible to create a list of inputs that collide for every instance of
+foldhash, and also prevents certain access patterns on hash tables going
+quadratic by ensuring that each hash table uses a different seed and thus a
+different access pattern. It is these two properties that we refer to when we
+claim foldhash is "minimally DoS-resistant": it does the bare minimum to defeat
+very simple attacks.
+
+However, to be crystal clear, **foldhash does not claim to provide HashDoS
+resistance against interactive attackers**. For a student of cryptography it
+should be trivial to derive the secret values from direct observation of hash
+outputs, and feasible to derive the secret values from indirect observation of
+hashes, such as through timing attacks or hash table iteration. Once an attacker
+knows the secret values, they can once again create infinite hash collisions
+with ease.
diff --git a/crates/foldhash/cargo_embargo.json b/crates/foldhash/cargo_embargo.json
new file mode 100644
index 0000000..9e26dfe
--- /dev/null
+++ b/crates/foldhash/cargo_embargo.json
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/crates/foldhash/src/convenience.rs b/crates/foldhash/src/convenience.rs
new file mode 100644
index 0000000..d515d0b
--- /dev/null
+++ b/crates/foldhash/src/convenience.rs
@@ -0,0 +1,65 @@
+use super::fast::{FixedState, RandomState};
+
+/// Type alias for [`std::collections::HashMap<K, V, foldhash::fast::RandomState>`].
+pub type HashMap<K, V> = std::collections::HashMap<K, V, RandomState>;
+
+/// Type alias for [`std::collections::HashSet<T, foldhash::fast::RandomState>`].
+pub type HashSet<T> = std::collections::HashSet<T, RandomState>;
+
+/// A convenience extension trait to enable [`HashMap::new`] for hash maps that use `foldhash`.
+pub trait HashMapExt {
+    /// Creates an empty `HashMap`.
+    fn new() -> Self;
+
+    /// Creates an empty `HashMap` with at least the specified capacity.
+    fn with_capacity(capacity: usize) -> Self;
+}
+
+impl<K, V> HashMapExt for std::collections::HashMap<K, V, RandomState> {
+    fn new() -> Self {
+        Self::with_hasher(RandomState::default())
+    }
+
+    fn with_capacity(capacity: usize) -> Self {
+        Self::with_capacity_and_hasher(capacity, RandomState::default())
+    }
+}
+
+impl<K, V> HashMapExt for std::collections::HashMap<K, V, FixedState> {
+    fn new() -> Self {
+        Self::with_hasher(FixedState::default())
+    }
+
+    fn with_capacity(capacity: usize) -> Self {
+        Self::with_capacity_and_hasher(capacity, FixedState::default())
+    }
+}
+
+/// A convenience extension trait to enable [`HashSet::new`] for hash sets that use `foldhash`.
+pub trait HashSetExt {
+    /// Creates an empty `HashSet`.
+    fn new() -> Self;
+
+    /// Creates an empty `HashSet` with at least the specified capacity.
+    fn with_capacity(capacity: usize) -> Self;
+}
+
+impl<T> HashSetExt for std::collections::HashSet<T, RandomState> {
+    fn new() -> Self {
+        Self::with_hasher(RandomState::default())
+    }
+
+    fn with_capacity(capacity: usize) -> Self {
+        Self::with_capacity_and_hasher(capacity, RandomState::default())
+    }
+}
+
+impl<T> HashSetExt for std::collections::HashSet<T, FixedState> {
+    fn new() -> Self {
+        Self::with_hasher(FixedState::default())
+    }
+
+    fn with_capacity(capacity: usize) -> Self {
+        Self::with_capacity_and_hasher(capacity, FixedState::default())
+    }
+}
diff --git a/crates/foldhash/src/lib.rs b/crates/foldhash/src/lib.rs
new file mode 100644
index 0000000..deb1636
--- /dev/null
+++ b/crates/foldhash/src/lib.rs
@@ -0,0 +1,397 @@
+//! This crate provides foldhash, a fast, non-cryptographic, minimally
+//! DoS-resistant hashing algorithm designed for computational uses such as
+//! hashmaps, bloom filters, count sketching, etc.
+//!
+//! When should you **not** use foldhash:
+//!
+//! - You are afraid of people studying your long-running program's behavior
+//!   to reverse engineer its internal random state and using this knowledge to
+//!   create many colliding inputs for computational complexity attacks.
+//!
+//! - You expect foldhash to have a consistent output across versions or
+//!   platforms, such as for persistent file formats or communication protocols.
+//!   
+//! - You are relying on foldhash's properties for any kind of security.
+//!   Foldhash is **not appropriate for any cryptographic purpose**.
+//!
+//! Foldhash has two variants, one optimized for speed which is ideal for data
+//! structures such as hash maps and bloom filters, and one optimized for
+//! statistical quality which is ideal for algorithms such as
+//! [HyperLogLog](https://en.wikipedia.org/wiki/HyperLogLog) and
+//! [MinHash](https://en.wikipedia.org/wiki/MinHash).
+//!
+//! Foldhash can be used in a `#![no_std]` environment by disabling its default
+//! `"std"` feature.
+//!
+//! # Usage
+//!
+//! The easiest way to use this crate with the standard library [`HashMap`] or
+//! [`HashSet`] is to import them from `foldhash` instead, along with the
+//! extension traits to make [`HashMap::new`] and [`HashMap::with_capacity`]
+//! work out-of-the-box:
+//!
+//! ```rust
+//! use foldhash::{HashMap, HashMapExt};
+//!
+//! let mut hm = HashMap::new();
+//! hm.insert(42, "hello");
+//! ```
+//!
+//! You can also avoid the convenience types and do it manually by initializing
+//! a [`RandomState`](fast::RandomState), for example if you are using a different hash map
+//! implementation like [`hashbrown`](https://docs.rs/hashbrown/):
+//!
+//! ```rust
+//! use hashbrown::HashMap;
+//! use foldhash::fast::RandomState;
+//!
+//! let mut hm = HashMap::with_hasher(RandomState::default());
+//! hm.insert("foo", "bar");
+//! ```
+//!
+//! The above methods are the recommended way to use foldhash, which will
+//! automatically generate a randomly generated hasher instance for you. If you
+//! absolutely must have determinism you can use [`FixedState`](fast::FixedState)
+//! instead, but note that this makes you trivially vulnerable to HashDoS
+//! attacks and might lead to quadratic runtime when moving data from one
+//! hashmap/set into another:
+//!
+//! ```rust
+//! use std::collections::HashSet;
+//! use foldhash::fast::FixedState;
+//!
+//! let mut hm = HashSet::with_hasher(FixedState::with_seed(42));
+//! hm.insert([1, 10, 100]);
+//! ```
+//!
+//! If you rely on statistical properties of the hash for the correctness of
+//! your algorithm, such as in [HyperLogLog](https://en.wikipedia.org/wiki/HyperLogLog),
+//! it is suggested to use the [`RandomState`](quality::RandomState)
+//! or [`FixedState`](quality::FixedState) from the [`quality`] module instead
+//! of the [`fast`] module. The latter is optimized purely for speed in hash
+//! tables and has known statistical imperfections.
+//!
+//! Finally, you can also directly use the [`RandomState`](quality::RandomState)
+//! or [`FixedState`](quality::FixedState) to manually hash items using the
+//! [`BuildHasher`](std::hash::BuildHasher) trait:
+//! ```rust
+//! use std::hash::BuildHasher;
+//! use foldhash::quality::RandomState;
+//!
+//! let random_state = RandomState::default();
+//! let hash = random_state.hash_one("hello world");
+//! ```
+
+#![cfg_attr(all(not(test), not(feature = "std")), no_std)]
+#![warn(missing_docs)]
+
+use core::hash::Hasher;
+
+#[cfg(feature = "std")]
+mod convenience;
+mod seed;
+
+#[cfg(feature = "std")]
+pub use convenience::*;
+
+// Arbitrary constants with high entropy. Hexadecimal digits of pi were used.
+const ARBITRARY0: u64 = 0x243f6a8885a308d3;
+const ARBITRARY1: u64 = 0x13198a2e03707344;
+const ARBITRARY2: u64 = 0xa4093822299f31d0;
+const ARBITRARY3: u64 = 0x082efa98ec4e6c89;
+const ARBITRARY4: u64 = 0x452821e638d01377;
+const ARBITRARY5: u64 = 0xbe5466cf34e90c6c;
+const ARBITRARY6: u64 = 0xc0ac29b7c97c50dd;
+const ARBITRARY7: u64 = 0x3f84d5b5b5470917;
+const ARBITRARY8: u64 = 0x9216d5d98979fb1b;
+const ARBITRARY9: u64 = 0xd1310ba698dfb5ac;
+
+#[inline(always)]
+const fn folded_multiply(x: u64, y: u64) -> u64 {
+    #[cfg(target_pointer_width = "64")]
+    {
+        // We compute the full u64 x u64 -> u128 product, this is a single mul
+        // instruction on x86-64, one mul plus one mulhi on ARM64.
+        let full = (x as u128) * (y as u128);
+        let lo = full as u64;
+        let hi = (full >> 64) as u64;
+
+        // The middle bits of the full product fluctuate the most with small
+        // changes in the input. This is the top bits of lo and the bottom bits
+        // of hi. We can thus make the entire output fluctuate with small
+        // changes to the input by XOR'ing these two halves.
+        lo ^ hi
+    }
+
+    #[cfg(target_pointer_width = "32")]
+    {
+        // u64 x u64 -> u128 product is prohibitively expensive on 32-bit.
+        // Decompose into 32-bit parts.
+        let lx = x as u32;
+        let ly = y as u32;
+        let hx = (x >> 32) as u32;
+        let hy = (y >> 32) as u32;
+
+        // u32 x u32 -> u64 the low bits of one with the high bits of the other.
+        let afull = (lx as u64) * (hy as u64);
+        let bfull = (hx as u64) * (ly as u64);
+
+        // Combine, swapping low/high of one of them so the upper bits of the
+        // product of one combine with the lower bits of the other.
+        afull ^ bfull.rotate_right(32)
+    }
+}
+
+/// The foldhash implementation optimized for speed.
+pub mod fast {
+    use super::*;
+
+    pub use seed::fast::{FixedState, RandomState};
+
+    /// A [`Hasher`] instance implementing foldhash, optimized for speed.
+    ///
+    /// It can't be created directly, see [`RandomState`] or [`FixedState`].
+    #[derive(Clone)]
+    pub struct FoldHasher {
+        accumulator: u64,
+        sponge: u128,
+        sponge_len: u8,
+        fold_seed: u64,
+        expand_seed: u64,
+        expand_seed2: u64,
+        expand_seed3: u64,
+    }
+
+    impl FoldHasher {
+        pub(crate) fn with_seed(per_hasher_seed: u64, global_seed: &[u64; 4]) -> FoldHasher {
+            FoldHasher {
+                accumulator: per_hasher_seed,
+                sponge: 0,
+                sponge_len: 0,
+                fold_seed: global_seed[0],
+                expand_seed: global_seed[1],
+                expand_seed2: global_seed[2],
+                expand_seed3: global_seed[3],
+            }
+        }
+
+        #[inline(always)]
+        fn write_num<T: Into<u128>>(&mut self, x: T) {
+            let bits: usize = 8 * core::mem::size_of::<T>();
+            if self.sponge_len as usize + bits > 128 {
+                let lo = self.sponge as u64;
+                let hi = (self.sponge >> 64) as u64;
+                self.accumulator = folded_multiply(lo ^ self.accumulator, hi ^ self.fold_seed);
+                self.sponge = x.into();
+                self.sponge_len = 0;
+            } else {
+                self.sponge |= x.into() << self.sponge_len;
+                self.sponge_len += bits as u8;
+            }
+        }
+    }
+
+    impl Hasher for FoldHasher {
+        #[inline(always)]
+        fn write(&mut self, bytes: &[u8]) {
+            let mut s0 = self.accumulator;
+            let mut s1 = self.expand_seed;
+            let len = bytes.len();
+            if len <= 16 {
+                // XOR the input into s0, s1, then multiply and fold.
+                if len >= 8 {
+                    s0 ^= u64::from_ne_bytes(bytes[0..8].try_into().unwrap());
+                    s1 ^= u64::from_ne_bytes(bytes[len - 8..].try_into().unwrap());
+                } else if len >= 4 {
+                    s0 ^= u32::from_ne_bytes(bytes[0..4].try_into().unwrap()) as u64;
+                    s1 ^= u32::from_ne_bytes(bytes[len - 4..].try_into().unwrap()) as u64;
+                } else if len > 0 {
+                    let lo = bytes[0];
+                    let mid = bytes[len / 2];
+                    let hi = bytes[len - 1];
+                    s0 ^= lo as u64;
+                    s1 ^= ((hi as u64) << 8) | mid as u64;
+                }
+                self.accumulator = folded_multiply(s0, s1);
+            } else if len < 256 {
+                self.accumulator = hash_bytes_medium(bytes, s0, s1, self.fold_seed);
+            } else {
+                self.accumulator = hash_bytes_long(
+                    bytes,
+                    s0,
+                    s1,
+                    self.expand_seed2,
+                    self.expand_seed3,
+                    self.fold_seed,
+                );
+            }
+        }
+
+        #[inline(always)]
+        fn write_u8(&mut self, i: u8) {
+            self.write_num(i);
+        }
+
+        #[inline(always)]
+        fn write_u16(&mut self, i: u16) {
+            self.write_num(i);
+        }
+
+        #[inline(always)]
+        fn write_u32(&mut self, i: u32) {
+            self.write_num(i);
+        }
+
+        #[inline(always)]
+        fn write_u64(&mut self, i: u64) {
+            self.write_num(i);
+        }
+
+        #[inline(always)]
+        fn write_u128(&mut self, i: u128) {
+            let lo = i as u64;
+            let hi = (i >> 64) as u64;
+            self.accumulator = folded_multiply(lo ^ self.accumulator, hi ^ self.fold_seed);
+        }
+
+        #[inline(always)]
+        fn write_usize(&mut self, i: usize) {
+            // u128 doesn't implement From<usize>.
+            #[cfg(target_pointer_width = "32")]
+            self.write_num(i as u32);
+            #[cfg(target_pointer_width = "64")]
+            self.write_num(i as u64);
+        }
+
+        #[inline(always)]
+        fn finish(&self) -> u64 {
+            if self.sponge_len > 0 {
+                let lo = self.sponge as u64;
+                let hi = (self.sponge >> 64) as u64;
+                folded_multiply(lo ^ self.accumulator, hi ^ self.fold_seed)
+            } else {
+                self.accumulator
+            }
+        }
+    }
+}
+
+/// The foldhash implementation optimized for quality.
+pub mod quality {
+    use super::*;
+
+    pub use seed::quality::{FixedState, RandomState};
+
+    /// A [`Hasher`] instance implementing foldhash, optimized for quality.
+    ///
+    /// It can't be created directly, see [`RandomState`] or [`FixedState`].
+    #[derive(Clone)]
+    pub struct FoldHasher {
+        pub(crate) inner: fast::FoldHasher,
+    }
+
+    impl Hasher for FoldHasher {
+        #[inline(always)]
+        fn write(&mut self, bytes: &[u8]) {
+            self.inner.write(bytes);
+        }
+
+        #[inline(always)]
+        fn write_u8(&mut self, i: u8) {
+            self.inner.write_u8(i);
+        }
+
+        #[inline(always)]
+        fn write_u16(&mut self, i: u16) {
+            self.inner.write_u16(i);
+        }
+
+        #[inline(always)]
+        fn write_u32(&mut self, i: u32) {
+            self.inner.write_u32(i);
+        }
+
+        #[inline(always)]
+        fn write_u64(&mut self, i: u64) {
+            self.inner.write_u64(i);
+        }
+
+        #[inline(always)]
+        fn write_u128(&mut self, i: u128) {
+            self.inner.write_u128(i);
+        }
+
+        #[inline(always)]
+        fn write_usize(&mut self, i: usize) {
+            self.inner.write_usize(i);
+        }
+
+        #[inline(always)]
+        fn finish(&self) -> u64 {
+            folded_multiply(self.inner.finish(), ARBITRARY0)
+        }
+    }
+}
+
+/// Hashes strings >= 16 bytes, has unspecified behavior when bytes.len() < 16.
+fn hash_bytes_medium(bytes: &[u8], mut s0: u64, mut s1: u64, fold_seed: u64) -> u64 {
+    // Process 32 bytes per iteration, 16 bytes from the start, 16 bytes from
+    // the end. On the last iteration these two chunks can overlap, but that is
+    // perfectly fine.
+    let left_to_right = bytes.chunks_exact(16);
+    let mut right_to_left = bytes.rchunks_exact(16);
+    for lo in left_to_right {
+        let hi = right_to_left.next().unwrap();
+        let unconsumed_start = lo.as_ptr();
+        let unconsumed_end = hi.as_ptr_range().end;
+        if unconsumed_start >= unconsumed_end {
+            break;
+        }
+
+        let a = u64::from_ne_bytes(lo[0..8].try_into().unwrap());
+        let b = u64::from_ne_bytes(lo[8..16].try_into().unwrap());
+        let c = u64::from_ne_bytes(hi[0..8].try_into().unwrap());
+        let d = u64::from_ne_bytes(hi[8..16].try_into().unwrap());
+        s0 = folded_multiply(a ^ s0, c ^ fold_seed);
+        s1 = folded_multiply(b ^ s1, d ^ fold_seed);
+    }
+
+    s0 ^ s1
+}
+
+/// Hashes strings >= 16 bytes, has unspecified behavior when bytes.len() < 16.
+#[cold]
+#[inline(never)]
+fn hash_bytes_long(
+    bytes: &[u8],
+    mut s0: u64,
+    mut s1: u64,
+    mut s2: u64,
+    mut s3: u64,
+    fold_seed: u64,
+) -> u64 {
+    let chunks = bytes.chunks_exact(64);
+    let remainder = chunks.remainder().len();
+    for chunk in chunks {
+        let a = u64::from_ne_bytes(chunk[0..8].try_into().unwrap());
+        let b = u64::from_ne_bytes(chunk[8..16].try_into().unwrap());
+        let c = u64::from_ne_bytes(chunk[16..24].try_into().unwrap());
+        let d = u64::from_ne_bytes(chunk[24..32].try_into().unwrap());
+        let e = u64::from_ne_bytes(chunk[32..40].try_into().unwrap());
+        let f = u64::from_ne_bytes(chunk[40..48].try_into().unwrap());
+        let g = u64::from_ne_bytes(chunk[48..56].try_into().unwrap());
+        let h = u64::from_ne_bytes(chunk[56..64].try_into().unwrap());
+        s0 = folded_multiply(a ^ s0, e ^ fold_seed);
+        s1 = folded_multiply(b ^ s1, f ^ fold_seed);
+        s2 = folded_multiply(c ^ s2, g ^ fold_seed);
+        s3 = folded_multiply(d ^ s3, h ^ fold_seed);
+    }
+    s0 ^= s2;
+    s1 ^= s3;
+
+    if remainder > 0 {
+        hash_bytes_medium(&bytes[bytes.len() - remainder.max(16)..], s0, s1, fold_seed)
+    } else {
+        s0 ^ s1
+    }
+}
diff --git a/crates/foldhash/src/seed.rs b/crates/foldhash/src/seed.rs
new file mode 100644
index 0000000..f74d2a6
--- /dev/null
+++ b/crates/foldhash/src/seed.rs
@@ -0,0 +1,326 @@
+use core::hash::BuildHasher;
+
+// These constants may end up unused depending on platform support.
+#[allow(unused)]
+use crate::{ARBITRARY1, ARBITRARY9};
+
+use super::{
+    folded_multiply, ARBITRARY2, ARBITRARY3, ARBITRARY4, ARBITRARY5, ARBITRARY6, ARBITRARY7,
+    ARBITRARY8,
+};
+
+/// Used for FixedState, and RandomState if atomics for dynamic init are unavailable.
+const FIXED_GLOBAL_SEED: [u64; 4] = [ARBITRARY4, ARBITRARY5, ARBITRARY6, ARBITRARY7];
+
+pub mod fast {
+    use super::*;
+    use crate::fast::FoldHasher;
+
+    /// A [`BuildHasher`] for [`fast::FoldHasher`]s that are randomly initialized.
+    #[derive(Copy, Clone, Debug)]
+    pub struct RandomState {
+        // Seed that differs per RandomState instance (see Default below).
+        per_hasher_seed: u64,
+        // Zero-sized token proving the process-global seed was initialized.
+        global_seed: global::GlobalSeed,
+    }
+
+    impl Default for RandomState {
+        fn default() -> Self {
+            let per_hasher_seed;
+
+            // If we have the standard library available we use a thread-local
+            // counter for the per-hasher seed.
+            #[cfg(feature = "std")]
+            {
+                use std::cell::Cell;
+                thread_local! {
+                    static PER_HASHER_NONDETERMINISM: Cell<u64> = const { Cell::new(0) };
+                }
+
+                let mut nondeterminism = PER_HASHER_NONDETERMINISM.get();
+                nondeterminism = nondeterminism.wrapping_add(ARBITRARY1 | 1); // Ensure number is odd for maximum period.
+                PER_HASHER_NONDETERMINISM.set(nondeterminism);
+                per_hasher_seed = folded_multiply(nondeterminism, ARBITRARY2);
+            };
+
+            // If we don't have the standard library we use our current stack
+            // address in combination with a global PER_HASHER_NONDETERMINISM to
+            // create a new value that is very likely to have never been used as
+            // a random state before.
+            //
+            // PER_HASHER_NONDETERMINISM is loaded and updated in a racy manner,
+            // but this doesn't matter in practice - it is impossible that two
+            // different threads have the same stack location, so they'll almost
+            // surely generate different seeds, and provide a different possible
+            // update for PER_HASHER_NONDETERMINISM. If we would use a proper
+            // fetch_add atomic update then there is a larger chance of
+            // problematic contention.
+            //
+            // We use usize instead of 64-bit atomics for best platform support.
+            #[cfg(not(feature = "std"))]
+            {
+                use core::sync::atomic::{AtomicUsize, Ordering};
+                static PER_HASHER_NONDETERMINISM: AtomicUsize = AtomicUsize::new(0);
+
+                let nondeterminism = PER_HASHER_NONDETERMINISM.load(Ordering::Relaxed) as u64;
+                let stack_ptr = &nondeterminism as *const _ as u64;
+                per_hasher_seed = folded_multiply(nondeterminism ^ stack_ptr, ARBITRARY2);
+                PER_HASHER_NONDETERMINISM.store(per_hasher_seed as usize, Ordering::Relaxed);
+            }
+
+            Self {
+                per_hasher_seed,
+                global_seed: global::GlobalSeed::new(),
+            }
+        }
+    }
+
+    impl BuildHasher for RandomState {
+        type Hasher = FoldHasher;
+
+        fn build_hasher(&self) -> FoldHasher {
+            FoldHasher::with_seed(self.per_hasher_seed, self.global_seed.get())
+        }
+    }
+
+    /// A [`BuildHasher`] for [`fast::FoldHasher`]s that all have the same fixed seed.
+    ///
+    /// Not recommended unless you absolutely need determinism.
+    #[derive(Copy, Clone, Debug)]
+    pub struct FixedState {
+        per_hasher_seed: u64,
+    }
+
+    impl FixedState {
+        /// Creates a [`FixedState`] with the given seed.
+        pub const fn with_seed(seed: u64) -> Self {
+            // XOR with ARBITRARY3 such that with_seed(0) matches default.
+            Self {
+                per_hasher_seed: seed ^ ARBITRARY3,
+            }
+        }
+    }
+
+    impl Default for FixedState {
+        fn default() -> Self {
+            // Equivalent to with_seed(0): 0 ^ ARBITRARY3 == ARBITRARY3.
+            Self {
+                per_hasher_seed: ARBITRARY3,
+            }
+        }
+    }
+
+    impl BuildHasher for FixedState {
+        type Hasher = FoldHasher;
+
+        fn build_hasher(&self) -> FoldHasher {
+            // Fixed seeding uses the compile-time global seed for determinism.
+            FoldHasher::with_seed(self.per_hasher_seed, &FIXED_GLOBAL_SEED)
+        }
+    }
+}
+
+pub mod quality {
+    use super::*;
+    use crate::quality::FoldHasher;
+
+    /// A [`BuildHasher`] for [`quality::FoldHasher`]s that are randomly initialized.
+    #[derive(Copy, Clone, Default, Debug)]
+    pub struct RandomState {
+        // Reuses the fast seeding machinery; quality's FoldHasher wraps fast's.
+        inner: fast::RandomState,
+    }
+
+    impl BuildHasher for RandomState {
+        type Hasher = FoldHasher;
+
+        fn build_hasher(&self) -> FoldHasher {
+            FoldHasher {
+                inner: self.inner.build_hasher(),
+            }
+        }
+    }
+
+    /// A [`BuildHasher`] for [`quality::FoldHasher`]s that all have the same fixed seed.
+    ///
+    /// Not recommended unless you absolutely need determinism.
+    #[derive(Copy, Clone, Default, Debug)]
+    pub struct FixedState {
+        inner: fast::FixedState,
+    }
+
+    impl FixedState {
+        /// Creates a [`FixedState`] with the given seed.
+        pub const fn with_seed(seed: u64) -> Self {
+            Self {
+                // We do an additional folded multiply with the seed here for
+                // the quality hash to ensure better independence between seed
+                // and hash. If the seed is zero the folded multiply is zero,
+                // preserving with_seed(0) == default().
+                inner: fast::FixedState::with_seed(folded_multiply(seed, ARBITRARY8)),
+            }
+        }
+    }
+
+    impl BuildHasher for FixedState {
+        type Hasher = FoldHasher;
+
+        fn build_hasher(&self) -> FoldHasher {
+            FoldHasher {
+                inner: self.inner.build_hasher(),
+            }
+        }
+    }
+}
+
+#[cfg(target_has_atomic = "8")]
+mod global {
+    use super::*;
+    use core::cell::UnsafeCell;
+    use core::sync::atomic::{AtomicU8, Ordering};
+
+    /// Generates the process-wide `[u64; 4]` seed, mixing address-space
+    /// layout (stack/function/static addresses) and, with `std`, the current
+    /// time plus a heap address.
+    fn generate_global_seed() -> [u64; 4] {
+        let mix = |seed: u64, x: u64| folded_multiply(seed ^ x, ARBITRARY9);
+
+        // Use address space layout randomization as our main randomness source.
+        // This isn't great, but we don't advertise HashDoS resistance in the first
+        // place. This is a whole lot better than nothing, at near zero cost with
+        // no dependencies.
+        let mut seed = 0;
+        let stack_ptr = &seed as *const _;
+        let func_ptr = generate_global_seed;
+        let static_ptr = &GLOBAL_SEED_STORAGE as *const _;
+        seed = mix(seed, stack_ptr as usize as u64);
+        seed = mix(seed, func_ptr as usize as u64);
+        seed = mix(seed, static_ptr as usize as u64);
+
+        // If we have the standard library available, augment entropy with the
+        // current time and an address from the allocator.
+        #[cfg(feature = "std")]
+        {
+            #[cfg(not(all(target_family = "wasm", target_os = "unknown")))]
+            if let Ok(duration) = std::time::UNIX_EPOCH.elapsed() {
+                seed = mix(seed, duration.subsec_nanos() as u64);
+                seed = mix(seed, duration.as_secs());
+            }
+
+            let box_ptr = &*Box::new(0u8) as *const _;
+            seed = mix(seed, box_ptr as usize as u64);
+        }
+
+        // Derive four decorrelated values by repeatedly re-mixing.
+        let seed_a = mix(seed, 0);
+        let seed_b = mix(mix(mix(seed_a, 0), 0), 0);
+        let seed_c = mix(mix(mix(seed_b, 0), 0), 0);
+        let seed_d = mix(mix(mix(seed_c, 0), 0), 0);
+
+        // Zeroes form a weak-point for the multiply-mix, and zeroes tend to be
+        // a common input. So we want our global seeds that are XOR'ed with the
+        // input to always be non-zero. To also ensure there is always a good spread
+        // of bits, we give up 3 bits of entropy and simply force some bits on.
+        const FORCED_ONES: u64 = (1 << 63) | (1 << 31) | 1;
+        [
+            seed_a | FORCED_ONES,
+            seed_b | FORCED_ONES,
+            seed_c | FORCED_ONES,
+            seed_d | FORCED_ONES,
+        ]
+    }
+
+    // Now all the below code purely exists to cache the above seed as
+    // efficiently as possible. Even if we weren't a no_std crate and had access to
+    // OnceLock, we don't want to check whether the global is set each time we
+    // hash an object, so we hand-roll a global storage where type safety allows us
+    // to assume the storage is initialized after construction.
+    struct GlobalSeedStorage {
+        // One of UNINIT / LOCKED / INIT below.
+        state: AtomicU8,
+        // Only valid to read once state == INIT.
+        seed: UnsafeCell<[u64; 4]>,
+    }
+
+    const UNINIT: u8 = 0;
+    const LOCKED: u8 = 1;
+    const INIT: u8 = 2;
+
+    // SAFETY: we only mutate the UnsafeCells when state is in the thread-exclusive
+    // LOCKED state, and only read the UnsafeCells when state is in the
+    // once-achieved-eternally-preserved state INIT.
+    unsafe impl Sync for GlobalSeedStorage {}
+
+    static GLOBAL_SEED_STORAGE: GlobalSeedStorage = GlobalSeedStorage {
+        state: AtomicU8::new(UNINIT),
+        seed: UnsafeCell::new([0; 4]),
+    };
+
+    /// An object representing an initialized global seed.
+    ///
+    /// Does not actually store the seed inside itself, it is a zero-sized type.
+    /// This prevents inflating the RandomState size and in turn HashMap's size.
+    #[derive(Copy, Clone, Debug)]
+    pub struct GlobalSeed {
+        // So we can't accidentally type GlobalSeed { } within this crate.
+        _no_accidental_unsafe_init: (),
+    }
+
+    impl GlobalSeed {
+        #[inline(always)]
+        pub fn new() -> Self {
+            // Fast path: once INIT, a single acquire load is all this costs.
+            if GLOBAL_SEED_STORAGE.state.load(Ordering::Acquire) != INIT {
+                Self::init_slow()
+            }
+            Self {
+                _no_accidental_unsafe_init: (),
+            }
+        }
+
+        #[cold]
+        #[inline(never)]
+        fn init_slow() {
+            // Generate seed outside of critical section.
+            let seed = generate_global_seed();
+
+            loop {
+                match GLOBAL_SEED_STORAGE.state.compare_exchange_weak(
+                    UNINIT,
+                    LOCKED,
+                    Ordering::Relaxed,
+                    Ordering::Acquire,
+                ) {
+                    Ok(_) => unsafe {
+                        // SAFETY: we just acquired an exclusive lock.
+                        *GLOBAL_SEED_STORAGE.seed.get() = seed;
+                        GLOBAL_SEED_STORAGE.state.store(INIT, Ordering::Release);
+                        return;
+                    },
+
+                    // Another thread already finished initialization.
+                    Err(INIT) => return,
+
+                    // Yes, it's a spin loop. We need to support no_std (so no easy
+                    // access to proper locks), this is a one-time-per-program
+                    // initialization, and the critical section is only a few
+                    // store instructions, so it'll be fine.
+                    _ => core::hint::spin_loop(),
+                }
+            }
+        }
+
+        #[inline(always)]
+        pub fn get(self) -> &'static [u64; 4] {
+            // SAFETY: our constructor ensured we are in the INIT state and thus
+            // this raw read does not race with any write.
+            unsafe { &*GLOBAL_SEED_STORAGE.seed.get() }
+        }
+    }
+}
+
+#[cfg(not(target_has_atomic = "8"))]
+mod global {
+    // Fallback for targets without 8-bit atomics: dynamic one-time seed
+    // initialization is not possible, so hand out the compile-time
+    // FIXED_GLOBAL_SEED instead.
+    #[derive(Copy, Clone, Debug)]
+    pub struct GlobalSeed {}
+
+    impl GlobalSeed {
+        #[inline(always)]
+        pub fn new() -> Self {
+            Self {}
+        }
+
+        #[inline(always)]
+        pub fn get(self) -> &'static [u64; 4] {
+            &super::FIXED_GLOBAL_SEED
+        }
+    }
+}
diff --git a/crates/is-terminal/.android-checksum.json b/crates/is-terminal/.android-checksum.json
new file mode 100644
index 0000000..c0af31d
--- /dev/null
+++ b/crates/is-terminal/.android-checksum.json
@@ -0,0 +1 @@
+{"package":null,"files":{"Android.bp":"6fd44479f255937aa3b2f50be3e251e95334bc6e58e914454a18d4c8bf1e4b20","METADATA":"fdcf71f09a8bdb2e392429f79900dc7065e7688a8ac9f55278b18212ae814740","LICENSE":"38620a3cfaeec97a9197e8c39e436ea7f0bc86699b1f1c35f1aa41785b6d4eac","Cargo.toml":"96ec1b8a3a1c1a5f425f3926710d64d9135d1c39cccb6ca2376e22c224d29f61","cargo_embargo.json":"0be745f01ba4955b20f2cb5011168c4d8cd396af75c007035ec693c67b17bce7","LICENSE-MIT":"38620a3cfaeec97a9197e8c39e436ea7f0bc86699b1f1c35f1aa41785b6d4eac",".cargo-checksum.json":"c4e8b5b0f8ec2ccaa361807c38b211ec45c5d9fa6a0e66345115e38166f0ebf6","MODULE_LICENSE_MIT":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","README.md":"1f03823c5bb2a51b7a7cbdde6feaf8992142cf1caecddece7999bf63871f9f8b","LICENSE-MIT-atty":"f8460b39b385b3cbe2fbfd924304813edbf06a23cb47c82d1f11eca2a005e2e8","src/lib.rs":"27f1619877e628e89e3940a6e0191a0f71d523d0221a2a43d235aa8cb278a55d"}}
\ No newline at end of file
diff --git a/crates/is-terminal/.cargo-checksum.json b/crates/is-terminal/.cargo-checksum.json
new file mode 100644
index 0000000..5dc3cd3
--- /dev/null
+++ b/crates/is-terminal/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.toml":"3a8bd9008b23bdb268b539caf758a2706d91f80e970ac422c4e6f7ebdd27e690","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","LICENSE-MIT-atty":"bab426a663ce3d5bbbcea9cdc300da74e94d76c3c79e46699a953f967a08e533","README.md":"144ede7d2e02bae955556ba940e05d75c2e0f8398944ad585bd3e7c4ac55f612","src/lib.rs":"12d723fa1aca4d9a0679b0be8052b2d04256e2c642379c96d31516f2ad145559"},"package":"261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b"}
\ No newline at end of file
diff --git a/crates/is-terminal/Android.bp b/crates/is-terminal/Android.bp
new file mode 100644
index 0000000..38d09b5
--- /dev/null
+++ b/crates/is-terminal/Android.bp
@@ -0,0 +1,31 @@
+// This file is generated by cargo_embargo.
+// Do not modify this file because the changes will be overridden on upgrade.
+
+package {
+    default_applicable_licenses: ["external_rust_crates_is-terminal_license"],
+    default_team: "trendy_team_android_rust",
+}
+
+license {
+    name: "external_rust_crates_is-terminal_license",
+    visibility: [":__subpackages__"],
+    license_kinds: ["SPDX-license-identifier-MIT"],
+    license_text: ["LICENSE"],
+}
+
+rust_library {
+    name: "libis_terminal",
+    host_supported: true,
+    crate_name: "is_terminal",
+    cargo_env_compat: true,
+    cargo_pkg_version: "0.4.13",
+    crate_root: "src/lib.rs",
+    edition: "2018",
+    rustlibs: ["liblibc"],
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+    product_available: true,
+    vendor_available: true,
+}
diff --git a/crates/is-terminal/Cargo.toml b/crates/is-terminal/Cargo.toml
new file mode 100644
index 0000000..43beb95
--- /dev/null
+++ b/crates/is-terminal/Cargo.toml
@@ -0,0 +1,79 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+rust-version = "1.63"
+name = "is-terminal"
+version = "0.4.13"
+authors = [
+    "softprops <[email protected]>",
+    "Dan Gohman <[email protected]>",
+]
+build = false
+include = [
+    "src",
+    "build.rs",
+    "Cargo.toml",
+    "COPYRIGHT",
+    "LICENSE*",
+    "/*.md",
+]
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+description = "Test whether a given stream is a terminal"
+documentation = "https://docs.rs/is-terminal"
+readme = "README.md"
+keywords = [
+    "terminal",
+    "tty",
+    "isatty",
+]
+categories = ["command-line-interface"]
+license = "MIT"
+repository = "https://github.com/sunfishcode/is-terminal"
+
+[lib]
+name = "is_terminal"
+path = "src/lib.rs"
+
+[dev-dependencies.atty]
+version = "0.2.14"
+
+[target.'cfg(any(unix, target_os = "wasi"))'.dependencies.libc]
+version = "0.2"
+
+[target.'cfg(any(unix, target_os = "wasi"))'.dev-dependencies.libc]
+version = "0.2.110"
+
+[target.'cfg(any(unix, target_os = "wasi"))'.dev-dependencies.rustix]
+version = "0.38.0"
+features = ["termios"]
+
+[target.'cfg(not(any(windows, target_os = "hermit", target_os = "unknown")))'.dev-dependencies.rustix]
+version = "0.38.0"
+features = ["stdio"]
+
+[target.'cfg(target_os = "hermit")'.dependencies.hermit-abi]
+version = "0.4.0"
+
+[target."cfg(windows)".dependencies.windows-sys]
+version = "0.52.0"
+features = [
+    "Win32_Foundation",
+    "Win32_Storage_FileSystem",
+    "Win32_System_Console",
+]
+
+[target."cfg(windows)".dev-dependencies.tempfile]
+version = "3"
diff --git a/crates/is-terminal/LICENSE b/crates/is-terminal/LICENSE
new file mode 120000
index 0000000..7f9a88e
--- /dev/null
+++ b/crates/is-terminal/LICENSE
@@ -0,0 +1 @@
+LICENSE-MIT
\ No newline at end of file
diff --git a/crates/is-terminal/LICENSE-MIT b/crates/is-terminal/LICENSE-MIT
new file mode 100644
index 0000000..31aa793
--- /dev/null
+++ b/crates/is-terminal/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/crates/is-terminal/LICENSE-MIT-atty b/crates/is-terminal/LICENSE-MIT-atty
new file mode 100644
index 0000000..b2319d8
--- /dev/null
+++ b/crates/is-terminal/LICENSE-MIT-atty
@@ -0,0 +1,23 @@
+Portions of this project are derived from atty, which bears the following
+copyright notice and permission notice:
+
+Copyright (c) 2015-2019 Doug Tangren
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/crates/is-terminal/METADATA b/crates/is-terminal/METADATA
new file mode 100644
index 0000000..746135c
--- /dev/null
+++ b/crates/is-terminal/METADATA
@@ -0,0 +1,17 @@
+name: "is-terminal"
+description: "Test whether a given stream is a terminal"
+third_party {
+  version: "0.4.13"
+  license_type: NOTICE
+  last_upgrade_date {
+    year: 2024
+    month: 12
+    day: 18
+  }
+  homepage: "https://crates.io/crates/is-terminal"
+  identifier {
+    type: "Archive"
+    value: "https://static.crates.io/crates/is-terminal/is-terminal-0.4.13.crate"
+    version: "0.4.13"
+  }
+}
diff --git a/crates/is-terminal/MODULE_LICENSE_MIT b/crates/is-terminal/MODULE_LICENSE_MIT
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/crates/is-terminal/MODULE_LICENSE_MIT
diff --git a/crates/is-terminal/README.md b/crates/is-terminal/README.md
new file mode 100644
index 0000000..0d9b4aa
--- /dev/null
+++ b/crates/is-terminal/README.md
@@ -0,0 +1,116 @@
+<div align="center">
+  <h1><code>is-terminal</code></h1>
+
+  <p>
+    <strong>Test whether a given stream is a terminal</strong>
+  </p>
+
+  <p>
+    <a href="https://github.com/sunfishcode/is-terminal/actions?query=workflow%3ACI"><img src="https://github.com/sunfishcode/is-terminal/workflows/CI/badge.svg" alt="Github Actions CI Status" /></a>
+    <a href="https://crates.io/crates/is-terminal"><img src="https://img.shields.io/crates/v/is-terminal.svg" alt="crates.io page" /></a>
+    <a href="https://docs.rs/is-terminal"><img src="https://docs.rs/is-terminal/badge.svg" alt="docs.rs docs" /></a>
+  </p>
+</div>
+
+As of Rust 1.70, most users should use the [`IsTerminal`] trait in the Rust
+standard library instead of this crate.
+
+On Unix platforms, this crate now uses libc, so that the implementation
+matches what's in std. Users wishing to use the rustix-based implementation
+can use the [rustix-is-terminal] crate instead.
+
+[rustix-is-terminal]: https://crates.io/crates/rustix-is-terminal
+
+<hr>
+
+is-terminal is a simple utility that answers one question:
+
+> Is this a terminal?
+
+A "terminal", also known as a "tty", is an I/O device which may be interactive
+and may support color and other special features. This crate doesn't provide
+any of those features; it just answers this one question.
+
+On Unix-family platforms, this is effectively the same as the [`isatty`]
+function for testing whether a given stream is a terminal, though it accepts
+high-level stream types instead of raw file descriptors.
+
+On Windows, it uses a variety of techniques to determine whether the given
+stream is a terminal.
+
+This crate is derived from [the atty crate] with [PR \#51] bug fix and
+[PR \#54] port to windows-sys applied. The only additional difference is that
+the atty crate only accepts stdin, stdout, or stderr, while this crate accepts
+any stream. In particular, this crate does not access any stream that is not
+passed to it, in accordance with [I/O safety].
+
+[PR \#51]: https://github.com/softprops/atty/pull/51
+[PR \#54]: https://github.com/softprops/atty/pull/54
+
+## Example
+
+```rust
+use is_terminal::IsTerminal;
+
+fn main() {
+    if std::io::stdout().is_terminal() {
+        println!("Stdout is a terminal");
+    } else {
+        println!("Stdout is not a terminal");
+    }
+}
+```
+
+## Testing
+
+This library is tested on both Unix-family and Windows platforms.
+
+To test it on a platform manually, use the provided `stdio` example program.
+When run normally, it prints this:
+
+```bash
+$ cargo run --example stdio
+stdin? true
+stdout? true
+stderr? true
+```
+
+To test stdin, pipe some text to the program:
+
+```bash
+$ cat | cargo run --example stdio
+stdin? false
+stdout? true
+stderr? true
+```
+
+To test stdout, pipe the program to something:
+
+```bash
+$ cargo run --example stdio | cat
+stdin? true
+stdout? false
+stderr? true
+```
+
+To test stderr, pipe the program to something redirecting stderr:
+
+```bash
+$ cargo run --example stdio 2>&1 | cat
+stdin? true
+stdout? false
+stderr? false
+```
+
+# Minimum Supported Rust Version (MSRV)
+
+This crate currently works on the version of [Rust on Debian stable], which is
+currently Rust 1.63. This policy may change in the future, in minor version
+releases, so users using a fixed version of Rust should pin to a specific
+version of this crate.
+
+[`isatty`]: https://man7.org/linux/man-pages/man3/isatty.3.html
+[the atty crate]: https://crates.io/crates/atty
+[I/O safety]: https://github.com/rust-lang/rfcs/blob/master/text/3128-io-safety.md
+[Rust on Debian stable]: https://packages.debian.org/stable/rust/rustc
+[`IsTerminal`]: https://doc.rust-lang.org/stable/std/io/trait.IsTerminal.html
diff --git a/crates/is-terminal/cargo_embargo.json b/crates/is-terminal/cargo_embargo.json
new file mode 100644
index 0000000..9e26dfe
--- /dev/null
+++ b/crates/is-terminal/cargo_embargo.json
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/crates/is-terminal/src/lib.rs b/crates/is-terminal/src/lib.rs
new file mode 100644
index 0000000..3ed4764
--- /dev/null
+++ b/crates/is-terminal/src/lib.rs
@@ -0,0 +1,382 @@
+//! is-terminal is a simple utility that answers one question:
+//!
+//! > Is this a terminal?
+//!
+//! A "terminal", also known as a "tty", is an I/O device which may be
+//! interactive and may support color and other special features. This crate
+//! doesn't provide any of those features; it just answers this one question.
+//!
+//! On Unix-family platforms, this is effectively the same as the [`isatty`]
+//! function for testing whether a given stream is a terminal, though it
+//! accepts high-level stream types instead of raw file descriptors.
+//!
+//! On Windows, it uses a variety of techniques to determine whether the
+//! given stream is a terminal.
+//!
+//! # Example
+//!
+//! ```rust
+//! use is_terminal::IsTerminal;
+//!
+//! if std::io::stdout().is_terminal() {
+//!     println!("stdout is a terminal")
+//! }
+//! ```
+//!
+//! [`isatty`]: https://man7.org/linux/man-pages/man3/isatty.3.html
+
+#![cfg_attr(
+    not(any(
+        unix,
+        windows,
+        target_os = "wasi",
+        target_os = "hermit",
+        target_os = "unknown"
+    )),
+    no_std
+)]
+
+#[cfg(target_os = "hermit")]
+use std::os::hermit::io::AsFd;
+#[cfg(unix)]
+use std::os::unix::io::{AsFd, AsRawFd};
+#[cfg(target_os = "wasi")]
+use std::os::wasi::io::{AsFd, AsRawFd};
+#[cfg(windows)]
+use std::os::windows::io::{AsHandle, AsRawHandle, BorrowedHandle};
+#[cfg(windows)]
+use windows_sys::Win32::Foundation::HANDLE;
+
+/// Extension trait to check whether something is a terminal.
+///
+/// Blanket implementations are provided below for `AsFd` types (Unix-family)
+/// and `AsHandle` types (Windows).
+pub trait IsTerminal {
+    /// Returns true if this is a terminal.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use is_terminal::IsTerminal;
+    ///
+    /// if std::io::stdout().is_terminal() {
+    ///     println!("stdout is a terminal")
+    /// }
+    /// ```
+    fn is_terminal(&self) -> bool;
+}
+
+/// Returns `true` if `this` is a terminal.
+///
+/// This is equivalent to calling `this.is_terminal()` and exists only as a
+/// convenience to calling the trait method [`IsTerminal::is_terminal`]
+/// without importing the trait.
+///
+/// # Example
+///
+/// ```
+/// if is_terminal::is_terminal(&std::io::stdout()) {
+///     println!("stdout is a terminal")
+/// }
+/// ```
+pub fn is_terminal<T: IsTerminal>(this: T) -> bool {
+    this.is_terminal()
+}
+
+#[cfg(not(any(windows, target_os = "unknown")))]
+impl<Stream: AsFd> IsTerminal for Stream {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        // Unix and WASI: defer to libc's isatty on the raw file descriptor.
+        #[cfg(any(unix, target_os = "wasi"))]
+        {
+            let fd = self.as_fd();
+            unsafe { libc::isatty(fd.as_raw_fd()) != 0 }
+        }
+
+        // Hermit provides its own isatty via hermit-abi.
+        #[cfg(target_os = "hermit")]
+        {
+            use std::os::hermit::io::AsRawFd;
+            hermit_abi::isatty(self.as_fd().as_fd().as_raw_fd())
+        }
+    }
+}
+
+#[cfg(windows)]
+impl<Stream: AsHandle> IsTerminal for Stream {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        // Delegate to the console / msys-pty detection below.
+        handle_is_console(self.as_handle())
+    }
+}
+
+// The Windows implementation here is copied from `handle_is_console` in
+// library/std/src/sys/pal/windows/io.rs in Rust at revision
+// e74c667a53c6368579867a74494e6fb7a7f17d13.
+
+/// Returns true if `handle` is a console, or (as a fallback) an msys/cygwin
+/// pseudo-terminal pipe.
+#[cfg(windows)]
+fn handle_is_console(handle: BorrowedHandle<'_>) -> bool {
+    use windows_sys::Win32::System::Console::GetConsoleMode;
+
+    let handle = handle.as_raw_handle();
+
+    unsafe {
+        // A null handle means the process has no console.
+        if handle.is_null() {
+            return false;
+        }
+
+        let mut out = 0;
+        if GetConsoleMode(handle as HANDLE, &mut out) != 0 {
+            // False positives aren't possible. If we got a console then we definitely have a console.
+            return true;
+        }
+
+        // Otherwise, we fall back to an msys hack to see if we can detect the presence of a pty.
+        msys_tty_on(handle as HANDLE)
+    }
+}
+
+/// Returns true if there is an MSYS tty on the given handle.
+#[cfg(windows)]
+unsafe fn msys_tty_on(handle: HANDLE) -> bool {
+    use std::ffi::c_void;
+    use windows_sys::Win32::{
+        Foundation::MAX_PATH,
+        Storage::FileSystem::{
+            FileNameInfo, GetFileInformationByHandleEx, GetFileType, FILE_TYPE_PIPE,
+        },
+    };
+
+    // Early return if the handle is not a pipe.
+    if GetFileType(handle) != FILE_TYPE_PIPE {
+        return false;
+    }
+
+    /// Mirrors windows_sys::Win32::Storage::FileSystem::FILE_NAME_INFO, giving
+    /// it a fixed length that we can stack allocate
+    #[repr(C)]
+    #[allow(non_snake_case)]
+    struct FILE_NAME_INFO {
+        FileNameLength: u32,
+        FileName: [u16; MAX_PATH as usize],
+    }
+    let mut name_info = FILE_NAME_INFO {
+        FileNameLength: 0,
+        FileName: [0; MAX_PATH as usize],
+    };
+    // Safety: buffer length is fixed.
+    let res = GetFileInformationByHandleEx(
+        handle,
+        FileNameInfo,
+        &mut name_info as *mut _ as *mut c_void,
+        std::mem::size_of::<FILE_NAME_INFO>() as u32,
+    );
+    if res == 0 {
+        return false;
+    }
+
+    // Use `get` because `FileNameLength` can be out of range.
+    // (Halved because the buffer holds u16 code units, not bytes.)
+    let s = match name_info
+        .FileName
+        .get(..name_info.FileNameLength as usize / 2)
+    {
+        None => return false,
+        Some(s) => s,
+    };
+    let name = String::from_utf16_lossy(s);
+    // Get the file name only.
+    let name = name.rsplit('\\').next().unwrap_or(&name);
+    // This checks whether 'pty' exists in the file name, which indicates that
+    // a pseudo-terminal is attached. To mitigate against false positives
+    // (e.g., an actual file name that contains 'pty'), we also require that
+    // the file name begins with either the strings 'msys-' or 'cygwin-'.
+    let is_msys = name.starts_with("msys-") || name.starts_with("cygwin-");
+    let is_pty = name.contains("-pty");
+    is_msys && is_pty
+}
+
+// Fallback implementations for `target_os = "unknown"` (e.g. the
+// wasm32-unknown-unknown target): every stream unconditionally reports
+// that it is not a terminal.
+#[cfg(target_os = "unknown")]
+impl IsTerminal for std::io::Stdin {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+#[cfg(target_os = "unknown")]
+impl IsTerminal for std::io::Stdout {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+#[cfg(target_os = "unknown")]
+impl IsTerminal for std::io::Stderr {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+#[cfg(target_os = "unknown")]
+impl<'a> IsTerminal for std::io::StdinLock<'a> {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+#[cfg(target_os = "unknown")]
+impl<'a> IsTerminal for std::io::StdoutLock<'a> {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+#[cfg(target_os = "unknown")]
+impl<'a> IsTerminal for std::io::StderrLock<'a> {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+// NOTE(review): this impl declares a lifetime parameter `'a` that the type
+// `std::fs::File` does not use — presumably a copy/paste from the Lock impls
+// above; verify it compiles for this target before changing.
+#[cfg(target_os = "unknown")]
+impl<'a> IsTerminal for std::fs::File {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+#[cfg(target_os = "unknown")]
+impl IsTerminal for std::process::ChildStdin {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+#[cfg(target_os = "unknown")]
+impl IsTerminal for std::process::ChildStdout {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+#[cfg(target_os = "unknown")]
+impl IsTerminal for std::process::ChildStderr {
+    #[inline]
+    fn is_terminal(&self) -> bool {
+        false
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    #[cfg(not(target_os = "unknown"))]
+    use super::IsTerminal;
+
+    // Windows: compare this crate's answers against the reference `atty` crate.
+    #[test]
+    #[cfg(windows)]
+    fn stdin() {
+        assert_eq!(
+            atty::is(atty::Stream::Stdin),
+            std::io::stdin().is_terminal()
+        )
+    }
+
+    #[test]
+    #[cfg(windows)]
+    fn stdout() {
+        assert_eq!(
+            atty::is(atty::Stream::Stdout),
+            std::io::stdout().is_terminal()
+        )
+    }
+
+    #[test]
+    #[cfg(windows)]
+    fn stderr() {
+        assert_eq!(
+            atty::is(atty::Stream::Stderr),
+            std::io::stderr().is_terminal()
+        )
+    }
+
+    // Unix/WASI: compare against `atty` using rustix's stdio handles.
+    #[test]
+    #[cfg(any(unix, target_os = "wasi"))]
+    fn stdin() {
+        assert_eq!(
+            atty::is(atty::Stream::Stdin),
+            rustix::stdio::stdin().is_terminal()
+        )
+    }
+
+    #[test]
+    #[cfg(any(unix, target_os = "wasi"))]
+    fn stdout() {
+        assert_eq!(
+            atty::is(atty::Stream::Stdout),
+            rustix::stdio::stdout().is_terminal()
+        )
+    }
+
+    #[test]
+    #[cfg(any(unix, target_os = "wasi"))]
+    fn stderr() {
+        assert_eq!(
+            atty::is(atty::Stream::Stderr),
+            rustix::stdio::stderr().is_terminal()
+        )
+    }
+
+    // Unix/WASI: also cross-check directly against libc::isatty.
+    #[test]
+    #[cfg(any(unix, target_os = "wasi"))]
+    fn stdin_vs_libc() {
+        unsafe {
+            assert_eq!(
+                libc::isatty(libc::STDIN_FILENO) != 0,
+                rustix::stdio::stdin().is_terminal()
+            )
+        }
+    }
+
+    #[test]
+    #[cfg(any(unix, target_os = "wasi"))]
+    fn stdout_vs_libc() {
+        unsafe {
+            assert_eq!(
+                libc::isatty(libc::STDOUT_FILENO) != 0,
+                rustix::stdio::stdout().is_terminal()
+            )
+        }
+    }
+
+    #[test]
+    #[cfg(any(unix, target_os = "wasi"))]
+    fn stderr_vs_libc() {
+        unsafe {
+            assert_eq!(
+                libc::isatty(libc::STDERR_FILENO) != 0,
+                rustix::stdio::stderr().is_terminal()
+            )
+        }
+    }
+
+    // Verify that the msys_tty_on function works with long path.
+    #[test]
+    #[cfg(windows)]
+    fn msys_tty_on_path_length() {
+        use std::{fs::File, os::windows::io::AsRawHandle};
+        use windows_sys::Win32::Foundation::MAX_PATH;
+
+        let dir = tempfile::tempdir().expect("Unable to create temporary directory");
+        let file_path = dir.path().join("ten_chars_".repeat(25));
+        // Ensure that the path is longer than MAX_PATH.
+        assert!(file_path.to_string_lossy().len() > MAX_PATH as usize);
+        let file = File::create(file_path).expect("Unable to create file");
+
+        // A plain file is not a pipe, so msys_tty_on must return false.
+        assert!(!unsafe { crate::msys_tty_on(file.as_raw_handle() as isize) });
+    }
+}
diff --git a/crates/textdistance/.android-checksum.json b/crates/textdistance/.android-checksum.json
new file mode 100644
index 0000000..ece193f
--- /dev/null
+++ b/crates/textdistance/.android-checksum.json
@@ -0,0 +1 @@
+{"package":null,"files":{"Taskfile.yaml":"eb60c88c5ea196cf976df722209c43d7a92de7c036b6746053e0c9aae9d7c783","src/algorithms/sift4_common.rs":"ed6ef5de7b7dbd1f15f68dfb1bdceee14a6e5806877b0fdf0c3a2a04e198085d","src/algorithms/ratcliff_obershelp.rs":"49282fa9f0936d58ee1c6ac9e7e5ff45ba864eb1a22695a82032d6c8aad15f04","Cargo.toml":"2331a78976d39e9c0d882a50cd72d62d886678334f4a3f9df87bbd178d1a7c12","src/algorithms/smith_waterman.rs":"5041b6ccbb2f6f8abb5b437e02e4ccf02a80354dc5aac32b0fe49447bd3abae9","src/algorithms/mlipns.rs":"bf60fb5a4aef9231de80ba13b6d03dbb04604203ae6fe84fdec941ce769721c7","src/nstr.rs":"263d0284a214b00f396a9737d3f0aa3043af61b02de8d3f7b0f11079cf08a7bc","src/algorithms/length.rs":"34d311be89ff8da511bfb98f3d42b8e8f0629898cfab1736602bd9b2460e527a","LICENSE":"decc1c7bfdb75b188bd69fcc81c1ee3b71bd2f7d6f67db68f23a428aaa1a4497","src/counter.rs":"1c597c5cb009404bf22d5b8d5ec0156efff636305247934a6782d4b85c158378","src/algorithms/bag.rs":"70087c96676dfbd05e239d7e60f9dde6e682a44404a14c5c773b082db8bd0b37","src/algorithms/hamming.rs":"10c942e5ed03fa2ddb80199370b3bb8fd1167e8ae9d6767df952a3f90474bfc4","src/algorithms/lcsseq.rs":"586ab3333a66844515048031bfe3f675c6b55e40c96107f9c37dfdb6f260bfc5","src/algorithms/jaro_winkler.rs":"2dfba19b7d75bc28699bbe37859730e038df2e2f8705afc1dcf394803b28ae41","src/algorithms/roberts.rs":"fa4d6fc74de9b79d809b1c63ab265f4b0fd594dd02ed96991af0d569a97bb25a","benches/str_benchmarks.rs":"020324785d0019674082dfca385f62a25e3b93918db489c9be17b53b1d16a3ed","README.md":"37e1f9890617702e835134a3ddef056bc1d0d7f75c20cfedca213199a3556a30","src/result.rs":"fcafb7ef94669d96e1f78c01397b695566a6a4e2c933bfae99441852d1ff218c","src/algorithms/lcsstr.rs":"817da97a27b492e550495fb7b88ad2c76ecc466d815cbb22aba609b8d5c0fcbd",".cargo-checksum.json":"3b9241683db786136d74469a4be22faf0881e1266c36a8cc1d6467ff85d66d63","src/algorithms/overlap.rs":"c27835ae21d28aa19eea71d30325172215f3b41f105be4d94897e4974fbb7095","src/algorithms/cosine.rs":"0c7211705986db034e06749b6954094e96
e2bd137d4dddec1ea90f6b8580c4ea","src/algorithms/sift4_simple.rs":"fd0c5025d0a96eda2a891bea29d7358988f9a169bb4e01bfd1a6fbad46120d15","tests/integration_test.rs":"2595d2e421f62e39cb7f0c963079e38313acfa5b0173189ca350b3b44601f86a","Cargo.lock":"1104c19811a60b82753c98876aca318475182db2798432d0ada4daaeae61f2e3","src/lib.rs":"4e60e7d15bf262801dbd44ecd16424be33c12b2580592ef42580eadb3abe4211","src/str.rs":"c5217fde85d68e2519fdece2207612af336230badb56fd433eb362a305df7b8e","src/algorithm.rs":"a3295b4a8a8756f9d6e2add55041c1c8def7a2d54d4508a6479eecfc5b4519de","src/algorithms/suffix.rs":"bcda0eb35133d9c6cf3c40352654ee11c7a340a43c3954f542ad37691b76ce8a","src/main.rs":"8d9ea0112d0a2e9cc0ef53d5b760af8cfede3ec81bcae77095cee1fb14e7354b","src/algorithms/jaro.rs":"45ce234e39999d1ad24a503fd3339736c611838c7274aa95328ddec4ffeda6a6","src/algorithms/levenshtein.rs":"5d950805dcb57d4b5e7fa1ae856a37475e83da8af4dee6a2d2021f2c90326b13","tests/test_introspection.py":"9531d8e0f8ab6305ca1589d3ca56d023fe70231161a8d793c404e4af278219c6","src/algorithms/tversky.rs":"f564fc05dffc7f5e4f5d8bafa3fbc0e86188857d15faac6ddee01f802b61dc8c","src/algorithms/lig3.rs":"61ebbe3cb8dfa1e889cd32f8c08ae90151ed494ac6be67f20e9e76c12bc6f42c","src/algorithms/yujian_bo.rs":"b09605c480cfa82e9d6a7f857ab809a49f0139581e1aa54d05e2a7b11e080b91","MODULE_LICENSE_MIT":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","Android.bp":"8cffeaaf4bd70969d0e3f9145c8703280108852953bfc2ca66a6118220266092","METADATA":"40b380f76ef7851233b1e2c271bd63be14d799c844bdabe5f7d1e2f65e0cff52","src/algorithms/prefix.rs":"475c3cfad4c40bfff0ea5d05e1f706ce7b31dd59fbda1af56070981817450068","src/algorithms/jaccard.rs":"0afa9f244ace606a02706cb6cce7615275fdb42ec0b8058c288dbb220d9ea032","cargo_embargo.json":"0be745f01ba4955b20f2cb5011168c4d8cd396af75c007035ec693c67b17bce7","src/algorithms/sorensen_dice.rs":"cb6865f36a78c89afc47da11c2213e489a7c801b4e3c6183c7eb05733b13bbb0","src/algorithms/entropy_ncd.rs":"a2ddd172e0a281c8a7f99962f8657c447b9d58d12c7
1e36c4132f107288f5467","src/algorithms/damerau_levenshtein.rs":"1c01fa1f13653b5882fe0b8ff2e3cdf22988a3391ba77c295804169e408ce157"}}
\ No newline at end of file
diff --git a/crates/textdistance/.cargo-checksum.json b/crates/textdistance/.cargo-checksum.json
new file mode 100644
index 0000000..bd4fa57
--- /dev/null
+++ b/crates/textdistance/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"0d58cd34d2668b7d10b76111d1d7bdcfb3938a0fa038b79df3e16a461cf06201","Cargo.toml":"7f5c016090563f1ce1bbdd1940e0c718145647e63899b9a3add387149b31064d","LICENSE":"8c49711c45f3da4eb2278351a3af6a202a367e7179fe987e510a9105ac7070f3","README.md":"68025f59c08d7b5e5848c5b9e33032adcb3538530f540bcf96b2fa338cec5c6e","Taskfile.yaml":"bee63b1abab40f672da2a78e282cb002ca138f96cfdf6e623113e7275b55ae91","benches/str_benchmarks.rs":"7dff36ab31f1eba09c3007777d7fd96ff93b78d0784b37cbb8aa1111107c3221","src/algorithm.rs":"40952f7bd1ba6e56d01d265664360aa25345b88ad3e3c7e4db46ed22b79f3d46","src/algorithms/bag.rs":"0c4eca8ff992d043dc28a78b89cc68f9a9294958385a72db134a4577bebeeb6d","src/algorithms/cosine.rs":"c420ca3182cee90e946b1da0d351a15394b4eb64954a6f917ff5aec06e64378a","src/algorithms/damerau_levenshtein.rs":"15288d6e6ecf6398b2acd493b330ebf60c8b45549d38624f68781de73b920947","src/algorithms/entropy_ncd.rs":"cb9150bbfb3a4175b1fdf79e5528daaca293dadd1ea9f8dac952a50ab8e58d2b","src/algorithms/hamming.rs":"891352d083ce868bd2f591a2fc5dcd16e6f5faad9e4daa2966a80c48e4ae2a81","src/algorithms/jaccard.rs":"4f58225562c3ce6c208c4d0b734dabdbd54066cd57f74c40e4eeb12f0fbc7fff","src/algorithms/jaro.rs":"ea8b2101fc4b9e7df3558aa5e47200c7019eb35d443de313ad201121ae532565","src/algorithms/jaro_winkler.rs":"b6db5da0394fb6ae50ac94e558e6b7369494839b67a6f24bca0617cffb32fa52","src/algorithms/lcsseq.rs":"779091dc3c7db3bc217ed51ec3af7ebeb02f72d70f0ac885954f21e896e3ad92","src/algorithms/lcsstr.rs":"5facc429e8e13e91c1c8e6a98be9cb61d91cc953db4e25e4de08696d938885a0","src/algorithms/length.rs":"e3360f0042e03722772a20d01a57f3393387f4333a565620752779cb3f447aa9","src/algorithms/levenshtein.rs":"cedf96e31f1d9ec71565d1fe6b67a328a5090e81cf6cbba6d1a83b8c69c500be","src/algorithms/lig3.rs":"933972e840e152e86057b8943f1ed4f8ee894766ad4372475226bfb4e6f95392","src/algorithms/mlipns.rs":"cbf725e7c6d8f777f6793e670391cd02f8dd9dc329cc3141a97826ede9bc4fbf","src/algorithms/overlap.rs":"796102e337dc612a91542e18fe47d78987437815
7e91cbbd58ab799aebcca135","src/algorithms/prefix.rs":"ee6a5537cdf7e55d2c2f28fba58ebfc93682c81af759efc54573e52a9a1543b5","src/algorithms/ratcliff_obershelp.rs":"9c7338cc6ccc8a2c7cfb9c1dbffd4bd0f2a43e1937550f13376ae81cf3ce68e5","src/algorithms/roberts.rs":"8b77608d60240ebc369da6d1c8e63be590834e33146a48fd3816ea808c5d44e0","src/algorithms/sift4_common.rs":"e84dd71c8cb15036e9adcaafbe6e0c536181a900d37af2b804e885e01e51d080","src/algorithms/sift4_simple.rs":"f46ed194079ba4307255277df1b99a2a3967db5583167609080fe7a1a76831aa","src/algorithms/smith_waterman.rs":"7ef055c533dfcefa84af7deb674bf5fa44da355dc01453090c8aec10106eca9d","src/algorithms/sorensen_dice.rs":"16fae1eda1a20c6f7798326fbdcb39469168a4d8136599a5c279e92b4b1bbf67","src/algorithms/suffix.rs":"0e6385612a295ebb3b3fe4cdb1c843818668c4881aadab943725ebc910b7b176","src/algorithms/tversky.rs":"5f26bc196af475e3025b0e3817f148183fcfefdb53164e57a680e9fb6442c058","src/algorithms/yujian_bo.rs":"5e418a8290606ac5fc38f51fe90ce85dffe0f8ccd90ca2e9e40d62322e7f6583","src/counter.rs":"5be794606196e32e88fce49a4870082256e237ca42cb3a9f424636e76ad86e4f","src/lib.rs":"b7976fb05704d837f8f505d9d5556eadb96fbe1e314e39777eb301836a9daad1","src/main.rs":"1fe5b1fe31eee0c4fa1c20220c90e73b18cc1c78c02a1f703007a5996e3f0834","src/nstr.rs":"18f263d2e22286b62e4a51d53784b257eb0fd3f8801d26ee85fdeac782b6b233","src/result.rs":"12dafc9813dc698c20cd605aa5c49467ffec129bb540425a64ed1acdfa5d70d2","src/str.rs":"e797cd9a8f5ceeed08c07bf455486dc237022fabf80e45d7d32ac98be2ff46ef","tests/integration_test.rs":"58195c90cfdc70c8b47916f60d9f3c077ab0b255671bb09bc0999fd814b9e385","tests/test_introspection.py":"3b268b1900abf05ba3f2f6e6c3fcddec509f9d5877375cf8a7a5b794c4244658"},"package":"aa672c55ab69f787dbc9126cc387dbe57fdd595f585e4524cf89018fa44ab819"}
\ No newline at end of file
diff --git a/crates/textdistance/Android.bp b/crates/textdistance/Android.bp
new file mode 100644
index 0000000..1e48794
--- /dev/null
+++ b/crates/textdistance/Android.bp
@@ -0,0 +1,51 @@
+// This file is generated by cargo_embargo.
+// Do not modify this file because the changes will be overridden on upgrade.
+
+package {
+    default_applicable_licenses: ["external_rust_crates_textdistance_license"],
+    default_team: "trendy_team_android_rust",
+}
+
+license {
+    name: "external_rust_crates_textdistance_license",
+    visibility: [":__subpackages__"],
+    license_kinds: ["SPDX-license-identifier-MIT"],
+    license_text: ["LICENSE"],
+}
+
+rust_library {
+    name: "libtextdistance",
+    host_supported: true,
+    crate_name: "textdistance",
+    cargo_env_compat: true,
+    cargo_pkg_version: "1.1.1",
+    crate_root: "src/lib.rs",
+    edition: "2021",
+    features: [
+        "default",
+        "std",
+    ],
+    apex_available: [
+        "//apex_available:platform",
+        "//apex_available:anyapex",
+    ],
+    product_available: true,
+    vendor_available: true,
+}
+
+rust_binary {
+    name: "textdistance",
+    host_supported: true,
+    crate_name: "textdistance",
+    cargo_env_compat: true,
+    cargo_pkg_version: "1.1.1",
+    crate_root: "src/main.rs",
+    edition: "2021",
+    features: [
+        "default",
+        "std",
+    ],
+    rustlibs: ["libtextdistance"],
+    product_available: true,
+    vendor_available: true,
+}
diff --git a/crates/textdistance/Cargo.lock b/crates/textdistance/Cargo.lock
new file mode 100644
index 0000000..b380643
--- /dev/null
+++ b/crates/textdistance/Cargo.lock
@@ -0,0 +1,1114 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "anes"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
+
+[[package]]
+name = "anstyle"
+version = "1.0.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
+
+[[package]]
+name = "assert2"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d31fea2b6e18dfe892863c3a0a68f9e005b0195565f3d55b8612946ebca789cc"
+dependencies = [
+ "assert2-macros",
+ "diff",
+ "is-terminal",
+ "yansi",
+]
+
+[[package]]
+name = "assert2-macros"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c1ac052c642f6d94e4be0b33028b346b7ab809ea5432b584eb8859f12f7ad2c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "rustc_version",
+ "syn",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
+
+[[package]]
+name = "bit-set"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
+dependencies = [
+ "bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
+
+[[package]]
+name = "bitflags"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
+
+[[package]]
+name = "bumpalo"
+version = "3.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
+
+[[package]]
+name = "byteorder"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+
+[[package]]
+name = "cast"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "ciborium"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e"
+dependencies = [
+ "ciborium-io",
+ "ciborium-ll",
+ "serde",
+]
+
+[[package]]
+name = "ciborium-io"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757"
+
+[[package]]
+name = "ciborium-ll"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9"
+dependencies = [
+ "ciborium-io",
+ "half",
+]
+
+[[package]]
+name = "clap"
+version = "4.5.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e5a21b8495e732f1b3c364c9949b201ca7bae518c502c80256c96ad79eaf6ac"
+dependencies = [
+ "clap_builder",
+]
+
+[[package]]
+name = "clap_builder"
+version = "4.5.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8cf2dd12af7a047ad9d6da2b6b249759a22a7abc0f474c1dae1777afa4b21a73"
+dependencies = [
+ "anstyle",
+ "clap_lex",
+]
+
+[[package]]
+name = "clap_lex"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97"
+
+[[package]]
+name = "criterion"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
+dependencies = [
+ "anes",
+ "cast",
+ "ciborium",
+ "clap",
+ "criterion-plot",
+ "is-terminal",
+ "itertools",
+ "num-traits",
+ "once_cell",
+ "oorandom",
+ "plotters",
+ "rayon",
+ "regex",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "tinytemplate",
+ "walkdir",
+]
+
+[[package]]
+name = "criterion-plot"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
+dependencies = [
+ "cast",
+ "itertools",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
+dependencies = [
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
+dependencies = [
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
+
+[[package]]
+name = "crunchy"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
+
+[[package]]
+name = "diff"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+
+[[package]]
+name = "either"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
+
+[[package]]
+name = "equivalent"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+
+[[package]]
+name = "errno"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "fastrand"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "futures"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
+
+[[package]]
+name = "futures-macro"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5"
+
+[[package]]
+name = "futures-task"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
+
+[[package]]
+name = "futures-timer"
+version = "3.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24"
+
+[[package]]
+name = "futures-util"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-macro",
+ "futures-sink",
+ "futures-task",
+ "memchr",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "glob"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
+
+[[package]]
+name = "half"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888"
+dependencies = [
+ "cfg-if",
+ "crunchy",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.14.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
+
+[[package]]
+name = "hermit-abi"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc"
+
+[[package]]
+name = "indexmap"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5"
+dependencies = [
+ "equivalent",
+ "hashbrown",
+]
+
+[[package]]
+name = "is-terminal"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
+
+[[package]]
+name = "js-sys"
+version = "0.3.70"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
+
+[[package]]
+name = "libc"
+version = "0.2.158"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439"
+
+[[package]]
+name = "libm"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
+
+[[package]]
+name = "log"
+version = "0.4.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
+
+[[package]]
+name = "memchr"
+version = "2.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
+
+[[package]]
+name = "num-traits"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+dependencies = [
+ "autocfg",
+ "libm",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+
+[[package]]
+name = "oorandom"
+version = "11.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9"
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "plotters"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747"
+dependencies = [
+ "num-traits",
+ "plotters-backend",
+ "plotters-svg",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "plotters-backend"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a"
+
+[[package]]
+name = "plotters-svg"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670"
+dependencies = [
+ "plotters-backend",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
+dependencies = [
+ "zerocopy",
+]
+
+[[package]]
+name = "proc-macro-crate"
+version = "3.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b"
+dependencies = [
+ "toml_edit",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.86"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "proptest"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d"
+dependencies = [
+ "bit-set",
+ "bit-vec",
+ "bitflags",
+ "lazy_static",
+ "num-traits",
+ "rand",
+ "rand_chacha",
+ "rand_xorshift",
+ "regex-syntax",
+ "rusty-fork",
+ "tempfile",
+ "unarray",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quote"
+version = "1.0.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rand_xorshift"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "rayon"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
+dependencies = [
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
+dependencies = [
+ "crossbeam-deque",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "regex"
+version = "1.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b"
+
+[[package]]
+name = "relative-path"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2"
+
+[[package]]
+name = "rstest"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b423f0e62bdd61734b67cd21ff50871dfaeb9cc74f869dcd6af974fbcb19936"
+dependencies = [
+ "futures",
+ "futures-timer",
+ "rstest_macros",
+ "rustc_version",
+]
+
+[[package]]
+name = "rstest_macros"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c5e1711e7d14f74b12a58411c542185ef7fb7f2e7f8ee6e2940a883628522b42"
+dependencies = [
+ "cfg-if",
+ "glob",
+ "proc-macro-crate",
+ "proc-macro2",
+ "quote",
+ "regex",
+ "relative-path",
+ "rustc_version",
+ "syn",
+ "unicode-ident",
+]
+
+[[package]]
+name = "rustc_version"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
+dependencies = [
+ "semver",
+]
+
+[[package]]
+name = "rustix"
+version = "0.38.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f55e80d50763938498dd5ebb18647174e0c76dc38c5505294bb224624f30f36"
+dependencies = [
+ "bitflags",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "rusty-fork"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
+dependencies = [
+ "fnv",
+ "quick-error",
+ "tempfile",
+ "wait-timeout",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "semver"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+
+[[package]]
+name = "serde"
+version = "1.0.210"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c8e3592472072e6e22e0a54d5904d9febf8508f65fb8552499a1abc7d1078c3a"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.210"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "243902eda00fad750862fc144cea25caca5e20d615af0a81bee94ca738f1df1f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.128"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ff5456707a1de34e7e37f2a6fd3d3f808c318259cbd01ab6377795054b483d8"
+dependencies = [
+ "itoa",
+ "memchr",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "syn"
+version = "2.0.77"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64"
+dependencies = [
+ "cfg-if",
+ "fastrand",
+ "once_cell",
+ "rustix",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "textdistance"
+version = "1.1.1"
+dependencies = [
+ "assert2",
+ "criterion",
+ "proptest",
+ "rstest",
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "tinytemplate"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
+dependencies = [
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "toml_datetime"
+version = "0.6.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41"
+
+[[package]]
+name = "toml_edit"
+version = "0.22.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
+dependencies = [
+ "indexmap",
+ "toml_datetime",
+ "winnow",
+]
+
+[[package]]
+name = "unarray"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
+
+[[package]]
+name = "wait-timeout"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "walkdir"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
+dependencies = [
+ "same-file",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b"
+dependencies = [
+ "bumpalo",
+ "log",
+ "once_cell",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484"
+
+[[package]]
+name = "web-sys"
+version = "0.3.70"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "winapi-util"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
+dependencies = [
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
+
+[[package]]
+name = "winnow"
+version = "0.6.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "yansi"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
+
+[[package]]
+name = "zerocopy"
+version = "0.7.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
+dependencies = [
+ "byteorder",
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.7.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
diff --git a/crates/textdistance/Cargo.toml b/crates/textdistance/Cargo.toml
new file mode 100644
index 0000000..5165d13
--- /dev/null
+++ b/crates/textdistance/Cargo.toml
@@ -0,0 +1,75 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "textdistance"
+version = "1.1.1"
+authors = ["Gram <[email protected]>"]
+build = false
+autobins = false
+autoexamples = false
+autotests = false
+autobenches = false
+description = "Lots of algorithms to compare how similar two sequences are"
+readme = "README.md"
+keywords = [
+    "jaro",
+    "hamming",
+    "levenshtein",
+    "similarity",
+    "distance",
+]
+categories = [
+    "algorithms",
+    "science",
+    "no-std",
+    "text-processing",
+    "command-line-interface",
+]
+license = "MIT"
+repository = "https://github.com/life4/textdistance.rs"
+
+[lib]
+name = "textdistance"
+path = "src/lib.rs"
+
+[[bin]]
+name = "textdistance"
+path = "src/main.rs"
+
+[[test]]
+name = "integration_test"
+path = "tests/integration_test.rs"
+
+[[bench]]
+name = "str_benchmarks"
+path = "benches/str_benchmarks.rs"
+harness = false
+
+[dev-dependencies.assert2]
+version = "0.3.15"
+
+[dev-dependencies.criterion]
+version = "0.5.1"
+
+[dev-dependencies.proptest]
+version = "1.1.0"
+
+[dev-dependencies.rstest]
+version = "0.22.0"
+
+[dev-dependencies.unicode-segmentation]
+version = "1.10.1"
+
+[features]
+default = ["std"]
+std = []
diff --git a/crates/textdistance/LICENSE b/crates/textdistance/LICENSE
new file mode 100644
index 0000000..211ae14
--- /dev/null
+++ b/crates/textdistance/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Gram
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/crates/textdistance/METADATA b/crates/textdistance/METADATA
new file mode 100644
index 0000000..00bd34e
--- /dev/null
+++ b/crates/textdistance/METADATA
@@ -0,0 +1,17 @@
+name: "textdistance"
+description: "Lots of algorithms to compare how similar two sequences are"
+third_party {
+  version: "1.1.1"
+  license_type: NOTICE
+  last_upgrade_date {
+    year: 2024
+    month: 12
+    day: 18
+  }
+  homepage: "https://crates.io/crates/textdistance"
+  identifier {
+    type: "Archive"
+    value: "https://static.crates.io/crates/textdistance/textdistance-1.1.1.crate"
+    version: "1.1.1"
+  }
+}
diff --git a/crates/textdistance/MODULE_LICENSE_MIT b/crates/textdistance/MODULE_LICENSE_MIT
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/crates/textdistance/MODULE_LICENSE_MIT
diff --git a/crates/textdistance/README.md b/crates/textdistance/README.md
new file mode 100644
index 0000000..d9ccb3a
--- /dev/null
+++ b/crates/textdistance/README.md
@@ -0,0 +1,247 @@
+# textdistance.rs
+
+[ [github.com](https://github.com/life4/textdistance.rs) ]
+[ [docs.rs](https://docs.rs/textdistance/) ]
+[ [crates.io](https://crates.io/crates/textdistance) ]
+
+Rust library with lots of different algorithms to compare how similar two sequences are.
+
+Features:
+
++ 💪 Based on popular and battle-tested [textdistance](https://github.com/life4/textdistance) Python library (and written by the same author).
++ 📚 Contains 20+ algorithms for all purposes.
++ 🔬 Includes state-of-the-art algorithms like `EntropyNCD` and `Sift4`.
++ 🪶 Zero-dependency.
++ 🐜 `#![no_std]` support (embedded systems).
++ 🔨 Works with any iterators, including bytes, code points, Unicode grapheme clusters, words, and numbers.
++ ❤️ Friendly and consistent API for all algorithms.
++ 📏 Optional normalization of the result on the 0.0-1.0 interval.
++ 🛡 No unsafe code.
++ 🦀 Pure Rust.
+
+## Available algorithms
+
+Edit-based:
+
+1. `DamerauLevenshtein`, both optimal string alignment and restricted.
+1. `Hamming`
+1. `Jaro`
+1. `JaroWinkler`
+1. `Levenshtein`
+1. `Sift4Common`
+1. `Sift4Simple`
+1. `SmithWaterman`
+
+Token-based:
+
+1. `Bag`
+1. `Cosine` (aka Orchini, Tucker, Otsuka–Ochiai)
+1. `EntropyNCD` (Entropy-based Normalized Compression Distance)
+1. `Jaccard` (aka Tanimoto, Critical Success Index)
+1. `Overlap` (aka Szymkiewicz–Simpson)
+1. `Roberts`
+1. `SorensenDice` (aka F1, Czekanowski, Zijdenbos)
+1. `Tversky`
+
+Sequence-based:
+
+1. `LCSSeq` (Longest Common SubSequence)
+1. `LCSStr` (Longest Common SubString)
+1. `RatcliffObershelp` (aka Gestalt pattern matching)
+
+Naive:
+
+1. `Prefix`
+1. `Suffix`
+1. `Length`
+
+Normalization for other metrics:
+
+1. `LIG3` normalization for `Hamming` by `Levenshtein`
+1. `MLIPNS` normalization for `Hamming`
+1. `YujianBo` normalization for `Levenshtein`
+
+## Installation
+
+```shell
+cargo add textdistance
+```
+
+Or if you're going to use it in a [no_std](https://docs.rust-embedded.org/book/intro/no-std.html) project:
+
+```shell
+cargo add --no-default-features textdistance
+```
+
+## Usage
+
+The `textdistance::str` module provides shortcut functions for each algorithm for calculating the distance/similarity between two strings:
+
+```rust
+use textdistance::str::damerau_levenshtein;
+assert!(damerau_levenshtein("abc", "acbd") == 2);
+```
+
+The `textdistance::nstr` module is the same but all algorithms return a normalized value (between 0.0 and 1.0):
+
+```rust
+use textdistance::nstr::damerau_levenshtein;
+assert!(damerau_levenshtein("abc", "acbd") == 2./4.);
+```
+
+For more advanced usage, each algorithm is provided as a struct implementing the `Algorithm` trait:
+
+```rust
+use textdistance::{Algorithm, DamerauLevenshtein};
+
+let a = DamerauLevenshtein::default();
+let r = a.for_str("abc", "acbd");
+assert!(r.val() == 2);
+assert!(r.nval() == 2./4.);
+```
+
+1. The `Algorithm` trait provides `for_str`, `for_vec`, and `for_iter` to calculate the result for two strings, vectors (slices), or iterators respectively. In addition, there are `for_words` and `for_bigrams` methods that split the text into words or bigrams respectively before calculating the distance.
+1. Each method returns a `textdistance::Result` that provides methods to get absolute (`val`) or normalized (`nval`) value of the metric, distance (`dist` and `ndist`), or similarity (`sim` and `nsim`).
+
+## Unicode support
+
+The `for_str` method (and so all functions in the `str` and `nstr` modules) uses `String.chars` to split the string and then runs it through the `for_iter` method. So, `é` will be considered two distinct characters ("latin small letter e" and "combining acute accent"). Usually, that's ok and this is how Python works. You can read more in [the official Rust documentation](https://doc.rust-lang.org/std/primitive.char.html#representation).
+
+If you want `é` to be considered as a single symbol, use the [unicode-segmentation](https://crates.io/crates/unicode-segmentation) crate:
+
+```rust
+use textdistance::{Algorithm, DamerauLevenshtein};
+use unicode_segmentation::UnicodeSegmentation;
+
+let s1 = "a̐éö̲\r\n";
+let s2 = "éa̐ö̲\r\n";
+let g1 = s1.graphemes(true);
+let g2 = s2.graphemes(true);
+let a = DamerauLevenshtein::default();
+let r = a.for_iter(g1, g2);
+assert!(r.val() == 1);
+```
+
+## Choosing the algorithm
+
+The algorithm to use depends on your use case. First, you need to decide on the algorithm category:
+
+1. Edit-based algorithms work well on short sequences for detecting typos and minor changes.
+1. Token-based algorithms work well on longer sequences for comparing long texts with noticeable modifications.
+1. Sequence-based algorithms work well for calculating diff size between the original and the changed version of the sequence.
+
+If you go with edit-based, the next thing is to decide what kind of changes you need to detect:
+
++ ✏️ Substitution. One character is replaced by another.
++ ➕ Addition. A new character is added.
++ 🗑 Deletion. A character is removed.
++ 🔄 Transposition. Two sequential characters are swapped.
+
+| alg                   | sub | add | del | trans |
+| --------------------- | --- | --- | --- | ----- |
+| `Hamming`             | ✅  | ❌  | ❌  | ❌    |
+| `Jaro`                | ❌  | ❌  | ❌  | ✅    |
+| `JaroWinkler`         | ❌  | ❌  | ❌  | ✅    |
+| `Sift4`               | ❌  | ❌  | ❌  | ✅    |
+| `Levenshtein`         | ✅  | ✅  | ✅  | ❌    |
+| `DamerauLevenshtein`  | ✅  | ✅  | ✅  | ✅    |
+
++ `Hamming` is the fastest one but detects only substitutions.
++ `Sift4` is very fast but not as well-known and battle-tested as other algorithms.
++ `Jaro` is slower than `Sift4` but well-known and battle-tested.
++ `JaroWinkler` is like `Jaro` but gives more weight to strings with a matching prefix.
++ `Levenshtein` detects everything but transpositions and is faster than `DamerauLevenshtein` (but slower than other algorithms).
++ `DamerauLevenshtein` ticks all the boxes but is very slow.
+
+There are some use cases:
+
++ `DamerauLevenshtein` with some optimizations is [used in cargo](https://github.com/rust-lang/cargo/blob/master/src/cargo/util/edit_distance.rs) to correct typos in command names.
++ `Jaro` is included in the Elixir standard library ([String.jaro_distance](https://hexdocs.pm/elixir/1.12/String.html#jaro_distance/2)). It is used by the compiler and by mix (cargo for Elixir) to provide the "did you mean?" functionality for typos in module or command names.
++ `RatcliffObershelp` variation is included in the Python standard library ([difflib.SequenceMatcher](https://docs.python.org/3/library/difflib.html#difflib.SequenceMatcher)).
+
+## Benchmarks
+
+Legend:
+
++ 🐌 is very slow (> 5 ms)
++ 🐢 is slow (> 1 ms)
++ 🐇 is fast (> 500 µs)
++ 🐎 is very fast (< 500 µs)
+
+Edit-based (and their normalizations):
+
+| algorithm          | time         |
+| ------------------ | ------------ |
+| hamming            | 🐎 19.203 µs |
+| mlipns             | 🐎 20.625 µs |
+| sift4_simple       | 🐎 143.69 µs |
+| sift4_common       | 🐎 238.86 µs |
+| jaro               | 🐢 1.7148 ms |
+| jaro_winkler       | 🐢 1.7174 ms |
+| levenshtein        | 🐢 4.5999 ms |
+| yujian_bo          | 🐢 4.6044 ms |
+| lig3               | 🐌 6.5563 ms |
+| smith_waterman     | 🐌 9.5146 ms |
+| damerau_levenshtein_restricted | 🐌 10.301 ms |
+| damerau_levenshtein | 🐌 41.938 ms |
+
+Token-based:
+
+| algorithm          | time         |
+| ------------------ | ------------ |
+| cosine             | 🐇 508.59 µs |
+| sorensen_dice      | 🐇 510.75 µs |
+| tversky            | 🐇 512.41 µs |
+| overlap            | 🐇 513.76 µs |
+| bag                | 🐇 523.06 µs |
+| jaccard            | 🐇 580.79 µs |
+| roberts            | 🐇 714.79 µs |
+| entropy_ncd        | 🐇 731.68 µs |
+
+Sequence-based:
+
+| algorithm          | time         |
+| ------------------ | ------------ |
+| lcsstr             | 🐢 3.2658 ms |
+| lcsseq             | 🐌 7.4349 ms |
+| ratcliff_obershelp | 🐌 36.308 ms |
+
+Naive:
+
+| algorithm          | time         |
+| ------------------ | ------------ |
+| length             | 🐎 2.5300 µs |
+| prefix             | 🐎 22.473 µs |
+| suffix             | 🐎 38.821 µs |
+
+The benchmarks are powered by [criterion](https://github.com/bheisler/criterion.rs) and live in the [benches](./benches/) directory. They are quite simple: grab 10 [open-source licenses](https://github.com/github/choosealicense.com/tree/gh-pages/_licenses), take a 200 chars prefix from each, and cross-compare these prefixes. The numbers might be very different for a different kind of input, length of the input, when comparing words rather than characters, or running the benchmarks on a different machine. The goal of these benchmarks is to provide basic guidance rather than give a definitive answer. If performance is critical for your application, I encourage you to make your benchmarks on the real data you have.
+
+## Versioning
+
+We stick to [SemVer](https://semver.org/):
+
+1. The **patch** number is for bug fixes. The results of an algorithm may change in some corner cases if we found that the previous implementation doesn't match the algorithm described in the original paper.
+1. The **minor** number is for new algorithms and features.
+1. The **major** number is for big changes in the API. We try to avoid breaking stuff but we prefer to provide a friendly and convenient API over keeping backward compatibility.
+
+## Limitations
+
++ In the original textdistance, most of the algorithms are adjusted to work on any number of input sequences. However, Rust doesn't support variadic arguments, so all algorithms currently are implemented only for exactly two inputs.
++ All algorithms in the crate implement the same `Algorithm` trait. Hence metrics that have additional limitations on the input sequence elements beyond `Eq` (like Editex and MRA that work only with ASCII letters) currently cannot be implemented.
++ Most of the implemented algorithms have certain properties (like [commutative property](https://en.wikipedia.org/wiki/Commutative_property)) that make their behavior more like what you would expect and make normalization simple. So, I haven't implemented yet Needleman-Wunsch and Gotoh, mostly because they are tricky to normalize and I'm still not 100% sure that I did it correctly in the original textdistance.
+
+## Acknowledgments
+
+These are the libraries that I used as a reference implementation and the source of test cases:
+
++ 🐍 Python: [textdistance](https://github.com/life4/textdistance), [abydos](https://github.com/chrislit/abydos), [jellyfish](https://github.com/jamesturk/jellyfish).
++ ☕️ JS: [talisman](https://github.com/Yomguithereal/talisman).
++ 🦀 Rust: [strsim](https://github.com/dguo/strsim-rs), [distance](https://github.com/mbrlabs/distance), [levenshtein-rs](https://github.com/wooorm/levenshtein-rs).
+
+Special thanks to [Trevor Gross](https://github.com/tgross35) for transferring to me the ownership of the [textdistance](https://crates.io/crates/textdistance) name on crates.io.
+
+## Testing locally
+
+To run everything locally, all you need is Rust, Python, and [task](https://taskfile.dev/installation/). Execute `task all` to run all code formatters, linters, and tests.
+
+Thank you ❤️
diff --git a/crates/textdistance/Taskfile.yaml b/crates/textdistance/Taskfile.yaml
new file mode 100644
index 0000000..0708c96
--- /dev/null
+++ b/crates/textdistance/Taskfile.yaml
@@ -0,0 +1,99 @@
+# https://taskfile.dev
+version: "3"
+
+vars:
+  CARGO_BIN: ~/.cargo/bin/
+
+tasks:
+  install-nextest:
+    status:
+      - test -f {{.CARGO_BIN}}/cargo-nextest
+    cmds:
+      - curl -LsSf https://get.nexte.st/latest/linux | tar zxf - -C {{.CARGO_BIN}}
+
+  install-pytest:
+    status:
+      - which pytest
+    cmds:
+      - python3 -m pip install pytest
+
+  clone-licenses:
+    status:
+      - test -d choosealicense.com
+    cmds:
+      - git clone --depth 1 https://github.com/github/choosealicense.com.git
+
+  check:
+    cmds:
+      - cargo check --all {{.CLI_ARGS}}
+
+  format:
+    cmds:
+      - cargo fmt --all {{.CLI_ARGS}}
+
+  lint:
+    cmds:
+      - >
+        cargo clippy
+        --examples --tests --benches --bins --lib --workspace
+        -- -D clippy::pedantic -D clippy::dbg-macro -D warnings
+
+  doc:
+    env:
+      RUSTDOCFLAGS: "-Dwarnings"
+    cmds:
+      - cargo doc {{.CLI_ARGS}}
+
+  pytest:
+    deps:
+      - install-pytest
+    cmds:
+      - pytest {{.CLI_ARGS}} tests/
+
+  nextest:
+    deps:
+      - install-nextest
+    env:
+      CLICOLOR_FORCE: "yes"
+    cmds:
+      - cargo nextest run --no-fail-fast {{.CLI_ARGS}}
+      - cargo build --no-default-features
+
+  doctest:
+    cmds:
+      - cargo test --doc
+
+  bench:
+    desc: "run benchmarks"
+    deps:
+      - clone-licenses
+    cmds:
+      - cargo bench {{.CLI_ARGS}}
+
+  release:
+    desc: "build and upload a new release"
+    cmds:
+      - which gh
+      - test {{.CLI_ARGS}}
+      - cat Cargo.toml | grep -F 'version = "{{.CLI_ARGS}}"'
+      - cargo publish
+      - git tag {{.CLI_ARGS}}
+      - git push
+      - git push --tags
+      - gh release create --generate-notes {{.CLI_ARGS}}
+
+  test:
+    desc: "run all tests"
+    cmds:
+      - task: pytest
+      - task: nextest
+      - task: doctest
+
+  all:
+    desc: "run all code formatters, linters, and tests"
+    cmds:
+      - task: format
+      - task: check
+      - task: lint
+      - task: doc
+      - task: test
diff --git a/crates/textdistance/benches/str_benchmarks.rs b/crates/textdistance/benches/str_benchmarks.rs
new file mode 100644
index 0000000..d247a5e
--- /dev/null
+++ b/crates/textdistance/benches/str_benchmarks.rs
@@ -0,0 +1,94 @@
+use core::time::Duration;
+use criterion::BenchmarkId;
+use criterion::{black_box, criterion_group, criterion_main, Criterion};
+use std::fs;
+use textdistance::{nstr, str};
+
+fn read_licenses() -> Vec<(String, String)> {
+    let mut licenses: Vec<(String, String)> = Vec::new();
+    let dir = fs::read_dir("choosealicense.com/_licenses").unwrap();
+    let mut i = 0;
+    for lfile in dir {
+        let lpath = lfile.unwrap();
+        let ltext = fs::read_to_string(lpath.path()).unwrap();
+        let lname = lpath.file_name().to_str().unwrap().to_owned();
+        // shorten the text to speed up benchmarks run
+        let ltext = ltext[1..200].to_string();
+        licenses.push((lname, ltext));
+
+        // take only a subset of licenses to speed up benchmarks run
+        i += 1;
+        if i == 10 {
+            break;
+        }
+    }
+    licenses
+}
+
+type AlgFn = dyn Fn(&str, &str) -> f64;
+
+fn criterion_benchmark(c: &mut Criterion) {
+    benchmark_nstr(c);
+}
+
+fn benchmark_nstr(c: &mut Criterion) {
+    let licenses = read_licenses();
+    let mut group = c.benchmark_group("nstr");
+    group.sample_size(10);
+    group.measurement_time(Duration::new(3, 0));
+    group.warm_up_time(Duration::new(1, 0));
+    // group.sampling_mode(criterion::SamplingMode::Flat);
+
+    let algs: Vec<(&str, Box<AlgFn>)> = vec![
+        ("bag", Box::new(nstr::bag)),
+        ("cosine", Box::new(nstr::cosine)),
+        ("damerau_levenshtein", Box::new(nstr::damerau_levenshtein)),
+        (
+            "damerau_levenshtein_restricted",
+            Box::new(nstr::damerau_levenshtein_restricted),
+        ),
+        ("entropy_ncd", Box::new(nstr::entropy_ncd)),
+        ("hamming", Box::new(nstr::hamming)),
+        ("jaccard", Box::new(nstr::jaccard)),
+        ("jaro_winkler", Box::new(nstr::jaro_winkler)),
+        ("jaro", Box::new(nstr::jaro)),
+        ("lcsseq", Box::new(nstr::lcsseq)),
+        ("lcsstr", Box::new(nstr::lcsstr)),
+        ("length", Box::new(nstr::length)),
+        ("levenshtein", Box::new(nstr::levenshtein)),
+        ("lig3", Box::new(nstr::lig3)),
+        ("mlipns", Box::new(nstr::mlipns)),
+        ("overlap", Box::new(nstr::overlap)),
+        ("prefix", Box::new(nstr::prefix)),
+        ("ratcliff_obershelp", Box::new(nstr::ratcliff_obershelp)),
+        ("roberts", Box::new(nstr::roberts)),
+        ("sift4_common", Box::new(nstr::sift4_common)),
+        ("sift4_simple", Box::new(nstr::sift4_simple)),
+        ("smith_waterman", Box::new(nstr::smith_waterman)),
+        ("sorensen_dice", Box::new(nstr::sorensen_dice)),
+        ("suffix", Box::new(nstr::suffix)),
+        ("tversky", Box::new(nstr::tversky)),
+        ("yujian_bo", Box::new(nstr::yujian_bo)),
+    ];
+
+    for (alg_name, alg_fn) in algs {
+        group.bench_with_input(
+            BenchmarkId::from_parameter(alg_name),
+            &licenses,
+            |b, licenses| {
+                b.iter(|| {
+                    for (_, l1) in licenses {
+                        for (_, l2) in licenses {
+                            let s1 = black_box(l1);
+                            let s2 = black_box(l2);
+                            alg_fn(s1, s2);
+                        }
+                    }
+                });
+            },
+        );
+    }
+}
+
+criterion_group!(benches, criterion_benchmark);
+criterion_main!(benches);
diff --git a/crates/textdistance/cargo_embargo.json b/crates/textdistance/cargo_embargo.json
new file mode 100644
index 0000000..9e26dfe
--- /dev/null
+++ b/crates/textdistance/cargo_embargo.json
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/crates/textdistance/src/algorithm.rs b/crates/textdistance/src/algorithm.rs
new file mode 100644
index 0000000..fd780a9
--- /dev/null
+++ b/crates/textdistance/src/algorithm.rs
@@ -0,0 +1,138 @@
+use super::Result;
+use alloc::vec::Vec;
+use core::hash::Hash;
+
+/// A base trait for all distance/similarity algorithms.
+///
+///     use textdistance::{Algorithm, Hamming};
+///     let h = Hamming::default();
+///     let res = h.for_str("abc", "acbd");
+///     assert!(res.val() == 3);
+///
+pub trait Algorithm<R> {
+    /// Calculate distance/similarity for iterators.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_iter(1..4, 1..6);
+    ///     assert!(res.val() == 2);
+    ///
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<R>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + Hash,
+    {
+        let s1: Vec<E> = s1.collect();
+        let s2: Vec<E> = s2.collect();
+        self.for_vec(&s1, &s2)
+    }
+
+    /// Calculate distance/similarity for vectors.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_vec(&vec![1, 2, 3], &vec![1, 3, 2, 4]);
+    ///     assert!(res.val() == 3);
+    ///
+    fn for_vec<E>(&self, s1: &[E], s2: &[E]) -> Result<R>
+    where
+        E: Eq + Hash,
+    {
+        self.for_iter(s1.iter(), s2.iter())
+    }
+
+    /// Calculate distance/similarity for strings.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_str("abc", "acbd");
+    ///     assert!(res.val() == 3);
+    ///
+    fn for_str(&self, s1: &str, s2: &str) -> Result<R> {
+        self.for_iter(s1.chars(), s2.chars())
+    }
+
+    /// Calculate distance/similarity for words in strings.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_words("the first edition", "the second edition");
+    ///     assert!(res.val() == 1);
+    ///
+    fn for_words(&self, s1: &str, s2: &str) -> Result<R> {
+        self.for_iter(s1.split_whitespace(), s2.split_whitespace())
+    }
+
+    /// Calculate distance/similarity for bigrams in strings.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_bigrams("abd", "abcd");
+    ///     assert!(res.val() == 2); // 3 bigrams (ab, bc, cd), only "ab" matches
+    ///
+    fn for_bigrams(&self, s1: &str, s2: &str) -> Result<R> {
+        self.for_iter(bigrams(s1), bigrams(s2))
+    }
+}
+
+fn bigrams(s: &str) -> impl Iterator<Item = (char, char)> + '_ {
+    s.chars().zip(s.chars().skip(1))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::Algorithm;
+    use crate::Hamming;
+    use assert2::assert;
+    // use proptest::prelude::*;
+    use rstest::rstest;
+
+    #[rstest]
+    #[case(vec![], vec![], 0)]
+    #[case(vec![1], vec![1], 0)]
+    #[case(vec![1], vec![5], 1)]
+    #[case(vec![3], vec![5], 1)]
+    #[case(vec![3, 4, 5, 6], vec![1, 4, 5, 6, 7], 2)]
+    fn for_vec(#[case] s1: Vec<usize>, #[case] s2: Vec<usize>, #[case] exp: usize) {
+        let h = Hamming::default();
+        assert!(h.for_vec(&s1, &s2).val() == exp);
+    }
+
+    #[rstest]
+    #[case("", "", 0)]
+    #[case("", "\0", 1)]
+    #[case("", "abc", 3)]
+    #[case("abc", "", 3)]
+    #[case("sitting", "sitting", 0)]
+    #[case("abcdefg", "hijklmn", 7)]
+    #[case("karolin", "kathrin", 3)]
+    #[case("hello", "world", 4)]
+    fn for_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        let h = Hamming::default();
+        assert!(h.for_str(s1, s2).val() == exp);
+    }
+
+    #[rstest]
+    #[case("", "", 0)]
+    #[case("", "\0", 1)]
+    #[case("", "abc", 1)]
+    #[case("abc", "", 1)]
+    #[case("oh hi mark", "oh hi world", 1)]
+    #[case("oh hi mark", "oh hi mad world", 2)]
+    #[case("oh hi mark", "greeting you mad world", 4)]
+    fn for_words(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        let h = Hamming::default();
+        assert!(h.for_words(s1, s2).val() == exp);
+    }
+
+    #[rstest]
+    #[case("", "", 0)]
+    // #[case("", "a", 1)]
+    #[case("", "abc", 2)]
+    #[case("abc", "", 2)]
+    #[case("oh hi mark", "oh ho mark", 2)]
+    fn for_bigrams(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        let h = Hamming::default();
+        assert!(h.for_bigrams(s1, s2).val() == exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/bag.rs b/crates/textdistance/src/algorithms/bag.rs
new file mode 100644
index 0000000..2b3f798
--- /dev/null
+++ b/crates/textdistance/src/algorithms/bag.rs
@@ -0,0 +1,57 @@
+//! Bag distance
+#![cfg(feature = "std")]
+use crate::counter::Counter;
+use crate::{Algorithm, Result};
+
+/// [Bag distance] is the maximum number of elements that occur in one sequence but not in the other.
+///
+/// [Bag distance]: http://www-db.disi.unibo.it/research/papers/SPIRE02.pdf
+#[derive(Default)]
+pub struct Bag {}
+
+impl Algorithm<usize> for Bag {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<usize>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        let c1 = Counter::from_iter(s1);
+        let c2 = Counter::from_iter(s2);
+        let d1 = c1.diff_count(&c2);
+        let d2 = c2.diff_count(&c1);
+        let l1 = c1.count();
+        let l2 = c2.count();
+
+        Result {
+            abs: d1.max(d2),
+            is_distance: true,
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::bag;
+    use assert2::assert;
+    use rstest::rstest;
+
+    #[rstest]
+    #[case("", "", 0)]
+    // parity with textdistance
+    #[case("qwe", "qwe", 0)]
+    #[case("qwe", "erty", 3)]
+    #[case("qwe", "ewq", 0)]
+    #[case("qwe", "rtys", 4)]
+    // parity with talisman
+    #[case("cat", "hat", 1)]
+    #[case("Niall", "Neil", 2)]
+    #[case("aluminum", "Catalan", 5)]
+    #[case("ATCG", "TAGC", 0)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        let act = bag(s1, s2);
+        assert!(act == exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/cosine.rs b/crates/textdistance/src/algorithms/cosine.rs
new file mode 100644
index 0000000..c4b7c63
--- /dev/null
+++ b/crates/textdistance/src/algorithms/cosine.rs
@@ -0,0 +1,65 @@
+//! Cosine similarity
+#![cfg(feature = "std")]
+use crate::counter::Counter;
+use crate::{Algorithm, Result};
+
+/// [Cosine similarity] is the cosine of the angle between two vectors.
+///
+/// This is how many symbols the given strings have in common
+/// divided by the square root of the product of the strings' lengths.
+///
+/// [Cosine similarity]: https://en.wikipedia.org/wiki/Cosine_similarity
+#[derive(Default)]
+pub struct Cosine {}
+
+impl Algorithm<f64> for Cosine {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<f64>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        let c1 = Counter::from_iter(s1);
+        let c2 = Counter::from_iter(s2);
+        let n1 = c1.count();
+        let n2 = c2.count();
+        let res = match (n1, n2) {
+            (0, 0) => 1.,
+            (_, 0) | (0, _) => 0.,
+            (_, _) => {
+                let ic = c1.intersect_count(&c2);
+                ic as f64 / ((n1 * n2) as f64).sqrt()
+            }
+        };
+        Result {
+            abs: res,
+            is_distance: false,
+            max: 1.,
+            len1: c1.count(),
+            len2: c2.count(),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::cosine;
+    use assert2::assert;
+    use rstest::rstest;
+
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    #[case("", "", 1.)]
+    #[case("nelson", "", 0.)]
+    #[case("", "neilsen", 0.)]
+    // parity with textdistance
+    #[case("test", "text", 3. / 4.)]
+    #[case("nelson", "neilsen", 0.771516)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = cosine(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "cosine({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/damerau_levenshtein.rs b/crates/textdistance/src/algorithms/damerau_levenshtein.rs
new file mode 100644
index 0000000..4408005
--- /dev/null
+++ b/crates/textdistance/src/algorithms/damerau_levenshtein.rs
@@ -0,0 +1,241 @@
+//! Damerau-Levenshtein distance
+#![cfg(feature = "std")]
+use crate::{Algorithm, Result};
+use alloc::vec;
+use alloc::vec::Vec;
+use core::hash::Hash;
+use std::collections::HashMap;
+
+/// [Damerau-Levenshtein distance] is an edit distance between two sequences.
+///
+/// It is an improved version of [Levenshtein](crate::Levenshtein) that also includes
+/// transpositions.
+///
+/// It is the minimum number of operations (consisting of insertions, deletions or
+/// substitutions of a single character, or transposition of two adjacent characters)
+/// required to change one text into the other.
+///
+/// [Damerau-Levenshtein distance]: https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance
+pub struct DamerauLevenshtein {
+    /// If true, use the restricted variant (optimal string alignment), which never edits a substring more than once. Default: false.
+    pub restricted: bool,
+
+    /// The cost of removing a character.
+    pub del_cost: usize,
+
+    /// The cost of adding a new character.
+    pub ins_cost: usize,
+
+    /// The cost of replacing a character with another one.
+    pub sub_cost: usize,
+
+    /// The cost of swapping two adjacent characters.
+    pub trans_cost: usize,
+}
+
+impl Default for DamerauLevenshtein {
+    fn default() -> Self {
+        Self {
+            restricted: false,
+            del_cost: 1,
+            ins_cost: 1,
+            sub_cost: 1,
+            trans_cost: 1,
+        }
+    }
+}
+
+impl DamerauLevenshtein {
+    fn get_unrestricted<E: Eq + Hash>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        let l1 = s1.len();
+        let l2 = s2.len();
+        let max_dist = l2 + l1;
+
+        let mut mat: Vec<Vec<usize>> = vec![vec![0; l2 + 2]; l1 + 2];
+        mat[0][0] = max_dist;
+        for i in 0..=l1 {
+            mat[i + 1][0] = max_dist;
+            mat[i + 1][1] = i;
+        }
+        for i in 0..=l2 {
+            mat[0][i + 1] = max_dist;
+            mat[1][i + 1] = i;
+        }
+
+        let mut char_map: HashMap<&E, usize> = HashMap::new();
+        for (i1, c1) in s1.iter().enumerate() {
+            let mut db = 0;
+            let i1 = i1 + 1;
+
+            for (i2, c2) in s2.iter().enumerate() {
+                let i2 = i2 + 1;
+                let last = *char_map.get(&c2).unwrap_or(&0);
+
+                let sub_cost = if c1 == c2 { 0 } else { self.sub_cost };
+                mat[i1 + 1][i2 + 1] = min4(
+                    mat[i1][i2] + sub_cost,                                    // substitution
+                    mat[i1 + 1][i2] + self.del_cost,                           // deletion
+                    mat[i1][i2 + 1] + self.ins_cost,                           // insertion
+                    mat[last][db] + i1 + i2 - 2 + self.trans_cost - last - db, // transposition
+                );
+
+                if c1 == c2 {
+                    db = i2;
+                }
+            }
+
+            char_map.insert(c1, i1);
+        }
+
+        Result {
+            is_distance: true,
+            abs: mat[l1 + 1][l2 + 1],
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+
+    #[allow(clippy::needless_range_loop)]
+    fn get_restricted<E: Eq + Hash>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        let l1 = s1.len();
+        let l2 = s2.len();
+
+        let mut mat: Vec<Vec<usize>> = vec![vec![0; l2 + 2]; l1 + 2];
+        for i in 0..=l1 {
+            mat[i][0] = i;
+        }
+        for i in 0..=l2 {
+            mat[0][i] = i;
+        }
+
+        for (i1, c1) in s1.iter().enumerate() {
+            for (i2, c2) in s2.iter().enumerate() {
+                let sub_cost = if c1 == c2 { 0 } else { self.sub_cost };
+                mat[i1 + 1][i2 + 1] = min3(
+                    mat[i1][i2 + 1] + self.del_cost, // deletion
+                    mat[i1 + 1][i2] + self.ins_cost, // insertion
+                    mat[i1][i2] + sub_cost,          // substitution
+                );
+
+                // transposition
+                if i1 == 0 || i2 == 0 {
+                    continue;
+                };
+                if c1 != &s2[i2 - 1] {
+                    continue;
+                };
+                if &s1[i1 - 1] != c2 {
+                    continue;
+                };
+                let trans_cost = if c1 == c2 { 0 } else { self.trans_cost };
+                mat[i1 + 1][i2 + 1] = mat[i1 + 1][i2 + 1].min(mat[i1 - 1][i2 - 1] + trans_cost);
+            }
+        }
+
+        Result {
+            is_distance: true,
+            abs: mat[l1][l2],
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+impl Algorithm<usize> for DamerauLevenshtein {
+    fn for_vec<E: Eq + Hash>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        if self.restricted {
+            self.get_restricted(s1, s2)
+        } else {
+            self.get_unrestricted(s1, s2)
+        }
+    }
+}
+
+fn min4(a: usize, b: usize, c: usize, d: usize) -> usize {
+    a.min(b).min(c).min(d)
+}
+
+fn min3(a: usize, b: usize, c: usize) -> usize {
+    a.min(b).min(c)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::str::{damerau_levenshtein, damerau_levenshtein_restricted};
+    use assert2::assert;
+    use proptest::prelude::*;
+    use rstest::rstest;
+
+    #[rstest]
+    #[case("", "", 0)]
+    #[case("", "\0", 1)]
+    #[case("", "abc", 3)]
+    #[case("abc", "", 3)]
+    #[case("hannah", "hannha", 1)]
+    #[case("FOO", "BOR", 2)]
+    #[case("BAR", "BOR", 1)]
+    #[case("hansi", "hasni", 1)]
+    #[case("zzaabbio", "zzababoi", 2)]
+    #[case("zzaabb", "zzabab", 1)]
+    #[case("abcdef", "badcfe", 3)]
+    #[case("klmb", "klm", 1)]
+    #[case("klm", "klmb", 1)]
+    #[case("test", "text", 1)]
+    #[case("test", "tset", 1)]
+    #[case("test", "qwy", 4)]
+    #[case("test", "testit", 2)]
+    #[case("test", "tesst", 1)]
+    #[case("test", "tet", 1)]
+    #[case("cat", "hat", 1)]
+    #[case("Niall", "Neil", 3)]
+    #[case("aluminum", "Catalan", 7)]
+    #[case("ATCG", "TAGC", 2)]
+    #[case("ab", "ba", 1)]
+    #[case("ab", "cde", 3)]
+    #[case("ab", "ac", 1)]
+    #[case("ab", "bc", 2)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        let res1 = damerau_levenshtein(s1, s2);
+        let res2 = damerau_levenshtein_restricted(s1, s2);
+        assert!(res1 == res2);
+        assert!(res1 == exp);
+    }
+
+    #[test]
+    fn restricted() {
+        let a = DamerauLevenshtein {
+            restricted: true,
+            ..Default::default()
+        };
+        assert!(a.for_str("ab", "bca").val() == 3);
+        assert!(a.for_str("abcd", "bdac").val() == 4);
+    }
+
+    #[test]
+    fn unrestricted() {
+        let a = DamerauLevenshtein::default();
+        assert!(a.for_str("ab", "bca").val() == 2);
+        assert!(a.for_str("abcd", "bdac").val() == 3);
+    }
+
+    proptest! {
+        #[test]
+        fn prop_default(s1 in ".*", s2 in ".*") {
+            let res = damerau_levenshtein(&s1, &s2);
+            let res2 = damerau_levenshtein(&s2, &s1);
+            prop_assert_eq!(res, res2);
+            prop_assert!(res <= s1.len() || res <= s2.len());
+        }
+
+        #[test]
+        fn prop_restricted(s1 in ".*", s2 in ".*") {
+            let res = damerau_levenshtein_restricted(&s1, &s2);
+            let res2 = damerau_levenshtein_restricted(&s2, &s1);
+            prop_assert_eq!(res, res2);
+            prop_assert!(res <= s1.len() || res <= s2.len());
+        }
+    }
+}
diff --git a/crates/textdistance/src/algorithms/entropy_ncd.rs b/crates/textdistance/src/algorithms/entropy_ncd.rs
new file mode 100644
index 0000000..68bb8ef
--- /dev/null
+++ b/crates/textdistance/src/algorithms/entropy_ncd.rs
@@ -0,0 +1,128 @@
+//! Entropy-based Normalized Compression Distance
+#![cfg(feature = "std")]
+use crate::counter::Counter;
+use crate::{Algorithm, Result};
+use core::hash::Hash;
+
+/// Entropy-based [Normalized Compression Distance].
+///
+/// It shows how different two inputs are based on their [Entropy].
+///
+/// [Normalized Compression Distance]: https://en.wikipedia.org/wiki/Normalized_compression_distance
+/// [Entropy]: https://en.wikipedia.org/wiki/Entropy_(information_theory)
+pub struct EntropyNCD {
+    /// The base of logarithm for the entropy calculation. Default: 2.
+    pub base: usize,
+
+    /// A non-negative base value to add to entropy of all inputs,
+    /// so that the entropy is never zero. It accounts for all real-world compression
+    /// algorithms having a fixed header with metadata. Default: 1.
+    pub correction: f64,
+}
+
+impl Default for EntropyNCD {
+    fn default() -> Self {
+        Self {
+            base: 2,
+            correction: 1.,
+        }
+    }
+}
+
+impl EntropyNCD {
+    fn compress<E: Hash + Eq>(&self, c: &Counter<E>) -> f64 {
+        debug_assert!(self.correction >= 0.);
+        let total_count = c.count();
+        let mut entropy = 0.0;
+        for element_count in c.values() {
+            let p = *element_count as f64 / total_count as f64;
+            entropy -= p * p.log(self.base as f64);
+        }
+        debug_assert!(entropy >= 0.);
+        self.correction + entropy
+    }
+}
+
+impl Algorithm<f64> for EntropyNCD {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<f64>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        let c1 = Counter::from_iter(s1);
+        let c2 = Counter::from_iter(s2);
+        let cm = c1.merge(&c2);
+        let cl1 = self.compress(&c1);
+        let cl2 = self.compress(&c2);
+        let res: f64 = if cl1 == 0. && cl2 == 0. {
+            0.
+        } else {
+            let clt = self.compress(&cm);
+            (clt - cl1.min(cl2)) / cl1.max(cl2)
+        };
+        Result {
+            abs: res,
+            is_distance: true,
+            max: 1.,
+            len1: c1.count(),
+            len2: c2.count(),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::Counter;
+    use super::EntropyNCD;
+    use crate::str::entropy_ncd;
+    use assert2::assert;
+    use proptest::prelude::*;
+    use rstest::rstest;
+
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    #[case("", "", 0.)]
+    #[case("test", "test", 0.)]
+    #[case("aaa", "bbb", 1.)]
+    #[case("test", "nani", 0.4)]
+    // parity with textdistance
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = entropy_ncd(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "entropy_ncd({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+
+    #[rstest]
+    #[case("", 0.)]
+    #[case("hhhh", 0.)]
+    #[case("hi", 1.)]
+    #[case("hii", 0.9182958340544896)]
+    #[case("hhi", 0.9182958340544896)]
+    #[case("test", 1.5)]
+    #[case("nani", 1.5)]
+    #[case("testnani", 2.5)]
+    fn compress(#[case] s: &str, #[case] exp: f64) {
+        let c = Counter::from_iter(s.chars());
+        let alg = EntropyNCD {
+            correction: 0.,
+            ..Default::default()
+        };
+        let act = alg.compress(&c);
+        let ok = is_close(act, exp);
+        assert!(ok, "compress({}) is {}, not {}", s, act, exp);
+    }
+
+    proptest! {
+        #[test]
+        fn compress_idempotency(s in ".+") {
+            let c = Counter::from_iter(s.chars());
+            let e = EntropyNCD::default();
+            let r1 = e.compress(&c);
+            let r2 = e.compress(&c.merge(&c));
+            prop_assert!(r2 < r1 * 2.);
+        }
+    }
+}
diff --git a/crates/textdistance/src/algorithms/hamming.rs b/crates/textdistance/src/algorithms/hamming.rs
new file mode 100644
index 0000000..4a40fbb
--- /dev/null
+++ b/crates/textdistance/src/algorithms/hamming.rs
@@ -0,0 +1,108 @@
+//! Hamming distance
+use crate::{Algorithm, Result};
+
+/// [Hamming distance] is the number of positions at which the corresponding symbols are different.
+///
+/// [Hamming distance]: https://en.wikipedia.org/wiki/Hamming_distance
+#[derive(Default)]
+pub struct Hamming {
+    /// If true, the longer string is truncated to the length of the shorter
+    /// one, so its extra trailing elements are ignored. Default: false.
+    pub truncate: bool,
+}
+
+impl Algorithm<usize> for Hamming {
+    fn for_iter<C, E>(&self, mut s1: C, mut s2: C) -> Result<usize>
+    where
+        C: Iterator<Item = E>,
+        E: Eq,
+    {
+        let mut result = 0;
+        let mut l1 = 0;
+        let mut l2 = 0;
+        loop {
+            match (s1.next(), s2.next()) {
+                (Some(c1), Some(c2)) => {
+                    l1 += 1;
+                    l2 += 1;
+                    if c1 != c2 {
+                        result += 1;
+                    }
+                }
+                (Some(_), None) => {
+                    l1 += 1;
+                    if !self.truncate {
+                        result += 1;
+                    }
+                }
+                (None, Some(_)) => {
+                    l2 += 1;
+                    if !self.truncate {
+                        result += 1;
+                    }
+                }
+                (None, None) => {
+                    break;
+                }
+            }
+        }
+        Result {
+            abs: result,
+            is_distance: true,
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    #![allow(clippy::float_cmp)]
+
+    use super::{Algorithm, Hamming};
+    use crate::str::hamming;
+    use assert2::assert;
+    use proptest::prelude::*;
+    use rstest::rstest;
+
+    #[rstest]
+    #[case("", "", 0)]
+    #[case("", "\0", 1)]
+    #[case("", "abc", 3)]
+    #[case("abc", "", 3)]
+    #[case("sitting", "sitting", 0)]
+    #[case("abcdefg", "hijklmn", 7)]
+    #[case("karolin", "kathrin", 3)]
+    #[case("hello", "world", 4)]
+    #[case("Rust", "rust", 1)]
+    #[case("hi mark", "hi markus", 2)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(hamming(s1, s2) == exp);
+    }
+
+    #[test]
+    fn default_struct_result() {
+        let r = Hamming::default().for_str("Rust", "rust");
+        assert!(r.dist() == 1);
+        assert!(r.sim() == 3);
+        assert!(r.ndist() == 0.25);
+    }
+
+    #[test]
+    fn truncate() {
+        let a = Hamming { truncate: true };
+        assert!(a.for_str("hi mark", "hi markus").val() == 0);
+        assert!(a.for_str("Hi mark", "hi markus").val() == 1);
+    }
+
+    proptest! {
+        #[test]
+        fn prop(s1 in ".*", s2 in ".*") {
+            let res = hamming(&s1, &s2);
+            let res2 = hamming(&s2, &s1);
+            prop_assert_eq!(res, res2);
+            prop_assert!(res <= s1.len() || res <= s2.len());
+        }
+    }
+}
diff --git a/crates/textdistance/src/algorithms/jaccard.rs b/crates/textdistance/src/algorithms/jaccard.rs
new file mode 100644
index 0000000..b52cc3a
--- /dev/null
+++ b/crates/textdistance/src/algorithms/jaccard.rs
@@ -0,0 +1,67 @@
+//! Jaccard index
+#![cfg(feature = "std")]
+use crate::counter::Counter;
+use crate::{Algorithm, Result};
+
+/// [Jaccard similarity] is a ratio of intersection to union of two sets.
+///
+/// The metric works with the multiset (bag) of input elements, so strings "abc" and "bca"
+/// are the same if compared by letters.
+///
+/// The metric is always normalized on the interval from 0.0 to 1.0.
+///
+/// [Jaccard similarity]: https://en.wikipedia.org/wiki/Jaccard_index
+#[derive(Default)]
+pub struct Jaccard {}
+
+impl Algorithm<f64> for Jaccard {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<f64>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        let c1 = Counter::from_iter(s1);
+        let c2 = Counter::from_iter(s2);
+        let uc = c1.union_count(&c2);
+        let res = if uc == 0 {
+            1.
+        } else {
+            let ic = c1.intersect_count(&c2);
+            ic as f64 / uc as f64
+        };
+        Result {
+            abs: res,
+            is_distance: false,
+            max: 1.,
+            len1: c1.count(),
+            len2: c2.count(),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::jaccard;
+    use assert2::assert;
+    use rstest::rstest;
+
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    #[case("", "", 1.)]
+    #[case("nelson", "", 0.)]
+    #[case("", "neilsen", 0.)]
+    #[case("abc", "abc", 1.)]
+    #[case("abc", "bac", 1.)]
+    // parity with textdistance
+    #[case("nelson", "neilsen", 5. / 8.)]
+    #[case("test", "text", 3. / 5.)]
+    #[case("decide", "resize", 3. / 9.)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = jaccard(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "jaccard({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/jaro.rs b/crates/textdistance/src/algorithms/jaro.rs
new file mode 100644
index 0000000..0a50741
--- /dev/null
+++ b/crates/textdistance/src/algorithms/jaro.rs
@@ -0,0 +1,124 @@
+//! Jaro similarity
+use crate::{Algorithm, Result};
+use alloc::vec;
+
+/// [Jaro similarity] is calculated based on the number of transpositions to turn one string into the other.
+///
+/// The metric is always normalized on the interval from 0.0 to 1.0.
+///
+/// See also [`JaroWinkler`](crate::JaroWinkler).
+///
+/// [Jaro similarity]: https://en.wikipedia.org/wiki/Jaro%E2%80%93Winkler_distance#Jaro_similarity
+#[derive(Default)]
+pub struct Jaro {}
+
+impl Algorithm<f64> for Jaro {
+    fn for_vec<E: Eq>(&self, s1: &[E], s2: &[E]) -> Result<f64> {
+        let l1 = s1.len();
+        let l2 = s2.len();
+
+        if l1 == 0 || l2 == 0 {
+            let result = if l1 == 0 && l2 == 0 { 1. } else { 0. };
+            return Result {
+                abs: result,
+                is_distance: false,
+                max: 1.0,
+                len1: l1,
+                len2: l2,
+            };
+        }
+        if l1 == 1 && l2 == 1 {
+            let result = if s1[0] == s2[0] { 1.0 } else { 0.0 };
+            return Result {
+                abs: result,
+                is_distance: false,
+                max: 1.0,
+                len1: l1,
+                len2: l2,
+            };
+        }
+
+        let search_range = l1.max(l2) / 2 - 1;
+
+        let mut s2_consumed = vec![false; l2];
+        let mut matches: usize = 0;
+
+        let mut n_trans = 0.;
+        let mut b_match_index = 0;
+
+        for (i, a_elem) in s1.iter().enumerate() {
+            let min_bound =
+            // prevent integer wrapping
+            if i > search_range {
+                i - search_range
+            } else {
+                0
+            };
+
+            let max_bound = usize::min(l2 - 1, i + search_range);
+
+            if min_bound > max_bound {
+                continue;
+            }
+
+            for (j, b_elem) in s2.iter().enumerate() {
+                if min_bound <= j && j <= max_bound && a_elem == b_elem && !s2_consumed[j] {
+                    s2_consumed[j] = true;
+                    matches += 1;
+
+                    if j < b_match_index {
+                        n_trans += 1.;
+                    }
+                    b_match_index = j;
+
+                    break;
+                }
+            }
+        }
+
+        let result = if matches == 0 {
+            0.
+        } else {
+            let ms = matches as f64;
+            ((ms / l1 as f64) + (ms / l2 as f64) + ((ms - n_trans) / ms)) / 3.
+        };
+
+        Result {
+            abs: result,
+            is_distance: false,
+            max: 1.,
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::jaro;
+    use assert2::assert;
+    use rstest::rstest;
+
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    // parity with strsim-rs
+    #[case("", "", 1.)]
+    #[case("a", "a", 1.)]
+    #[case("Jaro-Winkler", "Jaro-Winkler", 1.)]
+    #[case("", "jaro-winkler", 0.)]
+    #[case("distance", "", 0.)]
+    #[case("a", "b", 0.)]
+    #[case("dixon", "dicksonx", 0.76667)]
+    #[case("a", "ab", 0.83333)]
+    #[case("ab", "a", 0.83333)]
+    #[case("dwayne", "duane", 0.82222)]
+    #[case("Friedrich Nietzsche", "Jean-Paul Sartre", 0.39189)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = jaro(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "jaro({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/jaro_winkler.rs b/crates/textdistance/src/algorithms/jaro_winkler.rs
new file mode 100644
index 0000000..63ebc77
--- /dev/null
+++ b/crates/textdistance/src/algorithms/jaro_winkler.rs
@@ -0,0 +1,124 @@
+//! Jaro-Winkler similarity
+use super::jaro::Jaro;
+use crate::{Algorithm, Result};
+
+/// [Jaro-Winkler similarity] is a variation of [`Jaro`] with a better rating for strings with a matching prefix.
+///
+/// The metric is always normalized on the interval from 0.0 to 1.0.
+///
+/// [Jaro-Winkler similarity]: https://en.wikipedia.org/wiki/Jaro%E2%80%93Winkler_distance
+pub struct JaroWinkler {
+    /// The Jaro instance to use to calculate the classic Jaro similarity.
+    pub jaro: Jaro,
+
+    /// `p` is a scaling factor for how much Jaro score is adjusted
+    /// for the common prefix. The default is 0.1, must not be higher than
+    /// `1/ℓ` where ℓ is the `max_prefix` value (4 by default).
+    pub prefix_weight: f64,
+
+    /// `ℓ` is the maximum length of the common prefix. The default is 4.
+    pub max_prefix: usize,
+}
+
+impl Default for JaroWinkler {
+    fn default() -> Self {
+        Self {
+            jaro: Jaro::default(),
+            prefix_weight: 0.1,
+            max_prefix: 4,
+        }
+    }
+}
+
+impl JaroWinkler {
+    fn winklerize<C, E>(&self, jaro: f64, s1: C, s2: C) -> f64
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        debug_assert!(self.prefix_weight * self.max_prefix as f64 <= 1.0);
+        let mut prefix_len = 0;
+        for (e1, e2) in s1.zip(s2) {
+            if e1 == e2 {
+                prefix_len += 1;
+                if prefix_len == self.max_prefix {
+                    break;
+                }
+            } else {
+                break;
+            }
+        }
+        jaro + (self.prefix_weight * prefix_len as f64 * (1.0 - jaro))
+    }
+}
+
+impl Algorithm<f64> for JaroWinkler {
+    fn for_vec<E>(&self, s1: &[E], s2: &[E]) -> Result<f64>
+    where
+        E: Eq + core::hash::Hash,
+    {
+        let jaro = self.jaro.for_vec(s1, s2).nval();
+        Result {
+            abs: self.winklerize(jaro, s1.iter(), s2.iter()),
+            is_distance: false,
+            max: 1.0,
+            len1: s1.len(),
+            len2: s2.len(),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::jaro_winkler;
+    use assert2::assert;
+    use rstest::rstest;
+
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    // parity with strsim-rs
+    #[case("", "", 1.)]
+    #[case("a", "a", 1.)]
+    #[case("Jaro-Winkler", "Jaro-Winkler", 1.)]
+    #[case("", "jaro-winkler", 0.)]
+    #[case("distance", "", 0.)]
+    #[case("a", "b", 0.)]
+    #[case("testabctest", "testöঙ香test", 0.890909)]
+    #[case("testöঙ香test", "testabctest", 0.890909)]
+    #[case("dixon", "dicksonx", 0.8133333)]
+    #[case("dwayne", "duane", 0.8400000)]
+    #[case("martha", "marhta", 0.9611111)]
+    #[case("Friedrich Nietzsche", "Fran-Paul Sartre", 0.561988)]
+    #[case("Thorkel", "Thorgier", 0.867857)]
+    #[case("Dinsdale", "D", 0.737500)]
+    // These fail because strsim doesn't limit the max prefix length:
+    // #[case("cheeseburger", "cheese fries", 0.911111)]
+    // #[case("thequickbrownfoxjumpedoverx", "thequickbrownfoxjumpedovery", 1.0)]
+
+    // parity with jellyfish
+    #[case("dixon", "dicksonx", 0.81333333)]
+    #[case("martha", "marhta", 0.961111111)]
+    #[case("dwayne", "duane", 0.84)]
+    #[case("William", "Williams", 0.975)]
+    #[case("", "foo", 0.)]
+    #[case("a", "a", 1.)]
+    #[case("abc", "xyz", 0.)]
+    #[case("aaaa", "aaaaa", 0.96)]
+    #[case("orangutan-kumquat", "orangutan kumquat", 0.97647058)]
+    #[case("jaz", "jal", 0.8222222)]
+    #[case("@", "@@", 0.85)]
+    #[case("0", "0@", 0.85)]
+    #[case("a", "ab", 0.85)]
+    #[case("012345", "0123456", 0.9714285)]
+    #[case("012abc", "012abcd", 0.9714285)]
+    #[case("012abc", "013abcd", 0.879365079)]
+    #[case("a1bc", "a1be", 0.8833333)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = jaro_winkler(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "jaro_winkler({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/lcsseq.rs b/crates/textdistance/src/algorithms/lcsseq.rs
new file mode 100644
index 0000000..3ae492b
--- /dev/null
+++ b/crates/textdistance/src/algorithms/lcsseq.rs
@@ -0,0 +1,92 @@
+//! Longest common subsequence
+use crate::{Algorithm, Result};
+use alloc::vec;
+use alloc::vec::Vec;
+
+/// The length of the [Longest common subsequence].
+///
+/// It differs from the [`LCSStr`](crate::LCSStr). Unlike substrings, subsequences are not required
+/// to occupy consecutive positions within the original sequences.
+///
+/// [Longest common subsequence]: https://en.wikipedia.org/wiki/Longest_common_subsequence
+#[derive(Default)]
+pub struct LCSSeq {}
+
+impl Algorithm<usize> for LCSSeq {
+    fn for_vec<E: Eq>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        let l1 = s1.len();
+        let l2 = s2.len();
+
+        // Classic LCS dynamic programming, rolled into two rows: `prev` holds
+        // the row for the first `i` elements of `s1`, `curr` the row being
+        // filled for the first `i + 1`. Only the length is reported, so the
+        // full O(l1 * l2) table and the backtracking pass of the textbook
+        // algorithm are unnecessary; this needs O(l2) extra memory instead.
+        let mut prev: Vec<usize> = vec![0; l2 + 1];
+        let mut curr: Vec<usize> = vec![0; l2 + 1];
+        for char1 in s1 {
+            for (j, char2) in s2.iter().enumerate() {
+                curr[j + 1] = if char1 == char2 {
+                    prev[j] + 1
+                } else {
+                    curr[j].max(prev[j + 1])
+                };
+            }
+            core::mem::swap(&mut prev, &mut curr);
+        }
+
+        // After the final swap, `prev` is the row for the whole of `s1`.
+        Result {
+            abs: prev[l2],
+            is_distance: false,
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::lcsseq;
+    use assert2::assert;
+    use proptest::prelude::*;
+    use rstest::rstest;
+
+    // Expected values are LCS *lengths*; each trailing comment shows one
+    // subsequence that realizes that length.
+    #[rstest]
+    #[case("", "", 0)]
+    #[case("", "abcd", 0)]
+    #[case("abcd", "", 0)]
+    #[case("ab", "cd", 0)]
+    #[case("abcd", "abcd", 4)] // "abcd"
+    #[case("test", "text", 3)] // "tet"
+    #[case("thisisatest", "testing123testing", 7)] // "tsitest"
+    #[case("abcd", "c", 1)] // "c"
+    #[case("abcd", "d", 1)] // "d"
+    #[case("abcd", "e", 0)] // ""
+    #[case("abcdefghi", "acegi", 5)] // "acegi"
+    #[case("abcdgh", "aedfhr", 3)] // "adh"
+    #[case("aggtab", "gxtxayb", 4)] // "gtab"
+    #[case("你好,世界", "再见世界", 2)] // "世界"
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(lcsseq(s1, s2) == exp);
+    }
+
+    proptest! {
+        // Properties: the measure is symmetric in its arguments, and (loose
+        // sanity bound) the result cannot exceed both inputs' byte lengths.
+        #[test]
+        fn prop(s1 in ".*", s2 in ".*") {
+            let res = lcsseq(&s1, &s2);
+            let res2 = lcsseq(&s2, &s1);
+            prop_assert_eq!(res, res2);
+            prop_assert!(res <= s1.len() || res <= s2.len());
+        }
+    }
+}
diff --git a/crates/textdistance/src/algorithms/lcsstr.rs b/crates/textdistance/src/algorithms/lcsstr.rs
new file mode 100644
index 0000000..75d6dc1
--- /dev/null
+++ b/crates/textdistance/src/algorithms/lcsstr.rs
@@ -0,0 +1,87 @@
+//! Longest common substring
+use crate::{Algorithm, Result};
+use alloc::vec;
+
+/// The length of the [Longest common substring].
+///
+/// A longest common substring of two or more strings is a longest string
+/// that is a substring of all of them.
+///
+/// [Longest common substring]: https://en.wikipedia.org/wiki/Longest_common_substring
+#[derive(Default)]
+pub struct LCSStr {}
+
+impl Algorithm<usize> for LCSStr {
+    fn for_vec<E: Eq>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        let l1 = s1.len();
+        let l2 = s2.len();
+        // Suffix-length DP rolled into two rows: `prev[j + 1]` / `curr[j + 1]`
+        // hold the length of the common suffix of the prefixes ending at the
+        // current element of `s1` and `s2[j]`. Only two rows are live at a
+        // time, so memory is O(l2) instead of the full O(l1 * l2) table.
+        let mut prev = vec![0; l2 + 1];
+        let mut curr = vec![0; l2 + 1];
+        let mut result_len = 0;
+        for c1 in s1 {
+            for (j, c2) in s2.iter().enumerate() {
+                // A mismatch resets the suffix length; since the rows are
+                // reused across iterations the reset must be explicit.
+                curr[j + 1] = if c1 == c2 { prev[j] + 1 } else { 0 };
+                if curr[j + 1] > result_len {
+                    result_len = curr[j + 1];
+                }
+            }
+            core::mem::swap(&mut prev, &mut curr);
+        }
+        Result {
+            abs: result_len,
+            is_distance: false,
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::lcsstr;
+    use assert2::assert;
+    use proptest::prelude::*;
+    use rstest::rstest;
+
+    // Each case lists the expected longest common substring; only its
+    // length is asserted.
+    #[rstest]
+    #[case("", "", "")]
+    #[case("a", "", "")]
+    #[case("", "a", "")]
+    #[case("a", "a", "a")]
+    #[case("ab", "b", "b")]
+    #[case("abcdef", "bcd", "bcd")]
+    #[case("bcd", "abcdef", "bcd")]
+    #[case("abcdef", "xabded", "ab")]
+    #[case("GeeksforGeeks", "GeeksQuiz", "Geeks")]
+    #[case("abcdxyz", "xyzabcd", "abcd")]
+    #[case("zxabcdezy", "yzabcdezx", "abcdez")]
+    #[case("OldSite:GeeksforGeeks.org", "NewSite:GeeksQuiz.com", "Site:Geeks")]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: &str) {
+        assert!(lcsstr(s1, s2) == exp.len());
+    }
+
+    // lcsstr counts characters, not bytes: "п" is 2 bytes but scores 1.
+    #[test]
+    fn unicode() {
+        let f = lcsstr;
+        assert!(f("п", "") == 0);
+        assert!(f("", "п") == 0);
+        assert!(f("п", "п") == 1);
+        assert!(f("привет", "пока") == 1);
+        assert!(f("корвет", "привет") == 3);
+    }
+
+    proptest! {
+        // Symmetry plus a loose upper bound on the result.
+        #[test]
+        fn prop(s1 in ".*", s2 in ".*") {
+            let res = lcsstr(&s1, &s2);
+            let res2 = lcsstr(&s2, &s1);
+            prop_assert_eq!(res, res2);
+            prop_assert!(res <= s1.len() || res <= s2.len());
+        }
+    }
+}
diff --git a/crates/textdistance/src/algorithms/length.rs b/crates/textdistance/src/algorithms/length.rs
new file mode 100644
index 0000000..47011e2
--- /dev/null
+++ b/crates/textdistance/src/algorithms/length.rs
@@ -0,0 +1,49 @@
+//! Length distance
+use crate::{Algorithm, Result};
+
+/// Length distance is the absolute difference between the lengths of the two sequences.
+///
+/// It's a very dumb algorithm that says that "qwer" and "zxcv" are the same.
+/// Still, it works surprisingly well in some specific scenarios, especially on big
+/// sequences.
+#[derive(Default)]
+pub struct Length {}
+
+impl Algorithm<usize> for Length {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<usize>
+    where
+        C: Iterator<Item = E>,
+        E: Eq,
+    {
+        // Only the element counts matter; consume both iterators up front.
+        let (n1, n2) = (s1.count(), s2.count());
+        let (shorter, longer) = if n1 <= n2 { (n1, n2) } else { (n2, n1) };
+        Result {
+            abs: longer - shorter,
+            is_distance: true,
+            max: longer,
+            len1: n1,
+            len2: n2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::length;
+    use assert2::assert;
+    use rstest::rstest;
+
+    // Note: equal-length inputs always score 0, regardless of content
+    // (e.g. "a" vs "b", "abcde" vs "abcef").
+    #[rstest]
+    #[case("", "", 0)]
+    #[case("", "a", 1)]
+    #[case("a", "", 1)]
+    #[case("a", "a", 0)]
+    #[case("a", "b", 0)]
+    #[case("abcde", "abcef", 0)]
+    #[case("abcde", "abcfde", 1)]
+    #[case("abcd", "bcd", 1)]
+    #[case("ab", "cdefg", 3)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(length(s1, s2) == exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/levenshtein.rs b/crates/textdistance/src/algorithms/levenshtein.rs
new file mode 100644
index 0000000..fc7dc35
--- /dev/null
+++ b/crates/textdistance/src/algorithms/levenshtein.rs
@@ -0,0 +1,157 @@
+//! Levenshtein distance
+use crate::{Algorithm, Result};
+use alloc::vec::Vec;
+
+/// [Levenshtein distance] is an edit distance between two sequences.
+///
+/// It is the minimum number of single-character edits (insertions, deletions or substitutions)
+/// required to change one word into the other.
+///
+/// See also [`DamerauLevenshtein`](crate::DamerauLevenshtein) which is an extended
+/// version of this algorithm that also includes transpositions.
+///
+/// [Levenshtein distance]: https://en.wikipedia.org/wiki/Levenshtein_distance
+// NOTE(review): `for_iter` seeds its DP boundaries with unit steps and picks
+// branches by comparing neighboring cells; verify that non-default costs
+// behave as intended before relying on them.
+pub struct Levenshtein {
+    /// The cost of removing a character.
+    pub del_cost: usize,
+
+    /// The cost of adding a new character.
+    pub ins_cost: usize,
+
+    /// The cost of replacing a character with another one.
+    pub sub_cost: usize,
+}
+
+impl Default for Levenshtein {
+    fn default() -> Self {
+        // Unit costs for all three operations: the classic Levenshtein distance.
+        Self {
+            del_cost: 1,
+            ins_cost: 1,
+            sub_cost: 1,
+        }
+    }
+}
+
+impl Algorithm<usize> for Levenshtein {
+    // Memory-optimized DP: only a single row (`cache`) of the classic matrix
+    // is kept; `dist1` tracks the diagonal cell and `result` the freshly
+    // computed cell of the current row.
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<usize>
+    where
+        C: Iterator<Item = E>,
+        E: Eq,
+    {
+        // `s1` is re-scanned once per element of `s2`, so collect it first.
+        let s1: Vec<E> = s1.collect();
+        let l1 = s1.len();
+        if l1 == 0 {
+            // Empty `s1`: the distance is one edit per element of `s2`.
+            let l2 = s2.count();
+            return Result {
+                abs: l2,
+                is_distance: true,
+                max: l1.max(l2),
+                len1: l1,
+                len2: l2,
+            };
+        }
+
+        // Initial row, for the empty prefix of `s2`: 1, 2, ..., l1 (unit steps).
+        let mut cache: Vec<usize> = (1..).take(l1).collect();
+        let mut dist1;
+        let mut dist2;
+
+        let mut result = 0;
+        let mut l2 = 0;
+        for (i2, c2) in s2.enumerate() {
+            result = i2;
+            dist1 = i2;
+            l2 += 1;
+
+            for (i1, c1) in s1.iter().enumerate() {
+                // Substitution candidate: free when the elements match.
+                dist2 = if c1 == &c2 {
+                    dist1
+                } else {
+                    dist1 + self.sub_cost
+                };
+                dist1 = cache[i1];
+                // Pick the cheapest of substitution / deletion / insertion.
+                // NOTE(review): this branch structure relies on adjacent DP
+                // cells differing by at most one, which holds for unit costs;
+                // confirm before using non-default costs.
+                result = if dist1 > result {
+                    if dist2 > result {
+                        result + self.del_cost
+                    } else {
+                        dist2
+                    }
+                } else if dist2 > dist1 {
+                    dist1 + self.ins_cost
+                } else {
+                    dist2
+                };
+                cache[i1] = result;
+            }
+        }
+        if l2 == 0 {
+            // `s2` produced no elements: the loop never ran, so fall back
+            // to deleting every element of `s1`.
+            return Result {
+                abs: l1,
+                is_distance: true,
+                max: l1.max(l2),
+                len1: l1,
+                len2: l2,
+            };
+        }
+        Result {
+            abs: result,
+            is_distance: true,
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::levenshtein;
+    use assert2::assert;
+    use proptest::prelude::*;
+    use rstest::rstest;
+
+    // Hand-picked edge cases first; a few cases repeat in the parity group
+    // below because both suites are kept verbatim from their sources.
+    #[rstest]
+    #[case("", "", 0)]
+    #[case("", "\0", 1)]
+    #[case("", "abc", 3)]
+    #[case("abc", "", 3)]
+    #[case("sitting", "sitting", 0)]
+    #[case("sitting", "kitten", 3)]
+    #[case("example", "samples", 3)]
+    #[case("distance", "difference", 5)]
+    #[case("test", "text", 1)]
+    #[case("test", "tset", 2)]
+    #[case("test", "qwe", 4)]
+    #[case("test", "testit", 2)]
+    #[case("test", "tesst", 1)]
+    #[case("test", "tet", 1)]
+    // parity with levenshtein-rs
+    #[case("sitting", "kitten", 3)]
+    #[case("gumbo", "gambol", 2)]
+    #[case("saturday", "sunday", 3)]
+    #[case("a", "b", 1)]
+    #[case("ab", "ac", 1)]
+    #[case("ac", "bc", 1)]
+    #[case("abc", "axc", 1)]
+    #[case("xabxcdxxefxgx", "1ab2cd34ef5g6", 6)]
+    #[case("xabxcdxxefxgx", "abcdefg", 6)]
+    #[case("javawasneat", "scalaisgreat", 7)]
+    #[case("example", "samples", 3)]
+    #[case("sturgeon", "urgently", 6)]
+    #[case("levenshtein", "frankenstein", 6)]
+    #[case("distance", "difference", 5)]
+    #[case("kitten", "sitting", 3)]
+    #[case("Tier", "Tor", 2)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(levenshtein(s1, s2) == exp);
+    }
+
+    proptest! {
+        // With the default (unit) costs the distance is symmetric and
+        // bounded by the longer input's length.
+        #[test]
+        fn prop(s1 in ".*", s2 in ".*") {
+            let res = levenshtein(&s1, &s2);
+            let res2 = levenshtein(&s2, &s1);
+            prop_assert_eq!(res, res2);
+            prop_assert!(res <= s1.len() || res <= s2.len());
+        }
+    }
+}
diff --git a/crates/textdistance/src/algorithms/lig3.rs b/crates/textdistance/src/algorithms/lig3.rs
new file mode 100644
index 0000000..992b4e1
--- /dev/null
+++ b/crates/textdistance/src/algorithms/lig3.rs
@@ -0,0 +1,81 @@
+//! LIG3 similarity
+use super::hamming::Hamming;
+use super::levenshtein::Levenshtein;
+use crate::{Algorithm, Result};
+use core::hash::Hash;
+
+/// [LIG3 similarity] is a normalization of [`Hamming`] by [`Levenshtein`].
+///
+/// [LIG3 similarity]: https://github.com/chrislit/abydos/blob/master/abydos/distance/_lig3.py
+pub struct LIG3 {
+    /// Algorithm instance to use for calculating Levenshtein distance.
+    pub levenshtein: Levenshtein,
+
+    /// Algorithm instance to use for calculating Hamming similarity.
+    pub hamming: Hamming,
+}
+
+impl Default for LIG3 {
+    fn default() -> Self {
+        // The Hamming pass overrides the default: `truncate` is turned off.
+        #[allow(clippy::needless_update)]
+        let hamming = Hamming {
+            truncate: false,
+            ..Default::default()
+        };
+        Self {
+            levenshtein: Levenshtein::default(),
+            hamming,
+        }
+    }
+}
+
+impl Algorithm<f64> for LIG3 {
+    fn for_vec<E>(&self, s1: &[E], s2: &[E]) -> Result<f64>
+    where
+        E: Eq + Hash,
+    {
+        // Combine the two base measures: Levenshtein taken as a distance,
+        // Hamming taken as a similarity.
+        let lev_result = self.levenshtein.for_vec(s1, s2);
+        let distance = lev_result.dist();
+        let matches = self.hamming.for_vec(s1, s2).sim();
+        let score = if distance == 0 && matches == 0 {
+            // Degenerate case (e.g. two empty inputs): perfect similarity.
+            1.
+        } else {
+            let doubled = 2 * matches;
+            doubled as f64 / (doubled + distance) as f64
+        };
+        Result {
+            abs: score,
+            is_distance: false,
+            max: 1.0,
+            len1: lev_result.len1,
+            len2: lev_result.len2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::lig3;
+    use assert2::assert;
+    use rstest::rstest;
+
+    // Absolute tolerance for comparing f64 similarity scores.
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    // parity with abydos
+    #[case("cat", "hat", 0.8)]
+    #[case("Niall", "Neil", 0.5714285714285714)]
+    #[case("aluminum", "Catalan", 0.0)]
+    #[case("ATCG", "TAGC", 0.0)]
+    #[case("Glavin", "Glawyn", 0.8)]
+    #[case("Williams", "Vylliems", 0.7692307692307693)]
+    #[case("Lewis", "Louis", 0.75)]
+    #[case("Alex", "Alexander", 0.6153846153846154)]
+    #[case("Wild", "Wildsmith", 0.6153846153846154)]
+    #[case("Bram", "Bramberley", 0.5714285714285714)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = lig3(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "lig3({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/mlipns.rs b/crates/textdistance/src/algorithms/mlipns.rs
new file mode 100644
index 0000000..704877d
--- /dev/null
+++ b/crates/textdistance/src/algorithms/mlipns.rs
@@ -0,0 +1,80 @@
+//! MLIPNS similarity
+use super::hamming::Hamming;
+use crate::{Algorithm, Result};
+use core::hash::Hash;
+
+/// [MLIPNS similarity] is a normalization for [`Hamming`] that returns either 0 or 1.
+///
+/// MLIPNS stands for Modified Language-Independent Product Name Search.
+///
+/// [MLIPNS similarity]: https://www.sial.iias.spb.su/files/386-386-1-PB.pdf
+pub struct MLIPNS {
+    // Hamming instance used for the underlying comparison.
+    hamming: Hamming,
+    // Upper bound on the mismatch ratio accepted by `check`
+    // (presumably of Hamming mismatches to length — see `check`). Default 0.25.
+    threshold: f64,
+    // How many mismatching positions may be forgiven before the strings
+    // are considered different. Default 2.
+    max_mismatches: usize,
+}
+
+impl Default for MLIPNS {
+    fn default() -> Self {
+        // Default parameters: tolerate up to 2 mismatches, 0.25 threshold.
+        Self {
+            hamming: Hamming::default(),
+            threshold: 0.25,
+            max_mismatches: 2,
+        }
+    }
+}
+
+impl MLIPNS {
+    /// Decide whether the Hamming result is "similar enough": the pair is
+    /// accepted if, after forgiving up to `max_mismatches` mismatching
+    /// positions, the mismatch ratio falls to `threshold` or below.
+    fn check(&self, ham: &Result<usize>) -> bool {
+        let mut remaining_len = ham.max;
+        let mut distance = ham.val();
+        // One pass per forgiven mismatch, plus the initial unforgiving pass.
+        for _ in 0..=self.max_mismatches {
+            if remaining_len == 0 {
+                return true;
+            }
+            if (1.0 - (remaining_len - distance) as f64 / remaining_len as f64) <= self.threshold {
+                return true;
+            }
+            // Forgive one mismatching position on both counters.
+            distance -= 1;
+            remaining_len -= 1;
+        }
+        remaining_len == 0
+    }
+}
+
+impl Algorithm<usize> for MLIPNS {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<usize>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + Hash,
+    {
+        // Reduce the Hamming result to a binary verdict:
+        // 1 when `check` accepts the pair as similar, 0 otherwise.
+        let hamming_result = self.hamming.for_iter(s1, s2);
+        let verdict = if self.check(&hamming_result) { 1 } else { 0 };
+        Result {
+            abs: verdict,
+            is_distance: false,
+            max: 1,
+            len1: hamming_result.len1,
+            len2: hamming_result.len2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::mlipns;
+    use assert2::assert;
+    use rstest::rstest;
+
+    // The metric is binary: 1 = "similar enough", 0 = "different".
+    #[rstest]
+    #[case("", "", 1)]
+    // parity with abydos and talisman
+    #[case("cat", "hat", 1)]
+    #[case("Niall", "Neil", 0)]
+    #[case("aluminum", "Catalan", 0)]
+    #[case("ATCG", "TAGC", 0)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(mlipns(s1, s2) == exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/overlap.rs b/crates/textdistance/src/algorithms/overlap.rs
new file mode 100644
index 0000000..b8d00df
--- /dev/null
+++ b/crates/textdistance/src/algorithms/overlap.rs
@@ -0,0 +1,63 @@
+//! Overlap coefficient
+#![cfg(feature = "std")]
+use crate::counter::Counter;
+use crate::{Algorithm, Result};
+
+/// [Overlap similarity] is the size of the intersection divided by the size
+/// of the smaller of the two multisets.
+///
+/// [Overlap similarity]: https://en.wikipedia.org/wiki/Overlap_coefficient
+#[derive(Default)]
+pub struct Overlap {}
+
+impl Algorithm<f64> for Overlap {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<f64>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        let counter1 = Counter::from_iter(s1);
+        let counter2 = Counter::from_iter(s2);
+        let total1 = counter1.count();
+        let total2 = counter2.count();
+        let similarity = if total1 == 0 && total2 == 0 {
+            // Two empty inputs are considered identical.
+            1.
+        } else if total1 == 0 || total2 == 0 {
+            // One empty input shares nothing with the other.
+            0.
+        } else {
+            counter1.intersect_count(&counter2) as f64 / total1.min(total2) as f64
+        };
+        Result {
+            abs: similarity,
+            is_distance: false,
+            max: 1.,
+            len1: total1,
+            len2: total2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::overlap;
+    use assert2::assert;
+    use rstest::rstest;
+
+    // Absolute tolerance for comparing f64 similarity scores.
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    #[case("", "", 1.)]
+    #[case("nelson", "", 0.)]
+    #[case("", "neilsen", 0.)]
+    // parity with textdistance
+    #[case("test", "text", 3. / 4.)]
+    #[case("testme", "textthis", 4. / 6.)]
+    #[case("nelson", "neilsen", 5. / 6.)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = overlap(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "overlap({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/prefix.rs b/crates/textdistance/src/algorithms/prefix.rs
new file mode 100644
index 0000000..21c2283
--- /dev/null
+++ b/crates/textdistance/src/algorithms/prefix.rs
@@ -0,0 +1,71 @@
+//! Prefix similarity
+use crate::{Algorithm, Result};
+
+/// Prefix similarity is the length of the longest common prefix for the given sequences.
+///
+/// It's a very dumb metric but it can be surprisingly effective for comparing words
+/// in languages with an extensive use of [suffixes](https://en.wikipedia.org/wiki/Suffix).
+#[derive(Default)]
+pub struct Prefix {}
+
+impl Algorithm<usize> for Prefix {
+    fn for_iter<C, E>(&self, mut s1: C, mut s2: C) -> Result<usize>
+    where
+        C: Iterator<Item = E>,
+        E: Eq,
+    {
+        let mut common = 0;
+        let mut still_matching = true;
+        let mut len1 = 0;
+        let mut len2 = 0;
+        // Advance both iterators in lockstep; once one is exhausted, keep
+        // draining the other only to measure its length.
+        loop {
+            match (s1.next(), s2.next()) {
+                (Some(c1), Some(c2)) => {
+                    len1 += 1;
+                    len2 += 1;
+                    if c1 == c2 {
+                        // Count the match only while the prefix is unbroken.
+                        if still_matching {
+                            common += 1;
+                        }
+                    } else {
+                        still_matching = false;
+                    }
+                }
+                (Some(_), None) => len1 += 1,
+                (None, Some(_)) => len2 += 1,
+                (None, None) => break,
+            }
+        }
+        Result {
+            abs: common,
+            is_distance: false,
+            max: len1.max(len2),
+            len1,
+            len2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::prefix;
+    use assert2::assert;
+    use rstest::rstest;
+
+    // Expected value is the length of the longest common prefix.
+    #[rstest]
+    #[case("", "", 0)]
+    #[case("", "a", 0)]
+    #[case("a", "", 0)]
+    #[case("a", "a", 1)]
+    #[case("a", "b", 0)]
+    #[case("abcde", "abcef", 3)]
+    #[case("abcde", "abcfde", 3)]
+    #[case("abcd", "bcd", 0)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(prefix(s1, s2) == exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/ratcliff_obershelp.rs b/crates/textdistance/src/algorithms/ratcliff_obershelp.rs
new file mode 100644
index 0000000..8d30e69
--- /dev/null
+++ b/crates/textdistance/src/algorithms/ratcliff_obershelp.rs
@@ -0,0 +1,113 @@
+//! Gestalt pattern matching
+use crate::{Algorithm, Result};
+use alloc::vec;
+use alloc::vec::Vec;
+
+/// [Ratcliff/Obershelp similarity] is [`LCSStr`] that recursively finds matches
+/// on both sides of the longest substring.
+///
+/// The non-normalized result is a double number of matching characters defined as the first
+/// longest common substring plus recursively the number of matching characters in
+/// the non-matching regions on both sides of the longest common substring.
+///
+/// The normalized result is the non-normalized one divided by the sum of the input string lengths.
+///
+/// [Ratcliff/Obershelp similarity]: https://en.wikipedia.org/wiki/Gestalt_pattern_matching
+/// [`LCSStr`]: crate::LCSStr
+#[derive(Default)]
+pub struct RatcliffObershelp {}
+
+impl Algorithm<usize> for RatcliffObershelp {
+    fn for_vec<E: Eq>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        let l1 = s1.len();
+        let l2 = s2.len();
+        // Explicit work stack instead of recursion: each entry is a pair of
+        // `(start, len)` windows, one into each input, that still have to be
+        // matched against each other.
+        let mut stack: Vec<((usize, usize), (usize, usize))> = Vec::new();
+        stack.push(((0, l1), (0, l2)));
+        let mut result = 0;
+
+        while let Some(top) = stack.pop() {
+            let ((part1_start, part1_len), (part2_start, part2_len)) = top;
+            let s1_part = s1[part1_start..(part1_start + part1_len)].iter();
+            let s2_part: Vec<&E> = s2[part2_start..(part2_start + part2_len)].iter().collect();
+
+            // Longest common substring of the two windows via the classic DP
+            // table. The table is sized by the window lengths (the indices
+            // below are window-local), not by the full inputs, so the
+            // per-iteration allocation shrinks with the remaining work.
+            let mut dp = vec![vec![0; part2_len + 1]; part1_len + 1];
+            let mut prefix1_end = 0;
+            let mut prefix2_end = 0;
+            let mut match_len: usize = 0;
+            for (i1, c1) in s1_part.enumerate() {
+                for (i2, c2) in s2_part.iter().enumerate() {
+                    if &c1 == c2 {
+                        let new_len: usize = dp[i1][i2] + 1;
+                        dp[i1 + 1][i2 + 1] = new_len;
+                        if new_len > match_len {
+                            debug_assert!(i1 + 1 >= new_len);
+                            debug_assert!(i2 + 1 >= new_len);
+                            match_len = new_len;
+                            prefix1_end = i1 + 1;
+                            prefix2_end = i2 + 1;
+                        };
+                    }
+                }
+            }
+
+            if match_len != 0 {
+                // Queue the unmatched regions before and after the match;
+                // each pair is matched independently.
+                let prefix1_len = prefix1_end - match_len;
+                let prefix2_len = prefix2_end - match_len;
+                if prefix1_len != 0 && prefix2_len != 0 {
+                    stack.push(((part1_start, prefix1_len), (part2_start, prefix2_len)));
+                }
+                let suffix1_len = part1_len - prefix1_end;
+                let suffix2_len = part2_len - prefix2_end;
+                if suffix1_len != 0 && suffix2_len != 0 {
+                    stack.push((
+                        (part1_start + prefix1_end, suffix1_len),
+                        (part2_start + prefix2_end, suffix2_len),
+                    ));
+                }
+                result += match_len;
+            }
+        }
+
+        // Doubled so the normalized value is 2M / (len1 + len2).
+        Result {
+            abs: 2 * result,
+            is_distance: false,
+            max: l1 + l2,
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    #![allow(clippy::float_cmp)]
+
+    use super::{Algorithm, RatcliffObershelp};
+    use crate::str::ratcliff_obershelp;
+    use assert2::assert;
+    use rstest::rstest;
+
+    #[rstest]
+    #[case("", "", 1.)]
+    #[case("abc", "", 0.)]
+    #[case("", "abc", 0.)]
+    #[case("abc", "abc", 1.)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        assert!(ratcliff_obershelp(s1, s2) == exp);
+    }
+
+    // Ratcliff/Obershelp is not symmetric: swapping the inputs can change
+    // which substrings are matched first, hence 24 vs 26 below.
+    #[test]
+    fn default_abs() {
+        let a = RatcliffObershelp::default();
+        assert!(
+            a.for_str("GESTALT PATTERN MATCHING", "GESTALT PRACTICE")
+                .val()
+                == 24
+        );
+        assert!(
+            a.for_str("GESTALT PRACTICE", "GESTALT PATTERN MATCHING")
+                .val()
+                == 26
+        );
+    }
+}
diff --git a/crates/textdistance/src/algorithms/roberts.rs b/crates/textdistance/src/algorithms/roberts.rs
new file mode 100644
index 0000000..1aae4aa
--- /dev/null
+++ b/crates/textdistance/src/algorithms/roberts.rs
@@ -0,0 +1,91 @@
+//! Roberts similarity
+#![cfg(feature = "std")]
+use crate::counter::Counter;
+use crate::{Algorithm, Result};
+
+/// [Roberts similarity].
+///
+/// The metric is always normalized on the interval from 0.0 to 1.0.
+///
+/// [Roberts similarity]: https://github.com/chrislit/abydos/blob/master/abydos/distance/_roberts.py
+#[derive(Default)]
+pub struct Roberts {}
+
+impl Algorithm<f64> for Roberts {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<f64>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        let c1 = Counter::from_iter(s1);
+        let c2 = Counter::from_iter(s2);
+        let n1 = c1.count();
+        let n2 = c2.count();
+        if n1 == 0 && n2 == 0 {
+            return Result {
+                abs: 1.0,
+                is_distance: false,
+                max: 1.,
+                len1: n1,
+                len2: n2,
+            };
+        }
+
+        let cm = c1.merge(&c2);
+        let alphabet = cm.keys();
+        let mut s1: f64 = 0.;
+        let mut s2: usize = 0;
+        for key in alphabet {
+            let v1 = c1.get(key).unwrap_or(&0);
+            let v2 = c2.get(key).unwrap_or(&0);
+            if v1 != &0 && v2 != &0 {
+                s1 += ((v1 + v2) * v1.min(v2)) as f64 / *v1.max(v2) as f64;
+            }
+            s2 += v1 + v2;
+        }
+
+        Result {
+            abs: s1 / s2 as f64,
+            is_distance: false,
+            max: 1.,
+            len1: n1,
+            len2: n2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::roberts;
+    use assert2::assert;
+    use rstest::rstest;
+
+    // Absolute tolerance for comparing f64 similarity scores.
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    #[case("", "", 1.)]
+    #[case("a", "a", 1.)]
+    #[case("", "a", 0.)]
+    #[case("a", "", 0.)]
+    // Parity with abydos.
+    // By default, abydos uses bi-grams with word separators to tokenize any passed text
+    // for Roberts. And that's what gets tested. However, textdistance uses bag of chars
+    // by default and doesn't add any word separators ever. So, instead of using results
+    // from tests, I've put results of running the values through `Roberts(qval=1).sim(a, b)`.
+    #[case("cat", "hat", 0.6666666666666666)]
+    #[case("Niall", "Neil", 0.6111111111111112)]
+    #[case("aluminum", "Catalan", 0.3555555555555555)]
+    #[case("ATCG", "TAGC", 1.0)]
+    #[case("Nigel", "Niall", 0.55)]
+    #[case("Niall", "Nigel", 0.55)]
+    #[case("Colin", "Coiln", 1.0)]
+    #[case("Coiln", "Colin", 1.0)]
+    #[case("ATCAACGAGT", "AACGATTAG", 0.9210526315789473)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = roberts(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "roberts({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/sift4_common.rs b/crates/textdistance/src/algorithms/sift4_common.rs
new file mode 100644
index 0000000..f846f48
--- /dev/null
+++ b/crates/textdistance/src/algorithms/sift4_common.rs
@@ -0,0 +1,185 @@
+//! Sift4 distance
+use crate::{Algorithm, Result};
+use alloc::vec::Vec;
+use core::num::Wrapping;
+
+/// [Sift4 distance] is an edit algorithm designed to be "fast and relatively accurate".
+///
+/// The original blog post describes 3 different implementations of the algorithm,
+/// this is the "common" one. The main difference from [`Sift4Simple`](crate::Sift4Simple)
+/// is the support for `max_distance` that can be used to stop calculating the distance
+/// after a certain threshold.
+///
+/// [Sift4 distance]: https://siderite.dev/blog/super-fast-and-accurate-string-distance.html
+pub struct Sift4Common {
+    /// The number of characters to search for matching letters. Default: 5.
+    pub max_offset: usize,
+
+    /// The distance at which the algorithm should stop computing the value
+    /// and just exit (the strings are too different anyway). Default: 0.
+    pub max_distance: usize,
+}
+
+impl Default for Sift4Common {
+    fn default() -> Self {
+        Self {
+            max_distance: 0,
+            max_offset: 5,
+        }
+    }
+}
+
+impl Algorithm<usize> for Sift4Common {
+    fn for_vec<E: Eq>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        let l1 = s1.len();
+        let l2 = s2.len();
+
+        // if l1 == 0 {
+        //     return l2;
+        // }
+        // if l2 == 0 {
+        //     return l1;
+        // }
+
+        // NOTE: c1 and c2 are Wrapping because one step of the algorithm can temporarily underflow them, causing panics in debug builds
+        let mut c1 = Wrapping(0); // cursor for string 1
+        let mut c2 = Wrapping(0); // cursor for string 2
+        let mut lcss = 0; // largest common subsequence
+        let mut local_cs = 0; // local common substring
+        let mut trans = 0; // number of transpositions ('ab' vs 'ba')
+        let mut offset_arr: Vec<Offset> = Vec::new(); // offset pair array, for computing the transpositions
+        while (c1.0 < l1) && (c2.0 < l2) {
+            if s1[c1.0] == s2[c2.0] {
+                local_cs += 1;
+                let mut is_trans = false;
+                //see if current match is a transposition
+                let mut i = 0;
+                while i < offset_arr.len() {
+                    let ofs = &mut offset_arr[i];
+                    if c1.0 <= ofs.c1 || c2.0 <= ofs.c2 {
+                        // when two matches cross, the one considered a transposition is the one with the largest difference in offsets
+                        is_trans = c1.0.abs_diff(c2.0) >= ofs.c1.abs_diff(ofs.c2);
+                        if is_trans {
+                            trans += 1;
+                        } else if !ofs.trans {
+                            ofs.trans = true;
+                            trans += 1;
+                        }
+                        break;
+                    } else if c1.0 > ofs.c2 && c2.0 > ofs.c1 {
+                        offset_arr.remove(i);
+                    } else {
+                        i += 1;
+                    }
+                }
+                offset_arr.push(Offset {
+                    c1: c1.0,
+                    c2: c2.0,
+                    trans: is_trans,
+                });
+            } else {
+                lcss += local_cs;
+                local_cs = 0;
+                if c1 != c2 {
+                    let t = c1.min(c2); //using min allows the computation of transpositions
+                    c1 = t;
+                    c2 = t;
+                }
+                if self.max_distance != 0 {
+                    let temporary_distance = c1.0.max(c2.0) - lcss + trans;
+                    if temporary_distance > self.max_distance {
+                        return Result {
+                            abs: temporary_distance,
+                            is_distance: true,
+                            max: l1.max(l2),
+                            len1: l1,
+                            len2: l2,
+                        };
+                    }
+                }
+                //if matching characters are found, remove 1 from both cursors (they get incremented at the end of the loop)
+                //so that we can have only one code block handling matches
+                for i in 0..self.max_offset {
+                    if c1.0 + i >= l1 && c2.0 + i >= l2 {
+                        break;
+                    }
+                    if (c1.0 + i < l1) && (s1[c1.0 + i] == s2[c2.0]) {
+                        c1 += i;
+                        c1 -= 1; // NOTE: c1 may underflow here
+
+                        c2 -= 1; // NOTE: c2 may underflow here
+
+                        break;
+                    }
+                    if (c2.0 + i < l2) && (s1[c1.0] == s2[c2.0 + i]) {
+                        c1 -= 1; // NOTE: c1 may underflow here
+
+                        c2 += i;
+                        c2 -= 1; // NOTE: c2 may underflow here
+
+                        break;
+                    }
+                }
+            }
+
+            // NOTE: If c1 or c2 underflowed in the previous loop, this ensures that they return to 0
+            c1 += 1;
+            c2 += 1;
+
+            // this covers the case where the last match is on the last token in list, so that it can compute transpositions correctly
+            if (c1.0 >= l1) || (c2.0 >= l2) {
+                lcss += local_cs;
+                local_cs = 0;
+                let t = c1.min(c2);
+                c1 = t;
+                c2 = t;
+            }
+        }
+        Result {
+            abs: l1.max(l2) - lcss - local_cs + trans,
+            is_distance: true,
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+struct Offset {
+    c1: usize,
+    c2: usize,
+    trans: bool,
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::sift4_common;
+    use assert2::assert;
+    use rstest::rstest;
+
+    #[rstest]
+    // parity with another Rust implementation
+    #[case("London", "Lond", 2)]
+    #[case("Chicago", "Chiag", 2)]
+    // #[case("Los Angeles", "Angeles", 4)]
+    #[case("Bangkok", "Bagrok", 2)]
+    #[case("San Francisco", "san Francisco", 1)]
+    #[case("New York", "new York", 1)]
+    #[case("San Francisco", "", 13)]
+    #[case("", "New York", 8)]
+    // parity with Swift implementation
+    #[case("a", "a", 0)]
+    #[case("a", "b", 1)]
+    #[case("aa", "aabb", 2)]
+    #[case("aaaa", "aabb", 2)]
+    #[case("abba", "aabb", 1)]
+    #[case("aaaa", "abbb", 3)]
+    #[case("123 nowhere ave", "123 n0where 4ve", 2)]
+    #[case("bisectable6", "disectable6", 1)]
+    // Underflow panic regression tests
+    #[case("aaaaaa |", "baaaaa", 3)]
+    #[case("/", "®/", 1)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(sift4_common(s1, s2) == exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/sift4_simple.rs b/crates/textdistance/src/algorithms/sift4_simple.rs
new file mode 100644
index 0000000..16ea229
--- /dev/null
+++ b/crates/textdistance/src/algorithms/sift4_simple.rs
@@ -0,0 +1,104 @@
+//! Sift4 distance
+use crate::{Algorithm, Result};
+
+/// [Sift4 distance] is an edit algorithm designed to be "fast and relatively accurate".
+///
+/// The original blog post describes 3 different implementations of the algorithm,
+/// this is the "simplest" one.
+///
+/// [Sift4 distance]: https://siderite.dev/blog/super-fast-and-accurate-string-distance.html
+pub struct Sift4Simple {
+    /// The number of characters to search for matching letters.
+    pub max_offset: usize,
+    // max_distance: usize,
+}
+
+impl Default for Sift4Simple {
+    fn default() -> Self {
+        Self {
+            // max_distance: 0,
+            max_offset: 5,
+        }
+    }
+}
+
+impl Algorithm<usize> for Sift4Simple {
+    fn for_vec<E: Eq>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        let l1 = s1.len();
+        let l2 = s2.len();
+
+        let mut c1 = 0; // cursor for string 1
+        let mut c2 = 0; // cursor for string 2
+        let mut lcss = 0; // largest common subsequence
+        let mut local_cs = 0; // local common substring
+
+        while c1 < l1 && c2 < l2 {
+            if s1[c1] == s2[c2] {
+                local_cs += 1;
+            } else {
+                lcss += local_cs;
+                local_cs = 0;
+                if c1 != c2 {
+                    c1 = c1.min(c2);
+                    c2 = c1; // using min allows the computation of transpositions
+                }
+
+                for i in 0..self.max_offset {
+                    if !(c1 + i < l1 || c2 + i < l2) {
+                        break;
+                    }
+
+                    if c1 + i < l1 && s1[c1 + i] == s2[c2] {
+                        c1 += i;
+                        local_cs += 1;
+                        break;
+                    }
+                    if (c2 + i < l2) && (s1[c1] == s2[c2 + i]) {
+                        c2 += i;
+                        local_cs += 1;
+                        break;
+                    }
+                }
+            }
+            c1 += 1;
+            c2 += 1;
+        }
+        Result {
+            abs: l1.max(l2) - lcss - local_cs,
+            is_distance: true,
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::sift4_simple;
+    use assert2::assert;
+    use rstest::rstest;
+
+    #[rstest]
+    // parity with another Rust implementation
+    #[case("London", "Lond", 2)]
+    #[case("Chicago", "Chiag", 2)]
+    #[case("Los Angeles", "Angeles", 4)]
+    #[case("Bangkok", "Bagrok", 2)]
+    #[case("San Francisco", "san Francisco", 1)]
+    #[case("New York", "new York", 1)]
+    #[case("San Francisco", "", 13)]
+    #[case("", "New York", 8)]
+    // parity with Swift implementation
+    #[case("a", "a", 0)]
+    #[case("a", "b", 1)]
+    #[case("aa", "aabb", 2)]
+    #[case("aaaa", "aabb", 2)]
+    #[case("abba", "aabb", 1)]
+    #[case("aaaa", "abbb", 3)]
+    #[case("123 nowhere ave", "123 n0where 4ve", 2)]
+    #[case("bisectable6", "disectable6", 1)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(sift4_simple(s1, s2) == exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/smith_waterman.rs b/crates/textdistance/src/algorithms/smith_waterman.rs
new file mode 100644
index 0000000..e99df26
--- /dev/null
+++ b/crates/textdistance/src/algorithms/smith_waterman.rs
@@ -0,0 +1,82 @@
+//! Smith-Waterman sequence alignment
+use crate::{Algorithm, Result};
+use alloc::vec;
+use alloc::vec::Vec;
+
+/// [Smith-Waterman similarity] is edit-based and designed for nucleic acid (and protein) sequences.
+///
+/// [Smith-Waterman similarity]: https://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
+pub struct SmithWaterman {
+    /// The cost of an alignment gap. Default: 1.
+    pub gap_cost: isize,
+
+    /// The cost of symbols matching. Default: -1.
+    pub match_cost: isize,
+
+    /// The cost of symbols not matching. Default: 0.
+    pub mismatch_cost: isize,
+}
+
+impl Default for SmithWaterman {
+    fn default() -> Self {
+        Self {
+            gap_cost: 1,
+            match_cost: -1,
+            mismatch_cost: 0,
+        }
+    }
+}
+
+impl Algorithm<usize> for SmithWaterman {
+    fn for_vec<E: Eq>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        let l1 = s1.len();
+        let l2 = s2.len();
+        let mut dist_mat: Vec<Vec<isize>> = vec![vec![0; l2 + 1]; l1 + 1];
+        for (i, sc1) in s1.iter().enumerate() {
+            for (j, sc2) in s2.iter().enumerate() {
+                let cost = if sc1 == sc2 {
+                    self.match_cost
+                } else {
+                    self.mismatch_cost
+                };
+                let match_ = dist_mat[i][j] - cost;
+                let delete = dist_mat[i][j + 1] - self.gap_cost;
+                let insert = dist_mat[i + 1][j] - self.gap_cost;
+                dist_mat[i + 1][j + 1] = 0.max(match_).max(delete).max(insert);
+            }
+        }
+        let result = dist_mat[l1][l2];
+        Result {
+            #[allow(clippy::cast_sign_loss)]
+            abs: result as usize,
+            is_distance: false,
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::smith_waterman;
+    use assert2::assert;
+    use rstest::rstest;
+
+    #[rstest]
+    // parity with textdistance
+    #[case("hello", "world", 1)]
+    #[case("abcd", "abce", 3)]
+    #[case("AGACTAGTTAC", "CGAGACGT", 3)]
+    #[case("qwe", "rty", 0)]
+    #[case("qwe", "rty", 0)]
+    #[case("check", "shrek", 2)]
+    // parity with abydos
+    #[case("cat", "hat", 2)]
+    #[case("Niall", "Neil", 1)]
+    #[case("aluminum", "Catalan", 0)]
+    #[case("ATCG", "TAGC", 1)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(smith_waterman(s1, s2) == exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/sorensen_dice.rs b/crates/textdistance/src/algorithms/sorensen_dice.rs
new file mode 100644
index 0000000..23fba47
--- /dev/null
+++ b/crates/textdistance/src/algorithms/sorensen_dice.rs
@@ -0,0 +1,99 @@
+//! Sørensen-Dice coefficient
+#![cfg(feature = "std")]
+use crate::counter::Counter;
+use crate::{Algorithm, Result};
+
+/// [Sørensen–Dice similarity] is a ratio of common chars to total chars in the given strings.
+///
+/// [Sørensen–Dice similarity]: https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient
+#[derive(Default)]
+pub struct SorensenDice {}
+
+impl Algorithm<f64> for SorensenDice {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<f64>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        let c1 = Counter::from_iter(s1);
+        let c2 = Counter::from_iter(s2);
+        let cn = c1.count() + c2.count();
+        let res = if cn == 0 {
+            1.
+        } else {
+            let ic = c1.intersect_count(&c2);
+            (2 * ic) as f64 / cn as f64
+        };
+        Result {
+            abs: res,
+            is_distance: false,
+            max: 1.,
+            len1: c1.count(),
+            len2: c2.count(),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::sorensen_dice;
+    use crate::{Algorithm, SorensenDice};
+    use assert2::assert;
+    use rstest::rstest;
+
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    #[case("", "", 1.)]
+    #[case("nelson", "", 0.)]
+    #[case("", "neilsen", 0.)]
+    // parity with textdistance
+    #[case("test", "text", 2.0 * 3. / 8.)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = sorensen_dice(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "sorensen_dice({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+
+    #[rstest]
+    // parity with strsim
+    #[case("a", "a", 1.0)]
+    #[case("", "", 1.0)]
+    #[case("apple event", "apple event", 1.0)]
+    #[case("iphone", "iphone x", 0.9090909090909091)]
+    #[case("french", "quebec", 0.0)]
+    #[case("france", "france", 1.0)]
+    #[case("fRaNce", "france", 0.2)]
+    #[case("healed", "sealed", 0.8)]
+    #[case("web applications", "applications of the web", 0.7878787878)]
+    #[case("this has one extra word", "this has one word", 0.7741935483870968)]
+    #[case(
+        "this will have a typo somewhere",
+        "this will huve a typo somewhere",
+        0.92
+    )]
+    #[case(
+        "Olive-green table for sale, in extremely good condition.",
+        "For sale: table in very good  condition, olive green in colour.",
+        0.6060606060606061
+    )]
+    #[case(
+        "Olive-green table for sale, in extremely good condition.",
+        "For sale: green Subaru Impreza, 210,000 miles",
+        0.2558139534883721
+    )]
+    #[case(
+        "Olive-green table for sale, in extremely good condition.",
+        "Wanted: mountain bike with at least 21 gears.",
+        0.1411764705882353
+    )]
+    fn for_bigrams(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let s1 = &s1.replace(' ', "");
+        let s2 = &s2.replace(' ', "");
+        let act = SorensenDice::default().for_bigrams(s1, s2).nval();
+        let ok = is_close(act, exp);
+        assert!(ok, "sorensen_dice({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/suffix.rs b/crates/textdistance/src/algorithms/suffix.rs
new file mode 100644
index 0000000..d18bf6a
--- /dev/null
+++ b/crates/textdistance/src/algorithms/suffix.rs
@@ -0,0 +1,52 @@
+//! Suffix similarity
+use crate::{Algorithm, Result};
+
+/// Suffix similarity is the length of the longest common suffix of the given sequences.
+///
+/// It's a very dumb metric but it can work surprisingly well for comparing words
+/// in languages with an active use of [prefixes](https://en.wikipedia.org/wiki/Prefix).
+#[derive(Default)]
+pub struct Suffix {}
+
+impl Algorithm<usize> for Suffix {
+    fn for_vec<E: Eq>(&self, s1: &[E], s2: &[E]) -> Result<usize> {
+        let mut result = 0;
+        for (c1, c2) in s1.iter().rev().zip(s2.iter().rev()) {
+            if c1 == c2 {
+                result += 1;
+            } else {
+                break;
+            }
+        }
+        let l1 = s1.len();
+        let l2 = s2.len();
+        Result {
+            abs: result,
+            is_distance: false,
+            max: l1.max(l2),
+            len1: l1,
+            len2: l2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::suffix;
+    use assert2::assert;
+    use rstest::rstest;
+
+    #[rstest]
+    #[case("", "", 0)]
+    #[case("", "a", 0)]
+    #[case("a", "", 0)]
+    #[case("a", "a", 1)]
+    #[case("a", "b", 0)]
+    #[case("abcde", "abcef", 0)]
+    #[case("abcde", "abfcde", 3)]
+    #[case("abcd", "fabcd", 4)]
+
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: usize) {
+        assert!(suffix(s1, s2) == exp);
+    }
+}
diff --git a/crates/textdistance/src/algorithms/tversky.rs b/crates/textdistance/src/algorithms/tversky.rs
new file mode 100644
index 0000000..e01cc63
--- /dev/null
+++ b/crates/textdistance/src/algorithms/tversky.rs
@@ -0,0 +1,103 @@
+//! Tversky index
+#![cfg(feature = "std")]
+use crate::counter::Counter;
+use crate::{Algorithm, Result};
+
+/// [Tversky similarity] is a generalization of [`SorensenDice`] and [`Jaccard`].
+///
+/// [Tversky similarity]: https://en.wikipedia.org/wiki/Tversky_index
+/// [`SorensenDice`]: crate::SorensenDice
+/// [`Jaccard`]: crate::Jaccard
+pub struct Tversky {
+    /// α, the weight of the first sequence (the "prototype").
+    pub alpha: f64,
+    /// β, the weight of the second sequence (the "variant").
+    pub beta: f64,
+    /// The symmetric Tversky index bias parameter.
+    pub bias: f64,
+}
+
+impl Default for Tversky {
+    fn default() -> Self {
+        Self {
+            alpha: 1.,
+            beta: 1.,
+            bias: 0.,
+        }
+    }
+}
+
+impl Algorithm<f64> for Tversky {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<f64>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        let c1 = Counter::from_iter(s1);
+        let c2 = Counter::from_iter(s2);
+        let ic = c1.intersect_count(&c2);
+        let n1 = c1.count();
+        let n2 = c2.count();
+
+        if n1 == 0 && n2 == 0 {
+            return Result {
+                abs: 1.,
+                is_distance: false,
+                max: 1.,
+                len1: c1.count(),
+                len2: c2.count(),
+            };
+        }
+
+        let denom = self.alpha * (n1 - ic) as f64 + self.beta * (n2 - ic) as f64;
+        let res = (ic as f64 + self.bias) / (ic as f64 + denom);
+        Result {
+            abs: res,
+            is_distance: false,
+            max: 1.,
+            len1: c1.count(),
+            len2: c2.count(),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::str::{jaccard, sorensen_dice, tversky};
+    use assert2::assert;
+    use proptest::prelude::*;
+    use rstest::rstest;
+
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    #[case("", "", 1.)]
+    #[case("nelson", "", 0.)]
+    #[case("", "neilsen", 0.)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        let act = tversky(s1, s2);
+        let ok = is_close(act, exp);
+        assert!(ok, "tversky({}, {}) is {}, not {}", s1, s2, act, exp);
+    }
+
+    proptest! {
+        #[test]
+        fn sorensen_dice_equivalence(s1 in ".*", s2 in ".*") {
+            let tv = Tversky{alpha: 0.5, beta: 0.5, ..Default::default()};
+            let tv_res = tv.for_str(&s1, &s2);
+            let sd_res = sorensen_dice(&s1, &s2);
+            prop_assert!(is_close(tv_res.nval(), sd_res));
+        }
+
+        #[test]
+        fn tanimoto_equivalence(s1 in ".*", s2 in ".*") {
+            let tv = Tversky{alpha: 1., beta: 1., ..Default::default()};
+            let tv_res = tv.for_str(&s1, &s2);
+            let sd_res = jaccard(&s1, &s2);
+            prop_assert!(is_close(tv_res.nval(), sd_res));
+        }
+    }
+}
diff --git a/crates/textdistance/src/algorithms/yujian_bo.rs b/crates/textdistance/src/algorithms/yujian_bo.rs
new file mode 100644
index 0000000..f27243d
--- /dev/null
+++ b/crates/textdistance/src/algorithms/yujian_bo.rs
@@ -0,0 +1,64 @@
+//! Yujian-Bo distance
+use super::levenshtein::Levenshtein;
+use crate::{Algorithm, Result};
+
+/// [Yujian-Bo distance] is a normalization of [`Levenshtein`].
+///
+/// [Yujian-Bo distance]: https://ieeexplore.ieee.org/document/4160958
+#[derive(Default)]
+pub struct YujianBo {
+    /// Algorithm instance to use for calculating Levenshtein distance.
+    pub levenshtein: Levenshtein,
+}
+
+impl Algorithm<f64> for YujianBo {
+    fn for_iter<C, E>(&self, s1: C, s2: C) -> Result<f64>
+    where
+        C: Iterator<Item = E>,
+        E: Eq + core::hash::Hash,
+    {
+        let lev = self.levenshtein.for_iter(s1, s2);
+        let dc: usize = self.levenshtein.del_cost;
+        let ic: usize = self.levenshtein.ins_cost;
+        let lval = lev.val();
+        let res = if lval == 0 {
+            0.0
+        } else {
+            (2 * lval) as f64 / (lev.len1 * dc + lev.len2 * ic + lval) as f64
+        };
+        Result {
+            abs: res,
+            is_distance: true,
+            max: 1.0,
+            len1: lev.len1,
+            len2: lev.len2,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::str::yujian_bo;
+    use assert2::assert;
+    use rstest::rstest;
+
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-5
+    }
+
+    #[rstest]
+    #[case("", "", 0.0)]
+    // parity with abydos
+    #[case("a", "", 1.0)]
+    #[case("", "a", 1.0)]
+    #[case("bc", "", 1.0)]
+    #[case("", "bc", 1.0)]
+    #[case("bc", "bc", 0.0)]
+    #[case("bcd", "fgh", 0.6666666666666666)]
+    #[case("ATCG", "TAGC", 0.5454545454545454)]
+    #[case("cat", "hat", 0.285714285714)]
+    #[case("aluminum", "Catalan", 0.6363636363636364)]
+    fn function_str(#[case] s1: &str, #[case] s2: &str, #[case] exp: f64) {
+        assert!(is_close(yujian_bo(s1, s2), exp));
+    }
+}
diff --git a/crates/textdistance/src/counter.rs b/crates/textdistance/src/counter.rs
new file mode 100644
index 0000000..cb3131a
--- /dev/null
+++ b/crates/textdistance/src/counter.rs
@@ -0,0 +1,154 @@
+#![cfg(feature = "std")]
+use core::hash::Hash;
+use std::collections::HashMap;
+
+/// Multiset container inspired by Python's `collections.Counter`.
+pub struct Counter<K> {
+    map: HashMap<K, usize>,
+}
+
+impl<K> Counter<K>
+where
+    K: Hash + Eq,
+{
+    /// make an empty Counter
+    pub fn new() -> Counter<K> {
+        Counter {
+            map: HashMap::new(),
+        }
+    }
+
+    /// Create a counter from a sequence.
+    pub fn from_iter<I>(iter: I) -> Counter<K>
+    where
+        I: IntoIterator<Item = K>,
+    {
+        let mut counter = Counter::new();
+        counter.update(iter);
+        counter
+    }
+
+    /// Merge items from a sequence into the Counter
+    pub fn update<I>(&mut self, iter: I)
+    where
+        I: IntoIterator<Item = K>,
+    {
+        for item in iter {
+            let entry = self.map.entry(item).or_insert(0);
+            *entry += 1;
+        }
+    }
+
+    /// How many items there are in total in the Counter (sum of all counts).
+    pub fn count(&self) -> usize {
+        self.map.values().sum()
+    }
+
+    /// Unique elements in the set
+    pub fn keys(&self) -> impl Iterator<Item = &K> {
+        self.map.keys()
+    }
+
+    /// Iterator over the multiplicity of each unique element, without the elements themselves.
+    pub fn values(&self) -> impl Iterator<Item = &usize> {
+        self.map.values()
+    }
+
+    /// Get the stored count for the given key, if present.
+    pub fn get(&self, key: &K) -> Option<&usize> {
+        self.map.get(key)
+    }
+
+    /// Merge two counters together.
+    pub fn merge<'a>(&'a self, rhs: &'a Counter<K>) -> Counter<&'a K> {
+        let mut result: HashMap<&K, usize> = HashMap::new();
+        for (key, lhs_count) in &self.map {
+            let rhs_count = rhs.map.get(key).unwrap_or(&0);
+            result.insert(key, *lhs_count + rhs_count);
+        }
+        for (key, rhs_count) in &rhs.map {
+            if !self.map.contains_key(key) {
+                result.insert(key, *rhs_count);
+            }
+        }
+        Counter { map: result }
+    }
+
+    /// How many there are common items in the given multisets.
+    pub fn intersect_count(&self, rhs: &Counter<K>) -> usize {
+        let mut result = 0;
+        for (key, lhs_count) in &self.map {
+            if let Some(rhs_count) = rhs.map.get(key) {
+                result += lhs_count.min(rhs_count);
+            }
+        }
+        result
+    }
+
+    /// How many there are items in total in both multisets.
+    pub fn union_count(&self, rhs: &Counter<K>) -> usize {
+        let mut result = 0;
+        for (key, lhs_count) in &self.map {
+            let rhs_count = rhs.map.get(key).unwrap_or(&0);
+            result += lhs_count.max(rhs_count);
+        }
+        for (key, rhs_count) in &rhs.map {
+            if !self.map.contains_key(key) {
+                result += rhs_count;
+            }
+        }
+        result
+    }
+
+    /// How many there are item in left that aren't in the right
+    pub fn diff_count(&self, rhs: &Counter<K>) -> usize {
+        let mut result = 0;
+        for (key, lhs_count) in &self.map {
+            let rhs_count = rhs.map.get(key).unwrap_or(&0);
+            if lhs_count > rhs_count {
+                result += lhs_count - rhs_count;
+            }
+        }
+        result
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use assert2::assert;
+    use rstest::rstest;
+
+    pub fn eq<K: Hash + Eq>(lhs: &Counter<K>, rhs: &Counter<K>) -> bool {
+        for (key, lhs_count) in &lhs.map {
+            if let Some(rhs_count) = rhs.map.get(key) {
+                if lhs_count != rhs_count {
+                    return false;
+                }
+            } else {
+                return false;
+            }
+        }
+        for (key, rhs_count) in &rhs.map {
+            if let Some(lhs_count) = lhs.map.get(key) {
+                if lhs_count != rhs_count {
+                    return false;
+                }
+            } else {
+                return false;
+            }
+        }
+        true
+    }
+
+    #[rstest]
+    fn smoke() {
+        let c1 = Counter::from_iter(1..=5);
+        let c2 = Counter::from_iter(3..=7);
+        assert!(eq(&c1, &c1));
+        assert!(!eq(&c1, &c2));
+        // assert!(eq(c1.intersect(&c2), &Counter::from_iter(3..=5)));
+        assert!(c1.intersect_count(&c2) == 3);
+        assert!(c1.union_count(&c2) == 7);
+    }
+}
diff --git a/crates/textdistance/src/lib.rs b/crates/textdistance/src/lib.rs
new file mode 100644
index 0000000..bc13d93
--- /dev/null
+++ b/crates/textdistance/src/lib.rs
@@ -0,0 +1,253 @@
+#![cfg_attr(not(any(feature = "std", test)), no_std)]
+#![doc = include_str!("../README.md")]
+#![deny(missing_docs)]
+#![deny(clippy::all, clippy::pedantic)]
+#![allow(
+    clippy::cast_precision_loss,
+    clippy::must_use_candidate,
+    clippy::similar_names,
+    clippy::unreadable_literal,
+    clippy::doc_markdown,
+    clippy::wildcard_imports
+)]
+
+extern crate alloc;
+
+mod algorithm;
+mod counter;
+mod result;
+
+pub mod nstr;
+pub mod str;
+
+mod algorithms {
+    pub mod bag;
+    pub mod cosine;
+    pub mod damerau_levenshtein;
+    pub mod entropy_ncd;
+    pub mod hamming;
+    pub mod jaccard;
+    pub mod jaro;
+    pub mod jaro_winkler;
+    pub mod lcsseq;
+    pub mod lcsstr;
+    pub mod length;
+    pub mod levenshtein;
+    pub mod lig3;
+    pub mod mlipns;
+    pub mod overlap;
+    pub mod prefix;
+    pub mod ratcliff_obershelp;
+    pub mod roberts;
+    pub mod sift4_common;
+    pub mod sift4_simple;
+    pub mod smith_waterman;
+    pub mod sorensen_dice;
+    pub mod suffix;
+    pub mod tversky;
+    pub mod yujian_bo;
+}
+
+pub use self::algorithm::Algorithm;
+#[cfg(feature = "std")]
+pub use self::algorithms::bag::Bag;
+#[cfg(feature = "std")]
+pub use self::algorithms::cosine::Cosine;
+#[cfg(feature = "std")]
+pub use self::algorithms::damerau_levenshtein::DamerauLevenshtein;
+#[cfg(feature = "std")]
+pub use self::algorithms::entropy_ncd::EntropyNCD;
+pub use self::algorithms::hamming::Hamming;
+#[cfg(feature = "std")]
+pub use self::algorithms::jaccard::Jaccard;
+pub use self::algorithms::jaro::Jaro;
+pub use self::algorithms::jaro_winkler::JaroWinkler;
+pub use self::algorithms::lcsseq::LCSSeq;
+pub use self::algorithms::lcsstr::LCSStr;
+pub use self::algorithms::length::Length;
+pub use self::algorithms::levenshtein::Levenshtein;
+pub use self::algorithms::lig3::LIG3;
+pub use self::algorithms::mlipns::MLIPNS;
+#[cfg(feature = "std")]
+pub use self::algorithms::overlap::Overlap;
+pub use self::algorithms::prefix::Prefix;
+pub use self::algorithms::ratcliff_obershelp::RatcliffObershelp;
+#[cfg(feature = "std")]
+pub use self::algorithms::roberts::Roberts;
+pub use self::algorithms::sift4_common::Sift4Common;
+pub use self::algorithms::sift4_simple::Sift4Simple;
+pub use self::algorithms::smith_waterman::SmithWaterman;
+#[cfg(feature = "std")]
+pub use self::algorithms::sorensen_dice::SorensenDice;
+pub use self::algorithms::suffix::Suffix;
+#[cfg(feature = "std")]
+pub use self::algorithms::tversky::Tversky;
+pub use self::algorithms::yujian_bo::YujianBo;
+pub use self::result::Result;
+
+#[cfg(test)]
+mod tests {
+    #![allow(clippy::float_cmp)]
+
+    use super::*;
+    use assert2::assert;
+    use proptest::prelude::*;
+    use rstest::rstest;
+
+    const ALGS: usize = 8;
+
+    fn get_result(alg: usize, s1: &str, s2: &str) -> Result<usize> {
+        match alg {
+            1 => Hamming::default().for_str(s1, s2),
+            2 => LCSSeq::default().for_str(s1, s2),
+            3 => LCSStr::default().for_str(s1, s2),
+            4 => RatcliffObershelp::default().for_str(s1, s2),
+            5 => Levenshtein::default().for_str(s1, s2),
+            #[cfg(feature = "std")]
+            6 => DamerauLevenshtein::default().for_str(s1, s2),
+            7 => Sift4Simple::default().for_str(s1, s2),
+            8 => MLIPNS::default().for_str(s1, s2),
+            9 => Prefix::default().for_str(s1, s2),
+            10 => Suffix::default().for_str(s1, s2),
+            11 => Length::default().for_str(s1, s2),
+            12 => Bag::default().for_str(s1, s2),
+            13 => SmithWaterman::default().for_str(s1, s2),
+            14 => Sift4Common::default().for_str(s1, s2),
+            _ => panic!("there are not so many algorithms!"),
+        }
+    }
+
+    fn get_result_f64(alg: usize, s1: &str, s2: &str) -> Result<f64> {
+        match alg {
+            1 => Jaro::default().for_str(s1, s2),
+            2 => JaroWinkler::default().for_str(s1, s2),
+            3 => YujianBo::default().for_str(s1, s2),
+            4 => Jaccard::default().for_str(s1, s2),
+            5 => SorensenDice::default().for_str(s1, s2),
+            6 => Tversky::default().for_str(s1, s2),
+            7 => Overlap::default().for_str(s1, s2),
+            8 => Cosine::default().for_str(s1, s2),
+            9 => EntropyNCD::default().for_str(s1, s2),
+            10 => LIG3::default().for_str(s1, s2),
+            11 => Roberts::default().for_str(s1, s2),
+            _ => panic!("there are not so many algorithms!"),
+        }
+    }
+
+    #[rstest]
+    #[case::hamming(1)]
+    #[case::lcsseq(2)]
+    #[case::lcsstr(3)]
+    #[case::ratcliff_obershelp(4)]
+    #[case::levenshtein(5)]
+    #[case::damerau_levenshtein(6)]
+    #[case::sift4_simple(7)]
+    #[case::mlipns(8)]
+    #[case::prefix(9)]
+    #[case::suffix(10)]
+    #[case::length(11)]
+    #[case::bag(12)]
+    #[case::smith_waterman(13)]
+    #[case::sift4_common(14)]
+    fn basic_usize(#[case] alg: usize) {
+        let empty_res = get_result(alg, "", "");
+        assert!(empty_res.dist() == 0);
+        if alg != 8 {
+            assert!(get_result(alg, "ab", "cde").dist() > 0);
+            assert!(get_result(alg, "ab", "cde").ndist() > 0.);
+        }
+        if alg != 11 {
+            assert!(get_result(alg, "spam", "qwer").sim() == 0);
+            assert!(get_result(alg, "spam", "qwer").nsim() == 0.);
+        }
+        assert!(empty_res.ndist() == 0.);
+        assert!(empty_res.nsim() == 1.);
+    }
+
+    #[rstest]
+    #[case::jaro(1)]
+    #[case::jaro_winkler(2)]
+    #[case::yujian_bo(3)]
+    #[case::jaccard(4)]
+    #[case::sorensen_dice(5)]
+    #[case::tversky(6)]
+    #[case::overlap(7)]
+    #[case::cosine(8)]
+    #[case::entropy_ncd(9)]
+    #[case::lig3(10)]
+    #[case::roberts(11)]
+    fn basic_f64(#[case] alg: usize) {
+        let empty_res = get_result_f64(alg, "", "");
+        assert!(get_result_f64(alg, "ab", "cde").ndist() > 0.);
+        if alg != 3 && alg != 9 {
+            assert!(get_result_f64(alg, "spam", "qwer").nsim() == 0.);
+        }
+        assert!(empty_res.ndist() == 0.);
+        assert!(empty_res.nsim() == 1.);
+        assert!(empty_res.max == 1.);
+    }
+
+    fn is_close(a: f64, b: f64) -> bool {
+        (a - b).abs() < 1E-9
+    }
+
+    proptest! {
+        #[test]
+        fn prop(s1 in ".*", s2 in ".*") {
+            for alg in 1..ALGS {
+                let res = get_result(alg, &s1, &s2);
+                let d = res.dist();
+                let s = res.sim();
+
+                let nd = res.ndist();
+                prop_assert!(nd.is_finite());
+                prop_assert!(nd >= 0.);
+                prop_assert!(nd <= 1.);
+
+                let ns = res.nsim();
+                prop_assert!(ns.is_finite());
+                prop_assert!(ns >= 0.);
+                prop_assert!(ns <= 1.);
+
+                prop_assert!(is_close(ns + nd, 1.), "{} + {} == 1", nd, ns);
+
+                if d < s {
+                    prop_assert!(nd < ns, "{} < {}", nd, ns);
+                } else if d > s {
+                    prop_assert!(nd > ns, "{} > {}", nd, ns);
+                } else if !s1.is_empty() && !s2.is_empty() {
+                    prop_assert!(nd == ns, "{} == {}", nd, ns);
+                }
+                prop_assert!(res.val() == d || res.val() == s);
+
+                prop_assert_eq!(res.len1, s1.chars().count());
+                prop_assert_eq!(res.len2, s2.chars().count());
+                prop_assert!(res.max >= res.len1.min(res.len2));
+            }
+        }
+
+        #[test]
+        fn prop_same(s in ".*") {
+            for alg in 1..ALGS {
+                let res = get_result(alg, &s, &s);
+                let nd = res.ndist();
+                prop_assert_eq!(nd, 0., "{}: {} == 0.0", alg, nd);
+                let ns = res.nsim();
+                prop_assert_eq!(ns, 1., "{}: {} == 1.0", alg, ns);
+            }
+        }
+
+        // strings should have lower distance if you add the same prefix to them
+        fn prop_prefix(prefix in ".+", s1 in ".+", s2 in ".+") {
+            for alg in 1..ALGS {
+                let r1 = get_result(alg, &s1, &s2).ndist();
+                let mut p1 = prefix.clone();
+                let mut p2 = prefix.clone();
+                p1.push_str(&s1);
+                p2.push_str(&s2);
+                let r2 = get_result(alg, &p1, &p2).ndist();
+                prop_assert!(r1 > r2, "{}: {} > {}", alg, r1, r2);
+            }
+        }
+    }
+}
diff --git a/crates/textdistance/src/main.rs b/crates/textdistance/src/main.rs
new file mode 100644
index 0000000..c971b7e
--- /dev/null
+++ b/crates/textdistance/src/main.rs
@@ -0,0 +1,52 @@
+use core::borrow::Borrow;
+
+fn main() {
+    let args: Vec<String> = std::env::args().collect();
+    let alg_name = args.get(1).expect("algorithm name is required");
+    let s1 = args.get(2).expect("first text is required");
+    let s2 = args.get(3).expect("second text is required");
+
+    #[allow(clippy::cast_precision_loss)]
+    let res: f64 = match alg_name.to_lowercase().borrow() {
+        #[cfg(feature = "std")]
+        "damerau_levenshtein" => textdistance::str::damerau_levenshtein(s1, s2) as f64,
+        #[cfg(feature = "std")]
+        "damerau_levenshtein_restricted" => {
+            textdistance::str::damerau_levenshtein_restricted(s1, s2) as f64
+        }
+        "hamming" => textdistance::str::hamming(s1, s2) as f64,
+        "lcsseq" => textdistance::str::lcsseq(s1, s2) as f64,
+        "lcsstr" => textdistance::str::lcsstr(s1, s2) as f64,
+        "levenshtein" => textdistance::str::levenshtein(s1, s2) as f64,
+        "ratcliff_obershelp" => textdistance::str::ratcliff_obershelp(s1, s2),
+        "sift4_simple" => textdistance::str::sift4_simple(s1, s2) as f64,
+        "sift4_common" => textdistance::str::sift4_common(s1, s2) as f64,
+        "jaro" => textdistance::str::jaro(s1, s2),
+        "jaro_winkler" => textdistance::str::jaro_winkler(s1, s2),
+        "yujian_bo" => textdistance::str::yujian_bo(s1, s2),
+        "mlipns" => textdistance::str::mlipns(s1, s2) as f64,
+        #[cfg(feature = "std")]
+        "bag" => textdistance::str::bag(s1, s2) as f64,
+        "lig3" => textdistance::str::lig3(s1, s2),
+        #[cfg(feature = "std")]
+        "jaccard" => textdistance::str::jaccard(s1, s2),
+        #[cfg(feature = "std")]
+        "sorensen_dice" => textdistance::str::sorensen_dice(s1, s2),
+        #[cfg(feature = "std")]
+        "tversky" => textdistance::str::tversky(s1, s2),
+        #[cfg(feature = "std")]
+        "overlap" => textdistance::str::overlap(s1, s2),
+        #[cfg(feature = "std")]
+        "cosine" => textdistance::str::cosine(s1, s2),
+        "prefix" => textdistance::str::prefix(s1, s2) as f64,
+        "suffix" => textdistance::str::suffix(s1, s2) as f64,
+        "length" => textdistance::str::length(s1, s2) as f64,
+        "smith_waterman" => textdistance::str::smith_waterman(s1, s2) as f64,
+        #[cfg(feature = "std")]
+        "entropy_ncd" => textdistance::str::entropy_ncd(s1, s2),
+        #[cfg(feature = "std")]
+        "roberts" => textdistance::str::roberts(s1, s2),
+        _ => panic!("unknown algorithm name"),
+    };
+    println!("{res}");
+}
diff --git a/crates/textdistance/src/nstr.rs b/crates/textdistance/src/nstr.rs
new file mode 100644
index 0000000..fdf0d93
--- /dev/null
+++ b/crates/textdistance/src/nstr.rs
@@ -0,0 +1,328 @@
+//! Helper functions providing the default normalized implementation of distance/similarity algorithms for strings.
+//!
+//! See also [`textdistance::str`](super::str) for non-normalized distance.
+use super::*;
+
+/// Calculate normalized unrestricted [Damerau-Levenshtein distance][1] for two strings.
+///
+/// A wrapper for [`DamerauLevenshtein`].
+///
+///     use textdistance::nstr::damerau_levenshtein;
+///     assert!(damerau_levenshtein("abc", "acbd") == 2./4.); // "bc" swapped and "d" added
+///
+/// [1]: https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance
+#[cfg(feature = "std")]
+pub fn damerau_levenshtein(s1: &str, s2: &str) -> f64 {
+    DamerauLevenshtein::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized restricted [Damerau-Levenshtein distance][1] for two strings.
+///
+/// A wrapper for [`DamerauLevenshtein`].
+///
+///     use textdistance::nstr::damerau_levenshtein;
+///     assert!(damerau_levenshtein("abc", "acbd") == 2./4.); // "bc" swapped and "d" added
+///
+/// [1]: https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance
+#[cfg(feature = "std")]
+pub fn damerau_levenshtein_restricted(s1: &str, s2: &str) -> f64 {
+    let a = DamerauLevenshtein {
+        restricted: true,
+        ..Default::default()
+    };
+    a.for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [Hamming distance][1] for two strings.
+///
+/// A wrapper for [`Hamming`].
+///
+///     use textdistance::nstr::hamming;
+///     assert!(hamming("abc", "acbd") == 3./4.); // only "a" matches
+///
+/// [1]: https://en.wikipedia.org/wiki/Hamming_distance
+pub fn hamming(s1: &str, s2: &str) -> f64 {
+    Hamming::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized length of the [Longest Common SubSequence][1] for two strings.
+///
+/// A wrapper for [`LCSSeq`].
+///
+///     use textdistance::nstr::lcsseq;
+///     assert!(lcsseq("abcdef", "xbcegf") == 4./6.); // "bcef"
+///
+/// [1]: https://en.wikipedia.org/wiki/Longest_common_subsequence
+pub fn lcsseq(s1: &str, s2: &str) -> f64 {
+    LCSSeq::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized length of the [Longest Common SubString][1] for two strings.
+///
+/// A wrapper for [`LCSStr`].
+///
+///     use textdistance::nstr::lcsstr;
+///     assert!(lcsstr("abcdef", "xbcegf") == 2./6.); // "bc"
+///
+/// [1]: https://en.wikipedia.org/wiki/Longest_common_substring
+pub fn lcsstr(s1: &str, s2: &str) -> f64 {
+    LCSStr::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [Levenshtein distance][1] for two strings.
+///
+/// A wrapper for [`Levenshtein`].
+///
+///     use textdistance::nstr::levenshtein;
+///     assert!(levenshtein("abc", "acbd") == 2./4.); // add "c" at 2 and then swap "c" with "d" at 4
+///
+/// [1]: https://en.wikipedia.org/wiki/Levenshtein_distance
+pub fn levenshtein(s1: &str, s2: &str) -> f64 {
+    Levenshtein::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized [Ratcliff-Obershelp similarity][1] for two strings.
+///
+/// A wrapper for [`RatcliffObershelp`].
+///
+///     use textdistance::nstr::ratcliff_obershelp;
+///     assert_eq!(ratcliff_obershelp("abc", "acbd"), 0.5714285714285714);
+///
+/// [1]: https://en.wikipedia.org/wiki/Gestalt_pattern_matching
+pub fn ratcliff_obershelp(s1: &str, s2: &str) -> f64 {
+    RatcliffObershelp::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [Sift4 distance][1] for two strings using the "simplest" algorithm.
+///
+/// A wrapper for [`Sift4Simple`].
+///
+///     use textdistance::nstr::sift4_simple;
+///     assert!(sift4_simple("abc", "acbd") == 2./4.);
+///
+/// [1]: https://siderite.dev/blog/super-fast-and-accurate-string-distance.html
+pub fn sift4_simple(s1: &str, s2: &str) -> f64 {
+    Sift4Simple::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [Sift4 distance][1] for two strings using the "common" algorithm.
+///
+/// A wrapper for [`Sift4Common`].
+///
+///     use textdistance::nstr::sift4_common;
+///     assert!(sift4_common("abc", "acbd") == 2./4.);
+///
+/// [1]: https://siderite.dev/blog/super-fast-and-accurate-string-distance.html
+pub fn sift4_common(s1: &str, s2: &str) -> f64 {
+    Sift4Common::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized [Jaro similarity][1] for two strings.
+///
+/// A wrapper for [`Jaro`].
+///
+///     use textdistance::nstr::jaro;
+///     assert_eq!(jaro("abc", "acbd"), 0.8055555555555555);
+///
+/// [1]: https://en.wikipedia.org/wiki/Jaro%E2%80%93Winkler_distance#Jaro_similarity
+pub fn jaro(s1: &str, s2: &str) -> f64 {
+    Jaro::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized [Jaro-Winkler similarity][1] for two strings.
+///
+/// A wrapper for [`JaroWinkler`].
+///
+///     use textdistance::nstr::jaro_winkler;
+///     assert_eq!(jaro_winkler("abc", "acbd"), 0.825);
+///
+/// [1]: https://en.wikipedia.org/wiki/Jaro%E2%80%93Winkler_distance
+pub fn jaro_winkler(s1: &str, s2: &str) -> f64 {
+    JaroWinkler::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [Yujian-Bo normalization][1] of [Levenshtein] for two strings.
+///
+/// A wrapper for [`YujianBo`].
+///
+///     use textdistance::nstr::yujian_bo;
+///     assert_eq!(yujian_bo("abc", "acbd"), 0.4444444444444444);
+///
+/// [1]: https://ieeexplore.ieee.org/document/4160958
+pub fn yujian_bo(s1: &str, s2: &str) -> f64 {
+    YujianBo::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [MLIPNS normalization][1] of [Hamming] for two strings.
+///
+/// A wrapper for [`MLIPNS`].
+///
+///     use textdistance::nstr::mlipns;
+///     assert!(mlipns("abc", "acbd") == 0.);
+///
+/// [1]: https://www.sial.iias.spb.su/files/386-386-1-PB.pdf
+pub fn mlipns(s1: &str, s2: &str) -> f64 {
+    MLIPNS::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [Bag distance][1] for two strings.
+///
+/// A wrapper for [`Bag`].
+///
+///     use textdistance::nstr::bag;
+///     assert!(bag("abc", "acbd") == 1./4.);
+///
+/// [1]: http://www-db.disi.unibo.it/research/papers/SPIRE02.pdf
+#[cfg(feature = "std")]
+pub fn bag(s1: &str, s2: &str) -> f64 {
+    Bag::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [LIG3 normalization][1] of [Hamming] by [Levenshtein] for two strings.
+///
+/// A wrapper for [`LIG3`].
+///
+///     use textdistance::nstr::lig3;
+///     assert_eq!(lig3("abc", "acbd"), 0.5);
+///
+/// [1]: https://github.com/chrislit/abydos/blob/master/abydos/distance/_lig3.py
+pub fn lig3(s1: &str, s2: &str) -> f64 {
+    LIG3::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized [Jaccard similarity][1] for two strings.
+///
+/// A wrapper for [`Jaccard`].
+///
+///     use textdistance::nstr::jaccard;
+///     assert_eq!(jaccard("abc", "acbd"), 0.75);
+///
+/// [1]: https://en.wikipedia.org/wiki/Jaccard_index
+#[cfg(feature = "std")]
+pub fn jaccard(s1: &str, s2: &str) -> f64 {
+    Jaccard::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized [Sørensen–Dice similarity][1] for two strings.
+///
+/// A wrapper for [`SorensenDice`].
+///
+///     use textdistance::nstr::sorensen_dice;
+///     assert_eq!(sorensen_dice("abc", "acbd"), 0.8571428571428571);
+///
+/// [1]: https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient
+#[cfg(feature = "std")]
+pub fn sorensen_dice(s1: &str, s2: &str) -> f64 {
+    SorensenDice::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized [Tversky similarity][1] for two strings.
+///
+/// A wrapper for [`Tversky`].
+///
+///     use textdistance::nstr::tversky;
+///     assert_eq!(tversky("abc", "acbd"), 0.75);
+///
+/// [1]: https://en.wikipedia.org/wiki/Tversky_index
+#[cfg(feature = "std")]
+pub fn tversky(s1: &str, s2: &str) -> f64 {
+    Tversky::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized [Overlap similarity][1] for two strings.
+///
+/// A wrapper for [`Overlap`].
+///
+///     use textdistance::nstr::overlap;
+///     assert_eq!(overlap("abc", "acbd"), 1.0);
+///
+/// [1]: https://en.wikipedia.org/wiki/Overlap_coefficient
+#[cfg(feature = "std")]
+pub fn overlap(s1: &str, s2: &str) -> f64 {
+    Overlap::default().for_str(s1, s2).nval()
+}
+
+/// Calculate the normalized [Cosine similarity][1] for two strings.
+///
+/// A wrapper for [`Cosine`].
+///
+///     use textdistance::nstr::cosine;
+///     assert_eq!(cosine("abc", "acbd"), 0.8660254037844387);
+///
+/// [1]: https://en.wikipedia.org/wiki/Cosine_similarity
+#[cfg(feature = "std")]
+pub fn cosine(s1: &str, s2: &str) -> f64 {
+    Cosine::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized common prefix length for two strings.
+///
+/// A wrapper for [`Prefix`].
+///
+///     use textdistance::nstr::prefix;
+///     assert!(prefix("abc", "acbd") == 1./4.); // "a"
+///
+pub fn prefix(s1: &str, s2: &str) -> f64 {
+    Prefix::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized common suffix length for two strings.
+///
+/// A wrapper for [`Suffix`].
+///
+///     use textdistance::nstr::suffix;
+///     assert!(suffix("abcd", "axcd") == 2./4.); // "cd"
+///
+pub fn suffix(s1: &str, s2: &str) -> f64 {
+    Suffix::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized length distance for two strings.
+///
+/// A wrapper for [`Length`].
+///
+///     use textdistance::nstr::length;
+///     assert!(length("abcd", "axc") == (4. - 3.) / 4.);
+///
+pub fn length(s1: &str, s2: &str) -> f64 {
+    Length::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [Smith-Waterman similarity] for two strings.
+///
+/// A wrapper for [`SmithWaterman`].
+///
+///     use textdistance::nstr::smith_waterman;
+///     assert!(smith_waterman("abc", "acbd") == 1./4.);
+///
+/// [Smith-Waterman similarity]: https://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
+pub fn smith_waterman(s1: &str, s2: &str) -> f64 {
+    SmithWaterman::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [Entropy]-based [normalized compression distance][1] for two strings.
+///
+/// A wrapper for [`EntropyNCD`].
+///
+///     use textdistance::nstr::entropy_ncd;
+///     assert_eq!(entropy_ncd("abc", "acbd"), 0.12174985473119697);
+///
+/// [1]: https://en.wikipedia.org/wiki/Normalized_compression_distance
+/// [Entropy]: https://en.wikipedia.org/wiki/Entropy_(information_theory)
+#[cfg(feature = "std")]
+pub fn entropy_ncd(s1: &str, s2: &str) -> f64 {
+    EntropyNCD::default().for_str(s1, s2).nval()
+}
+
+/// Calculate normalized [Roberts similarity] for two strings.
+///
+/// A wrapper for [`Roberts`].
+///
+///     use textdistance::nstr::roberts;
+///     assert_eq!(roberts("abc", "acbd"), 0.8571428571428571);
+///
+/// [Roberts similarity]: https://github.com/chrislit/abydos/blob/master/abydos/distance/_roberts.py
+#[cfg(feature = "std")]
+pub fn roberts(s1: &str, s2: &str) -> f64 {
+    Roberts::default().for_str(s1, s2).nval()
+}
diff --git a/crates/textdistance/src/result.rs b/crates/textdistance/src/result.rs
new file mode 100644
index 0000000..6b5b65c
--- /dev/null
+++ b/crates/textdistance/src/result.rs
@@ -0,0 +1,187 @@
+/// Result of a distance/similarity algorithm.
+pub struct Result<R> {
+    /// Indicates if it is a distance or a similarity metric.
+    pub(crate) is_distance: bool,
+
+    /// Absolute raw value of the metric.
+    pub(crate) abs: R,
+
+    /// Maximum possible value for the input of the given length.
+    pub(crate) max: R,
+
+    /// Length of the first analyzed sequence.
+    pub(crate) len1: usize,
+
+    /// Length of the second analyzed sequence.
+    pub(crate) len2: usize,
+}
+
+impl Result<usize> {
+    /// Raw value of the metric.
+    ///
+    /// It is equivalent to `dist` for distance metrics
+    /// and to `sim` for similarity metrics.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_str("abc", "acbd");
+    ///     assert!(res.val() == 3);
+    ///
+    pub fn val(&self) -> usize {
+        self.abs
+    }
+
+    /// Absolute distance.
+    ///
+    /// A non-negative number showing how different the two sequences are.
+    /// Two exactly the same sequences have the distance 0.
+    ///
+    /// The highest possible number varies based on the length of the input strings.
+    /// Most often, each increment of this value indicates one symbol that differs
+    /// in the input sequences.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_str("abc", "acbd");
+    ///     assert!(res.dist() == 3);
+    ///
+    pub fn dist(&self) -> usize {
+        if self.is_distance {
+            self.abs
+        } else {
+            self.max - self.abs
+        }
+    }
+
+    /// Absolute similarity.
+    ///
+    /// A non-negative number showing how similar the two sequences are.
+    /// Two absolutely different sequences have the similarity 0.
+    ///
+    /// The highest possible number varies based on the length of the input strings.
+    /// Most often, each increment of this value indicates one symbol that is the same
+    /// in both sequences.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_str("abc", "acbd");
+    ///     assert!(res.sim() == 1); // "a"
+    ///
+    pub fn sim(&self) -> usize {
+        if self.is_distance {
+            self.max - self.abs
+        } else {
+            self.abs
+        }
+    }
+
+    /// Normalized raw value of the metric.
+    ///
+    /// It is equivalent to `ndist` for distance metrics
+    /// and to `nsim` for similarity metrics.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_str("abc", "acbd");
+    ///     assert!(res.nval() == 3.0 / 4.0);
+    ///
+    pub fn nval(&self) -> f64 {
+        if self.is_distance {
+            self.ndist()
+        } else {
+            self.nsim()
+        }
+    }
+
+    /// Normalized distance.
+    ///
+    /// A number from 0.0 to 1.0 showing how different the two sequences are.
+    /// 0.0 indicates that the sequences are the same,
+    /// and 1.0 indicates that the sequences are very different.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_str("abc", "acbd");
+    ///     assert!(res.ndist() == 3.0 / 4.0);
+    ///
+    pub fn ndist(&self) -> f64 {
+        if self.max == 0 {
+            self.dist() as f64
+        } else {
+            self.dist() as f64 / self.max as f64
+        }
+    }
+
+    /// Normalized similarity.
+    ///
+    /// A number from 0.0 to 1.0 showing how similar the two sequences are.
+    /// 0.0 indicates that the sequences are very different,
+    /// and 1.0 indicates that the sequences are the same.
+    ///
+    ///     use textdistance::{Algorithm, Hamming};
+    ///     let h = Hamming::default();
+    ///     let res = h.for_str("abc", "acbd");
+    ///     assert!(res.nsim() == 1.0 / 4.0);
+    ///
+    pub fn nsim(&self) -> f64 {
+        if self.max == 0 {
+            1.0
+        } else {
+            self.sim() as f64 / self.max as f64
+        }
+    }
+}
+
+impl Result<f64> {
+    /// Normalized raw value of the metric.
+    ///
+    /// It is equivalent to `ndist` for distance metrics
+    /// and to `nsim` for similarity metrics.
+    ///
+    ///     use textdistance::{Algorithm, Jaro};
+    ///     let h = Jaro::default();
+    ///     let res = h.for_str("test", "tset");
+    ///     assert_eq!(res.nval(), 0.9166666666666666);
+    ///
+    pub fn nval(&self) -> f64 {
+        self.abs
+    }
+
+    /// Normalized distance.
+    ///
+    /// A number from 0.0 to 1.0 showing how different the two sequences are.
+    /// 0.0 indicates that the sequences are the same,
+    /// and 1.0 indicates that the sequences are very different.
+    ///
+    ///     use textdistance::{Algorithm, Jaro};
+    ///     let h = Jaro::default();
+    ///     let res = h.for_str("test", "tset");
+    ///     assert_eq!(res.ndist(), 0.08333333333333337);
+    ///
+    pub fn ndist(&self) -> f64 {
+        if self.is_distance {
+            self.abs
+        } else {
+            self.max - self.abs
+        }
+    }
+
+    /// Normalized similarity.
+    ///
+    /// A number from 0.0 to 1.0 showing how similar the two sequences are.
+    /// 0.0 indicates that the sequences are very different,
+    /// and 1.0 indicates that the sequences are the same.
+    ///
+    ///     use textdistance::{Algorithm, Jaro};
+    ///     let h = Jaro::default();
+    ///     let res = h.for_str("test", "tset");
+    ///     assert_eq!(res.nsim(), 0.9166666666666666);
+    ///
+    pub fn nsim(&self) -> f64 {
+        if self.is_distance {
+            self.max - self.abs
+        } else {
+            self.abs
+        }
+    }
+}
diff --git a/crates/textdistance/src/str.rs b/crates/textdistance/src/str.rs
new file mode 100644
index 0000000..1b3093e
--- /dev/null
+++ b/crates/textdistance/src/str.rs
@@ -0,0 +1,329 @@
+//! Helper functions providing the default implementation of distance/similarity algorithms for strings.
+//!
+//! See also [`textdistance::nstr`](super::nstr) for normalized distance.
+
+use super::*;
+
+/// Calculate unrestricted [Damerau-Levenshtein distance][1] for two strings.
+///
+/// A wrapper for [`DamerauLevenshtein`].
+///
+///     use textdistance::str::damerau_levenshtein;
+///     assert!(damerau_levenshtein("abc", "acbd") == 2); // "bc" swapped and "d" added
+///
+/// [1]: https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance
+#[cfg(feature = "std")]
+pub fn damerau_levenshtein(s1: &str, s2: &str) -> usize {
+    DamerauLevenshtein::default().for_str(s1, s2).val()
+}
+
+/// Calculate restricted [Damerau-Levenshtein distance][1] for two strings.
+///
+/// A wrapper for [`DamerauLevenshtein`].
+///
+///     use textdistance::str::damerau_levenshtein;
+///     assert!(damerau_levenshtein("abc", "acbd") == 2); // "bc" swapped and "d" added
+///
+/// [1]: https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance
+#[cfg(feature = "std")]
+pub fn damerau_levenshtein_restricted(s1: &str, s2: &str) -> usize {
+    let a = DamerauLevenshtein {
+        restricted: true,
+        ..Default::default()
+    };
+    a.for_str(s1, s2).val()
+}
+
+/// Calculate [Hamming distance][1] for two strings.
+///
+/// A wrapper for [`Hamming`].
+///
+///     use textdistance::str::hamming;
+///     assert!(hamming("abc", "acbd") == 3); // only "a" matches
+///
+/// [1]: https://en.wikipedia.org/wiki/Hamming_distance
+pub fn hamming(s1: &str, s2: &str) -> usize {
+    Hamming::default().for_str(s1, s2).val()
+}
+
+/// Calculate the length of the [Longest Common SubSequence][1] for two strings.
+///
+/// A wrapper for [`LCSSeq`].
+///
+///     use textdistance::str::lcsseq;
+///     assert!(lcsseq("abcdef", "xbcegf") == 4); // "bcef"
+///
+/// [1]: https://en.wikipedia.org/wiki/Longest_common_subsequence
+pub fn lcsseq(s1: &str, s2: &str) -> usize {
+    LCSSeq::default().for_str(s1, s2).val()
+}
+
+/// Calculate the length of the [Longest Common SubString][1] for two strings.
+///
+/// A wrapper for [`LCSStr`].
+///
+///     use textdistance::str::lcsstr;
+///     assert!(lcsstr("abcdef", "xbcegf") == 2); // "bc"
+///
+/// [1]: https://en.wikipedia.org/wiki/Longest_common_substring
+pub fn lcsstr(s1: &str, s2: &str) -> usize {
+    LCSStr::default().for_str(s1, s2).val()
+}
+
+/// Calculate [Levenshtein distance][1] for two strings.
+///
+/// A wrapper for [`Levenshtein`].
+///
+///     use textdistance::str::levenshtein;
+///     assert!(levenshtein("abc", "acbd") == 2); // add "c" at 2 and then swap "c" with "d" at 4
+///
+/// [1]: https://en.wikipedia.org/wiki/Levenshtein_distance
+pub fn levenshtein(s1: &str, s2: &str) -> usize {
+    Levenshtein::default().for_str(s1, s2).val()
+}
+
+/// Calculate [Ratcliff-Obershelp normalized similarity][1] for two strings.
+///
+/// A wrapper for [`RatcliffObershelp`].
+///
+///     use textdistance::str::ratcliff_obershelp;
+///     assert_eq!(ratcliff_obershelp("abc", "acbd"), 0.5714285714285714);
+///
+/// [1]: https://en.wikipedia.org/wiki/Gestalt_pattern_matching
+pub fn ratcliff_obershelp(s1: &str, s2: &str) -> f64 {
+    RatcliffObershelp::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [Sift4 distance][1] for two strings using the "simplest" algorithm.
+///
+/// A wrapper for [`Sift4Simple`].
+///
+///     use textdistance::str::sift4_simple;
+///     assert!(sift4_simple("abc", "acbd") == 2);
+///
+/// [1]: https://siderite.dev/blog/super-fast-and-accurate-string-distance.html
+pub fn sift4_simple(s1: &str, s2: &str) -> usize {
+    Sift4Simple::default().for_str(s1, s2).val()
+}
+
+/// Calculate [Sift4 distance][1] for two strings using the "common" algorithm.
+///
+/// A wrapper for [`Sift4Common`].
+///
+///     use textdistance::str::sift4_common;
+///     assert!(sift4_common("abc", "acbd") == 2);
+///
+/// [1]: https://siderite.dev/blog/super-fast-and-accurate-string-distance.html
+pub fn sift4_common(s1: &str, s2: &str) -> usize {
+    Sift4Common::default().for_str(s1, s2).val()
+}
+
+/// Calculate [Jaro normalized similarity][1] for two strings.
+///
+/// A wrapper for [`Jaro`].
+///
+///     use textdistance::str::jaro;
+///     assert_eq!(jaro("abc", "acbd"), 0.8055555555555555);
+///
+/// [1]: https://en.wikipedia.org/wiki/Jaro%E2%80%93Winkler_distance#Jaro_similarity
+pub fn jaro(s1: &str, s2: &str) -> f64 {
+    Jaro::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [Jaro-Winkler normalized similarity][1] for two strings.
+///
+/// A wrapper for [`JaroWinkler`].
+///
+///     use textdistance::str::jaro_winkler;
+///     assert_eq!(jaro_winkler("abc", "acbd"), 0.825);
+///
+/// [1]: https://en.wikipedia.org/wiki/Jaro%E2%80%93Winkler_distance
+pub fn jaro_winkler(s1: &str, s2: &str) -> f64 {
+    JaroWinkler::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [Yujian-Bo normalization][1] of [Levenshtein] for two strings.
+///
+/// A wrapper for [`YujianBo`].
+///
+///     use textdistance::str::yujian_bo;
+///     assert_eq!(yujian_bo("abc", "acbd"), 0.4444444444444444);
+///
+/// [1]: https://ieeexplore.ieee.org/document/4160958
+pub fn yujian_bo(s1: &str, s2: &str) -> f64 {
+    YujianBo::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [MLIPNS normalization][1] of [Hamming] for two strings.
+///
+/// A wrapper for [`MLIPNS`].
+///
+///     use textdistance::str::mlipns;
+///     assert!(mlipns("abc", "acbd") == 0);
+///
+/// [1]: https://www.sial.iias.spb.su/files/386-386-1-PB.pdf
+pub fn mlipns(s1: &str, s2: &str) -> usize {
+    MLIPNS::default().for_str(s1, s2).val()
+}
+
+/// Calculate [Bag distance][1] for two strings.
+///
+/// A wrapper for [`Bag`].
+///
+///     use textdistance::str::bag;
+///     assert!(bag("abc", "acbd") == 1);
+///
+/// [1]: http://www-db.disi.unibo.it/research/papers/SPIRE02.pdf
+#[cfg(feature = "std")]
+pub fn bag(s1: &str, s2: &str) -> usize {
+    Bag::default().for_str(s1, s2).val()
+}
+
+/// Calculate [LIG3 normalization][1] of [Hamming] by [Levenshtein] for two strings.
+///
+/// A wrapper for [`LIG3`].
+///
+///     use textdistance::str::lig3;
+///     assert_eq!(lig3("abc", "acbd"), 0.5);
+///
+/// [1]: https://github.com/chrislit/abydos/blob/master/abydos/distance/_lig3.py
+pub fn lig3(s1: &str, s2: &str) -> f64 {
+    LIG3::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [Jaccard normalized similarity][1] for two strings.
+///
+/// A wrapper for [`Jaccard`].
+///
+///     use textdistance::str::jaccard;
+///     assert_eq!(jaccard("abc", "acbd"), 0.75);
+///
+/// [1]: https://en.wikipedia.org/wiki/Jaccard_index
+#[cfg(feature = "std")]
+pub fn jaccard(s1: &str, s2: &str) -> f64 {
+    Jaccard::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [Sørensen–Dice normalized similarity][1] for two strings.
+///
+/// A wrapper for [`SorensenDice`].
+///
+///     use textdistance::str::sorensen_dice;
+///     assert_eq!(sorensen_dice("abc", "acbd"), 0.8571428571428571);
+///
+/// [1]:https://en.wikipedia.org/wiki/S%C3%B8rensen%E2%80%93Dice_coefficient
+#[cfg(feature = "std")]
+pub fn sorensen_dice(s1: &str, s2: &str) -> f64 {
+    SorensenDice::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [Tversky normalized similarity][1] for two strings.
+///
+/// A wrapper for [`Tversky`].
+///
+///     use textdistance::str::tversky;
+///     assert_eq!(tversky("abc", "acbd"), 0.75);
+///
+/// [1]: https://en.wikipedia.org/wiki/Tversky_index
+#[cfg(feature = "std")]
+pub fn tversky(s1: &str, s2: &str) -> f64 {
+    Tversky::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [Overlap normalized similarity][1] for two strings.
+///
+/// A wrapper for [`Overlap`].
+///
+///     use textdistance::str::overlap;
+///     assert_eq!(overlap("abc", "acbd"), 1.0);
+///
+/// [1]: https://en.wikipedia.org/wiki/Overlap_coefficient
+#[cfg(feature = "std")]
+pub fn overlap(s1: &str, s2: &str) -> f64 {
+    Overlap::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [Cosine normalized similarity][1] for two strings.
+///
+/// A wrapper for [`Cosine`].
+///
+///     use textdistance::str::cosine;
+///     assert_eq!(cosine("abc", "acbd"), 0.8660254037844387);
+///
+/// [1]: https://en.wikipedia.org/wiki/Cosine_similarity
+#[cfg(feature = "std")]
+pub fn cosine(s1: &str, s2: &str) -> f64 {
+    Cosine::default().for_str(s1, s2).nval()
+}
+
+/// Calculate common prefix length for two strings.
+///
+/// A wrapper for [`Prefix`].
+///
+///     use textdistance::str::prefix;
+///     assert!(prefix("abc", "acbd") == 1); // "a"
+///
+pub fn prefix(s1: &str, s2: &str) -> usize {
+    Prefix::default().for_str(s1, s2).val()
+}
+
+/// Calculate common suffix length for two strings.
+///
+/// A wrapper for [`Suffix`].
+///
+///     use textdistance::str::suffix;
+///     assert!(suffix("abcd", "axcd") == 2); // "cd"
+///
+pub fn suffix(s1: &str, s2: &str) -> usize {
+    Suffix::default().for_str(s1, s2).val()
+}
+
+/// Calculate length distance for two strings.
+///
+/// A wrapper for [`Length`].
+///
+///     use textdistance::str::length;
+///     assert!(length("abcd", "axc") == 4 - 3);
+///
+pub fn length(s1: &str, s2: &str) -> usize {
+    Length::default().for_str(s1, s2).val()
+}
+
+/// Calculate [Smith-Waterman similarity] for two strings.
+///
+/// A wrapper for [`SmithWaterman`].
+///
+///     use textdistance::str::smith_waterman;
+///     assert!(smith_waterman("abc", "acbd") == 1);
+///
+/// [Smith-Waterman similarity]: https://en.wikipedia.org/wiki/Smith%E2%80%93Waterman_algorithm
+pub fn smith_waterman(s1: &str, s2: &str) -> usize {
+    SmithWaterman::default().for_str(s1, s2).val()
+}
+
+/// Calculate [Entropy]-based [normalized compression distance][1] for two strings.
+///
+/// A wrapper for [`EntropyNCD`].
+///
+///     use textdistance::str::entropy_ncd;
+///     assert_eq!(entropy_ncd("abc", "acbd"), 0.12174985473119697);
+///
+/// [1]: https://en.wikipedia.org/wiki/Normalized_compression_distance
+/// [Entropy]: https://en.wikipedia.org/wiki/Entropy_(information_theory)
+#[cfg(feature = "std")]
+pub fn entropy_ncd(s1: &str, s2: &str) -> f64 {
+    EntropyNCD::default().for_str(s1, s2).nval()
+}
+
+/// Calculate [Roberts similarity] for two strings.
+///
+/// A wrapper for [`Roberts`].
+///
+///     use textdistance::str::roberts;
+///     assert_eq!(roberts("abc", "acbd"), 0.8571428571428571);
+///
+/// [Roberts similarity]: https://github.com/chrislit/abydos/blob/master/abydos/distance/_roberts.py
+#[cfg(feature = "std")]
+pub fn roberts(s1: &str, s2: &str) -> f64 {
+    Roberts::default().for_str(s1, s2).nval()
+}
diff --git a/crates/textdistance/tests/integration_test.rs b/crates/textdistance/tests/integration_test.rs
new file mode 100644
index 0000000..325e58b
--- /dev/null
+++ b/crates/textdistance/tests/integration_test.rs
@@ -0,0 +1,5 @@
+#[test]
+fn test_str_hamming() {
+    let res = textdistance::str::hamming("hello", "hi");
+    assert_eq!(res, 4);
+}
diff --git a/crates/textdistance/tests/test_introspection.py b/crates/textdistance/tests/test_introspection.py
new file mode 100644
index 0000000..7a9442d
--- /dev/null
+++ b/crates/textdistance/tests/test_introspection.py
@@ -0,0 +1,85 @@
+
+from pathlib import Path
+from typing import Iterator
+
+import pytest
+
+
+SKIP = frozenset({'algorithm', 'counter'})
+ROOT = Path(__file__).parent.parent
+
+
+def get_algorithms() -> Iterator[str]:
+    for fpath in (ROOT / 'src' / 'algorithms').iterdir():
+        alg_name = fpath.stem
+        if alg_name not in SKIP:
+            yield alg_name
+
+
+ALGORITHMS = tuple(get_algorithms())
+
+
[email protected]('alg', ALGORITHMS)
+def test_name_is_ascii(alg: str) -> None:
+    assert alg.replace('_', '').isalnum()
+    assert alg.isascii()
+
+
[email protected]('alg', ALGORITHMS)
+def test_is_in_readme(alg: str) -> None:
+    text = (ROOT / 'README.md').read_text()
+    alg = alg.replace('_', '')
+    assert f'1. `{alg}`' in text.lower()
+
+
[email protected]('alg', ALGORITHMS)
+def test_str_shortcut_exists(alg: str) -> None:
+    fpath = (ROOT / 'src' / 'str.rs')
+    text = fpath.read_text()
+    assert f'fn {alg}(' in text
+    alg = alg.replace('_', '')
+    assert f'{alg}::default().for_str(s1, s2).' in text.lower()
+    assert f'/// a wrapper for [`{alg}`].\n' in text.lower()
+
+
+def test_str_docs_consistency() -> None:
+    fpath = (ROOT / 'src' / 'str.rs')
+    text = fpath.read_text()
+    assert text.count('/// Calculate ') == text.count('pub fn')
+
+
[email protected]('alg', ALGORITHMS)
+def test_nstr_shortcut_exists(alg: str) -> None:
+    fpath = (ROOT / 'src' / 'nstr.rs')
+    text = fpath.read_text()
+    assert f'fn {alg}(' in text
+    alg = alg.replace('_', '')
+    assert f'{alg}::default().for_str(s1, s2).nval()' in text.lower()
+    assert f'/// a wrapper for [`{alg}`].\n' in text.lower()
+
+
+def test_nstr_docs_consistency() -> None:
+    fpath = (ROOT / 'src' / 'nstr.rs')
+    text = fpath.read_text()
+    assert text.count('/// Calculate normalized ') == text.count('pub fn')
+
+
[email protected]('alg', ALGORITHMS)
+def test_is_exported(alg: str) -> None:
+    fpath = (ROOT / 'src' / 'lib.rs')
+    text = fpath.read_text()
+    assert f'pub mod {alg}' in text
+    assert f'pub use self::algorithms::{alg}::' in text
+
+
[email protected]('alg', ALGORITHMS)
+def test_is_smoke_tested(alg: str) -> None:
+    fpath = (ROOT / 'src' / 'lib.rs')
+    text = fpath.read_text()
+    assert f'#[case::{alg}(' in text
+
+
[email protected]('alg', ALGORITHMS)
+def test_is_tested(alg: str) -> None:
+    fpath = (ROOT / 'src' / 'algorithms' / f'{alg}.rs')
+    assert '#[rstest]' in fpath.read_text()
diff --git a/crates/thiserror/.android-checksum.json b/crates/thiserror/.android-checksum.json
index 81ec816..6a21157 100644
--- a/crates/thiserror/.android-checksum.json
+++ b/crates/thiserror/.android-checksum.json
@@ -1 +1 @@
-{"package":null,"files":{".cargo-checksum.json":"6628719163f217dced7d3df46b361ea7875863edd38e2af2259fb9644aac537e","Android.bp":"b7728c539835d0ef33db1daa81826ac0e98df1e2c9415729544ba00385b76bc2","Cargo.toml":"ac746b6d2d17a37458bdcc6a04e4ca1de7a67daba519a256e7fba7ea1b9268a5","LICENSE":"50f827348bfd5def2df4f30cb41264072cf9c180bd9ed698437e8d85b2482754","LICENSE-APACHE":"50f827348bfd5def2df4f30cb41264072cf9c180bd9ed698437e8d85b2482754","LICENSE-MIT":"38620a3cfaeec97a9197e8c39e436ea7f0bc86699b1f1c35f1aa41785b6d4eac","METADATA":"55ababb5c4b72d5ff522c57207b9698c828fe97a66d50a05f79baf132be69000","MODULE_LICENSE_APACHE2":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","NOTICE":"50f827348bfd5def2df4f30cb41264072cf9c180bd9ed698437e8d85b2482754","README.md":"76160b61c09049e479407435b9a03cabda4e8a3c280ee963f7c3c915b530675a","TEST_MAPPING":"f935a2c2019169411ab63fc9475f797223d7caf28484fc0287977fa0b8db3643","build.rs":"8006f0f5ee493f4cfe5e0c8c4c186c219e87168f2de4f32cb7032378982f6940","build/probe.rs":"b39312d34ace0e0b2702e10d6036ef100fb8d23c8355f46ff635e22758bdfbe5","cargo_embargo.json":"eaf3579e645fdf1e9282be1229cb7d06b2c028364842aaf72b02778a82d6120e","rules.mk":"95f35cc5a8c54bd3bc180fdd3cba24caa2c80dbf6f380f00816e2b7ebb07d254","rust-toolchain.toml":"25bc44c2a4cb625e0f16003c73ac0db2fc79a3b79a5b0e6d4b6175c6dbe5af69","src/aserror.rs":"eb2fafd9cca3217d103bf4c8abf016db3c3f7d3af7d7a8e2270bd961ecfbb3b7","src/display.rs":"8ce7fa11380d90b9f3ade582aca93f74ae54d215d7c6d0a4a65151c5612c2bbe","src/lib.rs":"b10b27b78f6890a73910ea923823c84fa3fa7434c0de3ffaa07fda367be93868","src/provide.rs":"3217a7800360cf3a7d8a689b1aa7ecf1832a49719eebebfc9f29ec10667feb5f","src/var.rs":"9983204d7fc8fd890e3cba8b82e8ea35dfdaf285765e4d8661ed7ae8a34cabf3","tests/compiletest.rs":"5876a4728abd95ed4ed3b78777fe95428d32a32730d7718f58001bdbd4d1e168","tests/test_backtrace.rs":"51a8358592ce410888bf90ae9f5f0a47fae236e7c03a197fa2b7d3ec09b652ab","tests/test_display.rs":"3b6fdc5a4f1c51898c0b42dad2e4d8d61d2492
7d1d9e9eed4853e6fb16c85b44","tests/test_error.rs":"546fc4db53b241b4963018eda3582d4ff5d352abd841ff956c8ebb9c14115a03","tests/test_expr.rs":"226c345d04ee48b8d63dcf616a6140c2bbc59988774f0c23eb4c88577eed72a3","tests/test_from.rs":"f95e48e2d8ebc8dc347b521c6b8d870abd5f6fc07c75f42b78be78657a74c2ff","tests/test_generics.rs":"907a43e3c2ec2357e61db14f4bb63f75efb24a92034773a99b22efc1dd097d34","tests/test_lints.rs":"6e9c53b6590a795e6286af8410818fb20a8fd9a46600d9621c203d3a571d9c39","tests/test_option.rs":"c409b2cea42a0fe968007690e89715e43023a50303cce9b86d0a4d6885e794f6","tests/test_path.rs":"55b0e66dc4215a1f47bf456e0349180e6df83208f84395758ee88f0f2949ff6d","tests/test_source.rs":"38c4d61e5f988dd276a38522d3b8bf8aa31b0c792174188937fcabee4dfa5d65","tests/test_transparent.rs":"090214f938799d553e063019feb2ba7a77e3ea73bb3f61d81251e47dc68dc553","tests/ui/bad-field-attr.rs":"bf347c298087f6f0d0a1eac59267839ae53adcd5540b85ae9c63c3a45fa706cd","tests/ui/bad-field-attr.stderr":"281d1c5f032d66d2aff8606f3aab497b634d7f527184d5afaf9241e1faf64c5f","tests/ui/concat-display.rs":"f1175ba9be43368c509dc45e6e405eaad05a9d4eb77e14dc926dc71bbae999af","tests/ui/concat-display.stderr":"c18d04c9bc7fa530e846b5eb8121ed7733bea7d0abeed2c5a73e9d3e6c005f55","tests/ui/display-underscore.rs":"62de9b01cda9132a1b70a2cebc1338ea1927024bce589b0334d1c54e42e4a045","tests/ui/display-underscore.stderr":"cd84a129befdd0b3d64a352726b657d0e26a9c2595185279652bc5e4a7ba239b","tests/ui/duplicate-enum-source.rs":"36542154222586a4f39b7b5d52844649766b74ff70fd09e42509b080d95f5d71","tests/ui/duplicate-enum-source.stderr":"93d9795714e247e2d428ba74be5edc93736cd5b548e2f3ecec97d6fb9f0b18f2","tests/ui/duplicate-fmt.rs":"7bb9cd7dfeef6c34f81f9e87291e0531fc4ccc374e4113b9db409769c27bdf55","tests/ui/duplicate-fmt.stderr":"dccd907e645857bfe7fef66cd92e822e71e89f683c5fbe54473d2c1a1bbdedb9","tests/ui/duplicate-struct-source.rs":"2b23a91807be37ce71b1083c7c1866b6493018724a3f16678530345b8e92d46f","tests/ui/duplicate-struct-source.stderr":"17b1deae8183b4c
1a650adf511814a98db0f3c34c3a0a310a81212be0343dadb","tests/ui/duplicate-transparent.rs":"2c90c839f4d0ef13a552a5af8439ae868a9213f7565b981a518205b7d4e477b7","tests/ui/duplicate-transparent.stderr":"6f8e8a3c566df779c18085cbe57a7b17f8374d5d5783a94b8f7bcfe8abb57e73","tests/ui/expression-fallback.rs":"97503986b26e0e4f51d85ccd1486b959a06fe61fb64dbd0e7b94363b15e7d585","tests/ui/expression-fallback.stderr":"fd1e8ed9158239b6e5a901fa00a561e98d00176c3a422599ddb3e0c5775734de","tests/ui/fallback-impl-with-display.rs":"48af8de8a000a7d47c64d2b78ed7e0b9e76f3991fd995d4f8e7ef3978b9a50d8","tests/ui/fallback-impl-with-display.stderr":"b5b51dc41ebfb5eb0f6a93ce5388abf0e4c8e6c89408c4db2987492f235f125d","tests/ui/from-backtrace-backtrace.rs":"7916b789d091b3b5b694fb5bb712dc3e018f8761a20ed2c3705e68b3f9ceea69","tests/ui/from-backtrace-backtrace.stderr":"6b0f7a1b448d1addb7e4e3592a2e8378907f67e4974663edc041d9e00f45c02b","tests/ui/from-not-source.rs":"75b6faeb9e1460ca9852bd18e4e947d6fbd3791e585fe9f22d70dd7d82029f10","tests/ui/from-not-source.stderr":"b05feb4ac8ba62379a552e0c5e6fba8d10a45d500b0dcae3a7169806236d3abd","tests/ui/invalid-input-impl-anyway.rs":"b46154acdfbd7b7baa0a933f063596418fd1e5eb2c7b51decced4a0de2faf255","tests/ui/invalid-input-impl-anyway.stderr":"42ad10ca6586eaeb482a32c702162d9126e701ecda574e3c9b11f19ea1e005a5","tests/ui/lifetime.rs":"4285a3a46eaf7e653975e8f896802b4cdbe867cce2854372eac37b9e0d2d9b25","tests/ui/lifetime.stderr":"91cdc5f9e5400d52c75eec2ec448f3ac5fcda2258a0d823d9e33bd14e0b58947","tests/ui/missing-display.rs":"2c9abba361bd1d823ef27b8e90152918cffa6635638e526dfe92e08a77608169","tests/ui/missing-display.stderr":"536453e6c152805bc534706276f78654630d79028b42900766769dced941a483","tests/ui/missing-fmt.rs":"6af19714f0f60028a9b5ae19392ca00858ae58dfcf2e6dc9bfae4414636221ac","tests/ui/missing-fmt.stderr":"16cfd840423200479c9f18fcfea7ef158ff1d6554e73d379dab4fe34a663e8e8","tests/ui/no-display.rs":"d02b04857182cf9b2da83437a17ef19c613768eebc3617337f89f02fb7f7974d","tests/ui/no-disp
lay.stderr":"3bf1a6e6791377563de07c3dda248c9e96103f04ce101f37f8a4e728424af32a","tests/ui/numbered-positional-tuple.rs":"c9179f8e6a3e60ad34b20f5b682c2da74ff3644653691dcc4b0476fe16d564fd","tests/ui/numbered-positional-tuple.stderr":"565350599805426fcf093ebd1a9c4fddafddb61c5703edfc9ce09c890f56d3b8","tests/ui/raw-identifier.rs":"1ef32c66a82cd48d836943668201b72543c53ace9b9b0f055228d1ddb3e2b276","tests/ui/raw-identifier.stderr":"f8afbcec8baabf852fad0d8b780dc54592548772bb475cb858c29f5a8d8b5226","tests/ui/same-from-type.rs":"13d24c8764667819db33491af13438577512d29c756e06cc0a1d749e2549dd51","tests/ui/same-from-type.stderr":"977cc395a135a1cd95a67499883a25f2e36e89afe3f2a7a036f29dbe9c876740","tests/ui/source-enum-not-error.rs":"8b66db858b247f3979a06deaf8580e445e67e23f45751dcd5ef7218b833f433f","tests/ui/source-enum-not-error.stderr":"8d96818d43219105f32ae41c5c9c59c461728b2663a5d138c25e2250a62af1d1","tests/ui/source-enum-unnamed-field-not-error.rs":"27e7d6d589b4a3e974484577bc108ae8f4310adc23ca547d1c9e828753c37a51","tests/ui/source-enum-unnamed-field-not-error.stderr":"bd373ded6993059c46bad10ba6fbba39564dbdacde24c676f40a87b52fd0556c","tests/ui/source-struct-not-error.rs":"e36b6441053a1c63f3c69af2b4a3e833865006c4faf7d9c51ecb3fe2dc39e7c4","tests/ui/source-struct-not-error.stderr":"d7d1cc742060dd26ff84786909cfa49b346b0e8ec1921ca444df13eb026aa9e9","tests/ui/source-struct-unnamed-field-not-error.rs":"e82a9061d401b23dc6676881f74522807632741cf5944bdc3996d02697de5b79","tests/ui/source-struct-unnamed-field-not-error.stderr":"ed3f7b430b37f573903ec9cc8f6f89272b4d7b310ed6ba5593778f85aae047e1","tests/ui/struct-with-fmt.rs":"4895c24195726465f1b68c22723fc84609419963b953ecfd5956dcc5b288b360","tests/ui/struct-with-fmt.stderr":"5f99f229dd01a94a4bb94c070591235e6f94373d13c25d49c4e7e2fd1bb3d317","tests/ui/transparent-display.rs":"5abd47616b338d4bfb95732c82d8ee41d873ce6ac8bc1c3dd1ef544d3debc734","tests/ui/transparent-display.stderr":"2d0a78931017ab7979b47dd3ce6598a6eaa17921087a34c046e6fad16c6ae480","te
sts/ui/transparent-enum-many.rs":"cda644d43222d17c8169e754ab59141e9cac2afc4b65b744e6cb4079f23cbaad","tests/ui/transparent-enum-many.stderr":"c3d389b42c8c6117571a5aef1cb71176b2d27097ff434dc6ed66a29589005228","tests/ui/transparent-enum-not-error.rs":"e47febf3bcccf29b337a768cc66a1cc4f022ab6094a4fa5ef19202e03348bdfb","tests/ui/transparent-enum-not-error.stderr":"99579294e88378a49a90da1966b013d9ac7c88b2b2fcda556242202833f69206","tests/ui/transparent-enum-source.rs":"150fad9540b7a86a3bc51549fa0a8b172ffd7648a3a8c3f095c4865e3d37413b","tests/ui/transparent-enum-source.stderr":"0aeed0f88273ad06756fbe307e9f1449665d2382ac1c81122c85fe880b8c98e8","tests/ui/transparent-enum-unnamed-field-not-error.rs":"afa3d81d6becca7ac62cbb697a1b21847342b0871a43ad3d5113219729abc854","tests/ui/transparent-enum-unnamed-field-not-error.stderr":"f01a220d38ebff3e66d66174600fdf768f963319753d10468512858354826295","tests/ui/transparent-struct-many.rs":"73a9fbc5649117b46fa691ba096a67dd5372d8bfadaa71dd42fc26c5f1da0617","tests/ui/transparent-struct-many.stderr":"7111a46e578e3bb6fc2a81fc6374c0de81dabc2dc03bd6d9f80ba7b94eeab68d","tests/ui/transparent-struct-not-error.rs":"af6d7cd5aa80cc8440fc7983e66d2ecc47badcdf103e3209e5494ca06279891f","tests/ui/transparent-struct-not-error.stderr":"6a9add9db88ac1a6364cd912a07b2b3f5227f8db6e3a2e20b25ba5fd310e1f93","tests/ui/transparent-struct-source.rs":"9a36c4273061bf33de0681b862c2cfbc554f891d2c033a6f5f198ebc6cdc2a57","tests/ui/transparent-struct-source.stderr":"96b8b76fd5d235e10fd135a51de73ff16f42fb72e89a63423646f3010193056d","tests/ui/transparent-struct-unnamed-field-not-error.rs":"c0b776bfef8c8f90ab453f4dda64e2c66687a8a91e32a6c51eac0c729b4ca967","tests/ui/transparent-struct-unnamed-field-not-error.stderr":"a9c8f742caf7d0c0b4a79893fdc5da11e716ddf0d833a06ca3e36d76eab07134","tests/ui/unconditional-recursion.rs":"19a01c25685f9c9e3dfa137da973b6422d192a8bcf0c2dd82717b56c17838c72","tests/ui/unconditional-recursion.stderr":"b38a8adc33f3b1d7297c75087d5a7c975a5443b4fa2be37ef8bcac5
fe6904e74","tests/ui/unexpected-field-fmt.rs":"461cbc37768b9d357784c3a1958c7b7d9bedcb3ac5f065f6ad26002540bc1585","tests/ui/unexpected-field-fmt.stderr":"cf9cc6db1d468c49c3fcd02ffe846e0a6306806477c0459f6feb588163ca3ead","tests/ui/unexpected-struct-source.rs":"2ad5a79ad119d310310ff02da4e4f18c1127a42d9954dc1e30dae9a7e7e44373","tests/ui/unexpected-struct-source.stderr":"f3930172b5a705e45a2fa85a443d06ac3e34e9dfb1309a878226c71d502c5d82","tests/ui/union.rs":"d03e526e970e667423f3579a966e99b328794586a8dd9cdce1210a4db303fdc6","tests/ui/union.stderr":"4ba0864f18a58b84acb83e08bec5dc3b6ad857ef101dfa9a087e735294fc2aa7"}}
\ No newline at end of file
+{"package":null,"files":{".cargo-checksum.json":"6628719163f217dced7d3df46b361ea7875863edd38e2af2259fb9644aac537e","Android.bp":"b7728c539835d0ef33db1daa81826ac0e98df1e2c9415729544ba00385b76bc2","Cargo.toml":"ac746b6d2d17a37458bdcc6a04e4ca1de7a67daba519a256e7fba7ea1b9268a5","LICENSE":"50f827348bfd5def2df4f30cb41264072cf9c180bd9ed698437e8d85b2482754","LICENSE-APACHE":"50f827348bfd5def2df4f30cb41264072cf9c180bd9ed698437e8d85b2482754","LICENSE-MIT":"38620a3cfaeec97a9197e8c39e436ea7f0bc86699b1f1c35f1aa41785b6d4eac","METADATA":"55ababb5c4b72d5ff522c57207b9698c828fe97a66d50a05f79baf132be69000","MODULE_LICENSE_APACHE2":"0d6f8afa3940b7f06bebee651376d43bc8b0d5b437337be2696d30377451e93a","NOTICE":"50f827348bfd5def2df4f30cb41264072cf9c180bd9ed698437e8d85b2482754","README.md":"76160b61c09049e479407435b9a03cabda4e8a3c280ee963f7c3c915b530675a","TEST_MAPPING":"f935a2c2019169411ab63fc9475f797223d7caf28484fc0287977fa0b8db3643","build.rs":"8006f0f5ee493f4cfe5e0c8c4c186c219e87168f2de4f32cb7032378982f6940","build/probe.rs":"b39312d34ace0e0b2702e10d6036ef100fb8d23c8355f46ff635e22758bdfbe5","cargo_embargo.json":"281153bfe764a9e63936fb6c60d33857a3605d78b942b4f6731cb1f8f2939b77","rules.mk":"af23c0769c82d700203a16f29934127c6ba5a98e2471c9b0d3519c18804694da","rust-toolchain.toml":"25bc44c2a4cb625e0f16003c73ac0db2fc79a3b79a5b0e6d4b6175c6dbe5af69","src/aserror.rs":"eb2fafd9cca3217d103bf4c8abf016db3c3f7d3af7d7a8e2270bd961ecfbb3b7","src/display.rs":"8ce7fa11380d90b9f3ade582aca93f74ae54d215d7c6d0a4a65151c5612c2bbe","src/lib.rs":"b10b27b78f6890a73910ea923823c84fa3fa7434c0de3ffaa07fda367be93868","src/provide.rs":"3217a7800360cf3a7d8a689b1aa7ecf1832a49719eebebfc9f29ec10667feb5f","src/var.rs":"9983204d7fc8fd890e3cba8b82e8ea35dfdaf285765e4d8661ed7ae8a34cabf3","tests/compiletest.rs":"5876a4728abd95ed4ed3b78777fe95428d32a32730d7718f58001bdbd4d1e168","tests/test_backtrace.rs":"51a8358592ce410888bf90ae9f5f0a47fae236e7c03a197fa2b7d3ec09b652ab","tests/test_display.rs":"3b6fdc5a4f1c51898c0b42dad2e4d8d61d2492
7d1d9e9eed4853e6fb16c85b44","tests/test_error.rs":"546fc4db53b241b4963018eda3582d4ff5d352abd841ff956c8ebb9c14115a03","tests/test_expr.rs":"226c345d04ee48b8d63dcf616a6140c2bbc59988774f0c23eb4c88577eed72a3","tests/test_from.rs":"f95e48e2d8ebc8dc347b521c6b8d870abd5f6fc07c75f42b78be78657a74c2ff","tests/test_generics.rs":"907a43e3c2ec2357e61db14f4bb63f75efb24a92034773a99b22efc1dd097d34","tests/test_lints.rs":"6e9c53b6590a795e6286af8410818fb20a8fd9a46600d9621c203d3a571d9c39","tests/test_option.rs":"c409b2cea42a0fe968007690e89715e43023a50303cce9b86d0a4d6885e794f6","tests/test_path.rs":"55b0e66dc4215a1f47bf456e0349180e6df83208f84395758ee88f0f2949ff6d","tests/test_source.rs":"38c4d61e5f988dd276a38522d3b8bf8aa31b0c792174188937fcabee4dfa5d65","tests/test_transparent.rs":"090214f938799d553e063019feb2ba7a77e3ea73bb3f61d81251e47dc68dc553","tests/ui/bad-field-attr.rs":"bf347c298087f6f0d0a1eac59267839ae53adcd5540b85ae9c63c3a45fa706cd","tests/ui/bad-field-attr.stderr":"281d1c5f032d66d2aff8606f3aab497b634d7f527184d5afaf9241e1faf64c5f","tests/ui/concat-display.rs":"f1175ba9be43368c509dc45e6e405eaad05a9d4eb77e14dc926dc71bbae999af","tests/ui/concat-display.stderr":"c18d04c9bc7fa530e846b5eb8121ed7733bea7d0abeed2c5a73e9d3e6c005f55","tests/ui/display-underscore.rs":"62de9b01cda9132a1b70a2cebc1338ea1927024bce589b0334d1c54e42e4a045","tests/ui/display-underscore.stderr":"cd84a129befdd0b3d64a352726b657d0e26a9c2595185279652bc5e4a7ba239b","tests/ui/duplicate-enum-source.rs":"36542154222586a4f39b7b5d52844649766b74ff70fd09e42509b080d95f5d71","tests/ui/duplicate-enum-source.stderr":"93d9795714e247e2d428ba74be5edc93736cd5b548e2f3ecec97d6fb9f0b18f2","tests/ui/duplicate-fmt.rs":"7bb9cd7dfeef6c34f81f9e87291e0531fc4ccc374e4113b9db409769c27bdf55","tests/ui/duplicate-fmt.stderr":"dccd907e645857bfe7fef66cd92e822e71e89f683c5fbe54473d2c1a1bbdedb9","tests/ui/duplicate-struct-source.rs":"2b23a91807be37ce71b1083c7c1866b6493018724a3f16678530345b8e92d46f","tests/ui/duplicate-struct-source.stderr":"17b1deae8183b4c
1a650adf511814a98db0f3c34c3a0a310a81212be0343dadb","tests/ui/duplicate-transparent.rs":"2c90c839f4d0ef13a552a5af8439ae868a9213f7565b981a518205b7d4e477b7","tests/ui/duplicate-transparent.stderr":"6f8e8a3c566df779c18085cbe57a7b17f8374d5d5783a94b8f7bcfe8abb57e73","tests/ui/expression-fallback.rs":"97503986b26e0e4f51d85ccd1486b959a06fe61fb64dbd0e7b94363b15e7d585","tests/ui/expression-fallback.stderr":"fd1e8ed9158239b6e5a901fa00a561e98d00176c3a422599ddb3e0c5775734de","tests/ui/fallback-impl-with-display.rs":"48af8de8a000a7d47c64d2b78ed7e0b9e76f3991fd995d4f8e7ef3978b9a50d8","tests/ui/fallback-impl-with-display.stderr":"b5b51dc41ebfb5eb0f6a93ce5388abf0e4c8e6c89408c4db2987492f235f125d","tests/ui/from-backtrace-backtrace.rs":"7916b789d091b3b5b694fb5bb712dc3e018f8761a20ed2c3705e68b3f9ceea69","tests/ui/from-backtrace-backtrace.stderr":"6b0f7a1b448d1addb7e4e3592a2e8378907f67e4974663edc041d9e00f45c02b","tests/ui/from-not-source.rs":"75b6faeb9e1460ca9852bd18e4e947d6fbd3791e585fe9f22d70dd7d82029f10","tests/ui/from-not-source.stderr":"b05feb4ac8ba62379a552e0c5e6fba8d10a45d500b0dcae3a7169806236d3abd","tests/ui/invalid-input-impl-anyway.rs":"b46154acdfbd7b7baa0a933f063596418fd1e5eb2c7b51decced4a0de2faf255","tests/ui/invalid-input-impl-anyway.stderr":"42ad10ca6586eaeb482a32c702162d9126e701ecda574e3c9b11f19ea1e005a5","tests/ui/lifetime.rs":"4285a3a46eaf7e653975e8f896802b4cdbe867cce2854372eac37b9e0d2d9b25","tests/ui/lifetime.stderr":"91cdc5f9e5400d52c75eec2ec448f3ac5fcda2258a0d823d9e33bd14e0b58947","tests/ui/missing-display.rs":"2c9abba361bd1d823ef27b8e90152918cffa6635638e526dfe92e08a77608169","tests/ui/missing-display.stderr":"536453e6c152805bc534706276f78654630d79028b42900766769dced941a483","tests/ui/missing-fmt.rs":"6af19714f0f60028a9b5ae19392ca00858ae58dfcf2e6dc9bfae4414636221ac","tests/ui/missing-fmt.stderr":"16cfd840423200479c9f18fcfea7ef158ff1d6554e73d379dab4fe34a663e8e8","tests/ui/no-display.rs":"d02b04857182cf9b2da83437a17ef19c613768eebc3617337f89f02fb7f7974d","tests/ui/no-disp
lay.stderr":"3bf1a6e6791377563de07c3dda248c9e96103f04ce101f37f8a4e728424af32a","tests/ui/numbered-positional-tuple.rs":"c9179f8e6a3e60ad34b20f5b682c2da74ff3644653691dcc4b0476fe16d564fd","tests/ui/numbered-positional-tuple.stderr":"565350599805426fcf093ebd1a9c4fddafddb61c5703edfc9ce09c890f56d3b8","tests/ui/raw-identifier.rs":"1ef32c66a82cd48d836943668201b72543c53ace9b9b0f055228d1ddb3e2b276","tests/ui/raw-identifier.stderr":"f8afbcec8baabf852fad0d8b780dc54592548772bb475cb858c29f5a8d8b5226","tests/ui/same-from-type.rs":"13d24c8764667819db33491af13438577512d29c756e06cc0a1d749e2549dd51","tests/ui/same-from-type.stderr":"977cc395a135a1cd95a67499883a25f2e36e89afe3f2a7a036f29dbe9c876740","tests/ui/source-enum-not-error.rs":"8b66db858b247f3979a06deaf8580e445e67e23f45751dcd5ef7218b833f433f","tests/ui/source-enum-not-error.stderr":"8d96818d43219105f32ae41c5c9c59c461728b2663a5d138c25e2250a62af1d1","tests/ui/source-enum-unnamed-field-not-error.rs":"27e7d6d589b4a3e974484577bc108ae8f4310adc23ca547d1c9e828753c37a51","tests/ui/source-enum-unnamed-field-not-error.stderr":"bd373ded6993059c46bad10ba6fbba39564dbdacde24c676f40a87b52fd0556c","tests/ui/source-struct-not-error.rs":"e36b6441053a1c63f3c69af2b4a3e833865006c4faf7d9c51ecb3fe2dc39e7c4","tests/ui/source-struct-not-error.stderr":"d7d1cc742060dd26ff84786909cfa49b346b0e8ec1921ca444df13eb026aa9e9","tests/ui/source-struct-unnamed-field-not-error.rs":"e82a9061d401b23dc6676881f74522807632741cf5944bdc3996d02697de5b79","tests/ui/source-struct-unnamed-field-not-error.stderr":"ed3f7b430b37f573903ec9cc8f6f89272b4d7b310ed6ba5593778f85aae047e1","tests/ui/struct-with-fmt.rs":"4895c24195726465f1b68c22723fc84609419963b953ecfd5956dcc5b288b360","tests/ui/struct-with-fmt.stderr":"5f99f229dd01a94a4bb94c070591235e6f94373d13c25d49c4e7e2fd1bb3d317","tests/ui/transparent-display.rs":"5abd47616b338d4bfb95732c82d8ee41d873ce6ac8bc1c3dd1ef544d3debc734","tests/ui/transparent-display.stderr":"2d0a78931017ab7979b47dd3ce6598a6eaa17921087a34c046e6fad16c6ae480","te
sts/ui/transparent-enum-many.rs":"cda644d43222d17c8169e754ab59141e9cac2afc4b65b744e6cb4079f23cbaad","tests/ui/transparent-enum-many.stderr":"c3d389b42c8c6117571a5aef1cb71176b2d27097ff434dc6ed66a29589005228","tests/ui/transparent-enum-not-error.rs":"e47febf3bcccf29b337a768cc66a1cc4f022ab6094a4fa5ef19202e03348bdfb","tests/ui/transparent-enum-not-error.stderr":"99579294e88378a49a90da1966b013d9ac7c88b2b2fcda556242202833f69206","tests/ui/transparent-enum-source.rs":"150fad9540b7a86a3bc51549fa0a8b172ffd7648a3a8c3f095c4865e3d37413b","tests/ui/transparent-enum-source.stderr":"0aeed0f88273ad06756fbe307e9f1449665d2382ac1c81122c85fe880b8c98e8","tests/ui/transparent-enum-unnamed-field-not-error.rs":"afa3d81d6becca7ac62cbb697a1b21847342b0871a43ad3d5113219729abc854","tests/ui/transparent-enum-unnamed-field-not-error.stderr":"f01a220d38ebff3e66d66174600fdf768f963319753d10468512858354826295","tests/ui/transparent-struct-many.rs":"73a9fbc5649117b46fa691ba096a67dd5372d8bfadaa71dd42fc26c5f1da0617","tests/ui/transparent-struct-many.stderr":"7111a46e578e3bb6fc2a81fc6374c0de81dabc2dc03bd6d9f80ba7b94eeab68d","tests/ui/transparent-struct-not-error.rs":"af6d7cd5aa80cc8440fc7983e66d2ecc47badcdf103e3209e5494ca06279891f","tests/ui/transparent-struct-not-error.stderr":"6a9add9db88ac1a6364cd912a07b2b3f5227f8db6e3a2e20b25ba5fd310e1f93","tests/ui/transparent-struct-source.rs":"9a36c4273061bf33de0681b862c2cfbc554f891d2c033a6f5f198ebc6cdc2a57","tests/ui/transparent-struct-source.stderr":"96b8b76fd5d235e10fd135a51de73ff16f42fb72e89a63423646f3010193056d","tests/ui/transparent-struct-unnamed-field-not-error.rs":"c0b776bfef8c8f90ab453f4dda64e2c66687a8a91e32a6c51eac0c729b4ca967","tests/ui/transparent-struct-unnamed-field-not-error.stderr":"a9c8f742caf7d0c0b4a79893fdc5da11e716ddf0d833a06ca3e36d76eab07134","tests/ui/unconditional-recursion.rs":"19a01c25685f9c9e3dfa137da973b6422d192a8bcf0c2dd82717b56c17838c72","tests/ui/unconditional-recursion.stderr":"b38a8adc33f3b1d7297c75087d5a7c975a5443b4fa2be37ef8bcac5
fe6904e74","tests/ui/unexpected-field-fmt.rs":"461cbc37768b9d357784c3a1958c7b7d9bedcb3ac5f065f6ad26002540bc1585","tests/ui/unexpected-field-fmt.stderr":"cf9cc6db1d468c49c3fcd02ffe846e0a6306806477c0459f6feb588163ca3ead","tests/ui/unexpected-struct-source.rs":"2ad5a79ad119d310310ff02da4e4f18c1127a42d9954dc1e30dae9a7e7e44373","tests/ui/unexpected-struct-source.stderr":"f3930172b5a705e45a2fa85a443d06ac3e34e9dfb1309a878226c71d502c5d82","tests/ui/union.rs":"d03e526e970e667423f3579a966e99b328794586a8dd9cdce1210a4db303fdc6","tests/ui/union.stderr":"4ba0864f18a58b84acb83e08bec5dc3b6ad857ef101dfa9a087e735294fc2aa7"}}
\ No newline at end of file
diff --git a/crates/thiserror/cargo_embargo.json b/crates/thiserror/cargo_embargo.json
index 6f0db49..2e1324b 100644
--- a/crates/thiserror/cargo_embargo.json
+++ b/crates/thiserror/cargo_embargo.json
@@ -3,11 +3,10 @@
   "min_sdk_version": "29",
   "run_cargo": false,
   "variants": [
-    {
-      "generate_rulesmk": true
-    },
+    {},
     {
       "features": [],
+      "generate_rulesmk": true,
       "module_name_overrides": {
         "libthiserror": "libthiserror_nostd"
       },
diff --git a/crates/thiserror/rules.mk b/crates/thiserror/rules.mk
index 9c6f667..57dd4e9 100644
--- a/crates/thiserror/rules.mk
+++ b/crates/thiserror/rules.mk
@@ -8,12 +8,11 @@
 MODULE_CRATE_NAME := thiserror
 MODULE_RUST_CRATE_TYPES := rlib
 MODULE_SRCS := $(LOCAL_DIR)/src/lib.rs
+MODULE_ADD_IMPLICIT_DEPS := false
 MODULE_RUST_EDITION := 2021
-MODULE_RUSTFLAGS += \
-	--cfg 'feature="default"' \
-	--cfg 'feature="std"'
-
 MODULE_LIBRARY_DEPS := \
-	$(call FIND_CRATE,thiserror-impl)
+	$(call FIND_CRATE,thiserror-impl) \
+	trusty/user/base/lib/libcompiler_builtins-rust \
+	trusty/user/base/lib/libcore-rust
 
 include make/library.mk
diff --git a/pseudo_crate/Cargo.lock b/pseudo_crate/Cargo.lock
index 9064c76..3982fee 100644
--- a/pseudo_crate/Cargo.lock
+++ b/pseudo_crate/Cargo.lock
@@ -95,6 +95,7 @@
  "acpi",
  "ahash 0.8.11",
  "aho-corasick",
+ "android_bp",
  "android_log-sys",
  "android_logger",
  "anes 0.2.0",
@@ -176,8 +177,8 @@
  "document-features",
  "downcast",
  "downcast-rs",
- "drm",
- "drm-ffi",
+ "drm 0.14.1",
+ "drm-ffi 0.9.0",
  "drm-fourcc",
  "either",
  "enumn",
@@ -193,6 +194,7 @@
  "flagset",
  "flate2",
  "fnv",
+ "foldhash",
  "foreign-types 0.3.1",
  "foreign-types-shared 0.1.0",
  "form_urlencoded",
@@ -240,6 +242,7 @@
  "inotify",
  "inotify-sys",
  "intrusive-collections",
+ "is-terminal",
  "itertools 0.14.0",
  "itoa",
  "jni",
@@ -401,6 +404,7 @@
  "tempfile",
  "termcolor",
  "termtree",
+ "textdistance",
  "textwrap 0.16.1",
  "thiserror 2.0.11",
  "thiserror-impl 2.0.11",
@@ -485,6 +489,15 @@
 checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
 
 [[package]]
+name = "android_bp"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9e5389cea0250e70ca9323df3b5b5b31749d4face289c92cd4e99f54b1d6425"
+dependencies = [
+ "nom",
+]
+
+[[package]]
 name = "android_log-sys"
 version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1731,18 +1744,42 @@
 dependencies = [
  "bitflags 2.8.0",
  "bytemuck",
- "drm-ffi",
+ "drm-ffi 0.8.0",
  "drm-fourcc",
  "rustix",
 ]
 
 [[package]]
+name = "drm"
+version = "0.14.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "80bc8c5c6c2941f70a55c15f8d9f00f9710ebda3ffda98075f996a0e6c92756f"
+dependencies = [
+ "bitflags 2.8.0",
+ "bytemuck",
+ "drm-ffi 0.9.0",
+ "drm-fourcc",
+ "libc",
+ "rustix",
+]
+
+[[package]]
 name = "drm-ffi"
 version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "97c98727e48b7ccb4f4aea8cfe881e5b07f702d17b7875991881b41af7278d53"
 dependencies = [
- "drm-sys",
+ "drm-sys 0.7.0",
+ "rustix",
+]
+
+[[package]]
+name = "drm-ffi"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8e41459d99a9b529845f6d2c909eb9adf3b6d2f82635ae40be8de0601726e8b"
+dependencies = [
+ "drm-sys 0.8.0",
  "rustix",
 ]
 
@@ -1763,6 +1800,16 @@
 ]
 
 [[package]]
+name = "drm-sys"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bafb66c8dbc944d69e15cfcc661df7e703beffbaec8bd63151368b06c5f9858c"
+dependencies = [
+ "libc",
+ "linux-raw-sys 0.6.5",
+]
+
+[[package]]
 name = "dwrote"
 version = "0.11.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1974,6 +2021,12 @@
 checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
 
 [[package]]
+name = "foldhash"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2"
+
+[[package]]
 name = "font-kit"
 version = "0.14.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2196,7 +2249,7 @@
 checksum = "45bf55ba6dd53ad0ac115046ff999c5324c283444ee6e0be82454c4e8eb2f36a"
 dependencies = [
  "bitflags 2.8.0",
- "drm",
+ "drm 0.12.0",
  "drm-fourcc",
  "gbm-sys",
  "libc",
@@ -5029,6 +5082,12 @@
 checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76"
 
 [[package]]
+name = "textdistance"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa672c55ab69f787dbc9126cc387dbe57fdd595f585e4524cf89018fa44ab819"
+
+[[package]]
 name = "textwrap"
 version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/pseudo_crate/Cargo.toml b/pseudo_crate/Cargo.toml
index c38dccb..4c4bef8 100644
--- a/pseudo_crate/Cargo.toml
+++ b/pseudo_crate/Cargo.toml
@@ -10,6 +10,7 @@
 acpi = "=5.1.0"
 ahash = "=0.8.11"
 aho-corasick = "=0.7.20"
+android_bp = "=0.3.0"
 android_log-sys = "=0.3.1"
 android_logger = "=0.13.3"
 anes = "=0.2.0"
@@ -91,8 +92,8 @@
 document-features = "=0.2.10"
 downcast = "=0.11.0"
 downcast-rs = "=1.2.1"
-drm = "=0.12.0"
-drm-ffi = "=0.8.0"
+drm = "=0.14.1"
+drm-ffi = "=0.9.0"
 drm-fourcc = "=2.2.0"
 either = "=1.13.0"
 enumn = "=0.1.14"
@@ -108,6 +109,7 @@
 flagset = "=0.4.6"
 flate2 = "=1.0.35"
 fnv = "=1.0.7"
+foldhash = "=0.1.3"
 foreign-types = "=0.3.1"
 foreign-types-shared = "=0.1.0"
 form_urlencoded = "=1.2.1"
@@ -155,6 +157,7 @@
 inotify = "=0.11.0"
 inotify-sys = "=0.1.5"
 intrusive-collections = "=0.9.7"
+is-terminal = "=0.4.13"
 itertools = "=0.14.0"
 itoa = "=1.0.14"
 jni = "=0.21.1"
@@ -316,6 +319,7 @@
 tempfile = "=3.12.0"
 termcolor = "=1.4.1"
 termtree = "=0.4.1"
+textdistance = "=1.1.1"
 textwrap = "=0.16.1"
 thiserror = "=2.0.11"
 thiserror-impl = "=2.0.11"
diff --git a/pseudo_crate/crate-list.txt b/pseudo_crate/crate-list.txt
index 4fe0ee6..c45f0a0 100644
--- a/pseudo_crate/crate-list.txt
+++ b/pseudo_crate/crate-list.txt
@@ -2,6 +2,7 @@
 acpi
 ahash
 aho-corasick
+android_bp
 android_log-sys
 android_logger
 anes
@@ -100,6 +101,7 @@
 flagset
 flate2
 fnv
+foldhash
 foreign-types
 foreign-types-shared
 form_urlencoded
@@ -148,6 +150,7 @@
 inotify-sys
 instant
 intrusive-collections
+is-terminal
 itertools
 itoa
 jni
@@ -310,6 +313,7 @@
 tempfile
 termcolor
 termtree
+textdistance
 textwrap
 thiserror
 thiserror-impl