Snap for 12397640 from f788bf442b931ff9192498ab69fdfaea7f54e506 to 24Q4-release

Change-Id: I5322e982a3e769b34b974a49ff615c3414496dbe
diff --git a/.bazelci/presubmit.yml b/.bazelci/presubmit.yml
index 8649797..b778ac4 100644
--- a/.bazelci/presubmit.yml
+++ b/.bazelci/presubmit.yml
@@ -59,7 +59,7 @@
   build_targets: ["..."]
   test_targets: ["..."]
 .coverage_targets_example_bzlmod: &coverage_targets_example_bzlmod
-  coverage_targets: ["//:test"]
+  coverage_targets: ["..."]
 .coverage_targets_example_bzlmod_build_file_generation: &coverage_targets_example_bzlmod_build_file_generation
   coverage_targets: ["//:bzlmod_build_file_generation_test"]
 .coverage_targets_example_multi_python: &coverage_targets_example_multi_python
@@ -182,6 +182,7 @@
     platform: rbe_ubuntu1604
     test_flags:
       - "--test_tag_filters=-integration-test,-acceptance-test"
+      - "--extra_toolchains=@buildkite_config//config:cc-toolchain"
 
   integration_test_build_file_generation_ubuntu_minimum_supported_workspace:
     <<: *minimum_supported_version
@@ -218,7 +219,7 @@
   integration_test_bzlmod_ubuntu_min:
     <<: *minimum_supported_version
     <<: *reusable_build_test_all
-    <<: *coverage_targets_example_bzlmod
+    coverage_targets: ["//:test"]
     name: "examples/bzlmod: Ubuntu, minimum Bazel"
     working_directory: examples/bzlmod
     platform: ubuntu2004
@@ -515,18 +516,6 @@
     - "bazel run //:os_specific_requirements.update"
     - "git diff --exit-code"
 
-  integration_test_pip_repository_entry_points_macos_workspace:
-    <<: *reusable_build_test_all
-    <<: *common_workspace_flags
-    name: "pip_repository_entry_points: macOS, workspace"
-    working_directory: tests/integration/pip_repository_entry_points
-    platform: macos
-  integration_test_pip_repository_entry_points_windows_workspace:
-    <<: *reusable_build_test_all
-    <<: *common_workspace_flags
-    name: "pip_repository_entry_points: Windows, workspace"
-    working_directory: tests/integration/pip_repository_entry_points
-    platform: windows
 
   integration_test_ignore_root_user_error_macos_workspace:
     <<: *reusable_build_test_all
diff --git a/.bazelignore b/.bazelignore
index 9bcb523..9539173 100644
--- a/.bazelignore
+++ b/.bazelignore
@@ -25,4 +25,4 @@
 examples/py_proto_library/bazel-py_proto_library
 tests/integration/compile_pip_requirements/bazel-compile_pip_requirements
 tests/integration/ignore_root_user_error/bazel-ignore_root_user_error
-tests/integration/pip_repository_entry_points/bazel-pip_repository_entry_points
+tests/integration/local_toolchains/bazel-local_toolchains
diff --git a/.bazelrc b/.bazelrc
index 250377a..b484751 100644
--- a/.bazelrc
+++ b/.bazelrc
@@ -4,8 +4,8 @@
 # (Note, we cannot use `common --deleted_packages` because the bazel version command doesn't support it)
 # To update these lines, execute
 # `bazel run @rules_bazel_integration_test//tools:update_deleted_packages`
-build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/dupe_requirements,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/ignore_root_user_error,tests/integration/pip_repository_entry_points,tests/integration/py_cc_toolchain_registered
-query --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/dupe_requirements,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/ignore_root_user_error,tests/integration/pip_repository_entry_points,tests/integration/py_cc_toolchain_registered
+build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/python/private,gazelle/pythonconfig,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered
+query --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/python/private,gazelle/pythonconfig,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered
 
 test --test_output=errors
 
@@ -17,9 +17,11 @@
 # Python targets as required.
 build --incompatible_default_to_explicit_init_py
 
+# Ensure ongoing compatibility with this flag.
+common --incompatible_disallow_struct_provider_syntax
+
 # Windows makes use of runfiles for some rules
 build --enable_runfiles
-startup --windows_enable_symlinks
 
 # Make Bazel 6 use bzlmod by default
 common --enable_bzlmod
@@ -30,3 +32,5 @@
 build:rtd --stamp
 # Some bzl files contain repos only available under bzlmod
 build:rtd --enable_bzlmod
+
+build --lockfile_mode=update
diff --git a/.bcr/gazelle/presubmit.yml b/.bcr/gazelle/presubmit.yml
index 037055d..659beab 100644
--- a/.bcr/gazelle/presubmit.yml
+++ b/.bcr/gazelle/presubmit.yml
@@ -16,10 +16,12 @@
   module_path: "../examples/bzlmod_build_file_generation"
   matrix:
     platform: ["debian11", "macos", "ubuntu2004", "windows"]
+    bazel: [6.x, 7.x]
   tasks:
     run_tests:
       name: "Run test module"
       platform: ${{ platform }}
+      bazel: ${{ bazel }}
       build_targets:
         - "//..."
         - ":modules_map"
diff --git a/.bcr/presubmit.yml b/.bcr/presubmit.yml
index 252df6b..875ea93 100644
--- a/.bcr/presubmit.yml
+++ b/.bcr/presubmit.yml
@@ -16,9 +16,16 @@
   module_path: "examples/bzlmod"
   matrix:
     platform: ["debian11", "macos", "ubuntu2004", "windows"]
+    bazel: [6.x, 7.x]
   tasks:
     run_tests:
       name: "Run test module"
       platform: ${{ platform }}
+      bazel: ${{ bazel }}
+      test_flags:
+        - "--keep_going"
+          # Without these cxxopts, BCR's Mac builds fail
+        - '--cxxopt=-std=c++14'
+        - '--host_cxxopt=-std=c++14'
       test_targets:
         - "//..."
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 3449bcf..6a8a48f 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1,15 +1,11 @@
 # NB: Last matching rule takes precedence in CODEOWNERS.
 
-* @rickeylev
+* @rickeylev @aignas
 
 # Directory containing the Gazelle extension and Go code.
 /gazelle/ @f0rmiga
 /examples/build_file_generation/ @f0rmiga
 
-# Toolchains
-/python/repositories.bzl @f0rmiga
-/python/private/toolchains_repo.bzl @f0rmiga
-/python/tests/toolchains/ @f0rmiga
-
-# pip_parse related code
-/python/pip_install/ @hrfuller
+# PyPI integration related code
+/python/private/pypi/ @aignas @groodt
+/tests/pypi/ @aignas @groodt
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..9632f4e
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,17 @@
+---
+version: 2
+updates:
+  # Maintain dependencies for GitHub Actions
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: "pip"
+    directories:
+      # Maintain dependencies for our tools
+      - "/docs/sphinx"
+      - "/tools/publish"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 3
diff --git a/.github/workflows/mypy.yaml b/.github/workflows/mypy.yaml
index b0d0cdf..4297751 100644
--- a/.github/workflows/mypy.yaml
+++ b/.github/workflows/mypy.yaml
@@ -18,7 +18,7 @@
     runs-on: ubuntu-20.04
     steps:
       # Checkout the code
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - uses: jpetrucciani/mypy-check@master
         with:
           requirements: 1.6.0
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index eb23bc8..96624b3 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -25,7 +25,7 @@
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
       - name: Create release archive and notes
         run: .github/workflows/create_archive_and_notes.sh
       - name: Publish wheel dist
@@ -37,7 +37,7 @@
           TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
         run: bazel run --stamp --embed_label=${{ github.ref_name }} //python/runfiles:wheel.publish
       - name: Release
-        uses: softprops/action-gh-release@v1
+        uses: softprops/action-gh-release@v2
         with:
           # Use GH feature to populate the changelog automatically
           generate_release_notes: true
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index 8d388e2..d37121b 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -26,7 +26,7 @@
     runs-on: ubuntu-latest
 
     steps:
-    - uses: actions/stale@v3
+    - uses: actions/stale@v9
       with:
         repo-token: ${{ secrets.GITHUB_TOKEN }}
 
diff --git a/.gitignore b/.gitignore
index bf901e2..863b0e9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,5 +43,12 @@
 *.swp
 *.swo
 
+# CLion
+.clwb
+
 # Python cache
 **/__pycache__/
+
+# MODULE.bazel.lock is ignored for now as per recommendation from upstream.
+# See https://github.com/bazelbuild/bazel/issues/20369
+MODULE.bazel.lock
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 060cb9c..54aa043 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -41,7 +41,7 @@
     hooks:
       - id: update-deleted-packages
         name: Update deleted packages
-        language: script
+        language: system
         entry: bazel run @rules_bazel_integration_test//tools:update_deleted_packages
         files: ^((examples|tests)/.*/(MODULE.bazel|WORKSPACE|WORKSPACE.bzlmod|BUILD.bazel)|.bazelrc)$
         pass_filenames: false
diff --git a/BUILD.bazel b/BUILD.bazel
index cd4cbc5..038b56a 100644
--- a/BUILD.bazel
+++ b/BUILD.bazel
@@ -35,11 +35,11 @@
         "BUILD.bazel",
         "MODULE.bazel",
         "WORKSPACE",
+        "WORKSPACE.bzlmod",
         "internal_deps.bzl",
         "internal_setup.bzl",
         "version.bzl",
         "//python:distribution",
-        "//python/pip_install:distribution",
         "//tools:distribution",
         "@rules_python_gazelle_plugin//:distribution",
     ],
diff --git a/CHANGELOG.md b/CHANGELOG.md
index af61b44..1b457c4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,6 @@
+:::{default-domain} bzl
+:::
+
 # rules_python Changelog
 
 This is a human-friendly changelog in a keepachangelog.com style format.
@@ -19,14 +22,526 @@
 
 ## Unreleased
 
-[0.XX.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.XX.0
+[x.x.x]: https://github.com/bazelbuild/rules_python/releases/tag/x.x.x
 
 ### Changed
+* Nothing yet
+
+### Added
+* Nothing yet
+
+### Removed
+* Nothing yet
+
+## [0.35.0] - 2024-08-15
+
+[0.35.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.35.0
+
+### Changed
+* (whl_library) A better log message when the wheel is built from an sdist or
+  when the wheel is downloaded using `download_only` feature to aid debugging.
+* (gazelle): Simplify and make gazelle_python.yaml have only top level package name.
+  It would work well in cases to reduce merge conflicts.
+* (toolchains): Change some old toolchain versions to use [20240726] release to
+  include dependency updates `3.8.19`, `3.9.19`, `3.10.14`, `3.11.9`
+* (toolchains): Bump default toolchain versions to:
+    * `3.12 -> 3.12.4`
+* (rules) `PYTHONSAFEPATH` is inherited from the calling environment to allow
+  disabling it (Requires {obj}`--bootstrap_impl=script`)
+  ([#2060](https://github.com/bazelbuild/rules_python/issues/2060)).
+
+### Fixed
+* (rules) `compile_pip_requirements` now sets the `USERPROFILE` env variable on
+  Windows to work around an issue where `setuptools` fails to locate the user's
+  home directory.
+* (rules) correctly handle absolute URLs in parse_simpleapi_html.bzl.
+* (rules) Fixes build targets linking against `@rules_python//python/cc:current_py_cc_libs`
+  in host platform builds on macOS, by editing the `LC_ID_DYLIB` field of the hermetic interpreter's
+  `libpython3.x.dylib` using `install_name_tool`, setting it to its absolute path under Bazel's
+  execroot.
+* (rules) Signals are properly received when using {obj}`--bootstrap_impl=script`
+  (for non-zip builds).
+  ([#2043](https://github.com/bazelbuild/rules_python/issues/2043))
+* (rules) Fixes Python builds when the `--build_python_zip` is set to `false` on
+  Windows. See [#1840](https://github.com/bazelbuild/rules_python/issues/1840).
+* (rules) Fixes Mac + `--build_python_zip` + {obj}`--bootstrap_impl=script`
+  ([#2030](https://github.com/bazelbuild/rules_python/issues/2030)).
+* (rules) User dependencies come before runtime site-packages when using
+  {obj}`--bootstrap_impl=script`.
+  ([#2064](https://github.com/bazelbuild/rules_python/issues/2064)).
+* (rules) Version-aware rules now return both `@_builtins` and `@rules_python`
+  providers instead of only one.
+  ([#2114](https://github.com/bazelbuild/rules_python/issues/2114)).
+* (pip) Fixed pypi parse_simpleapi_html function for feeds with package metadata
+  containing ">" sign
+* (toolchains) Added missing executable permission to
+  `//python/runtime_env_toolchains` interpreter script so that it is runnable.
+  ([#2085](https://github.com/bazelbuild/rules_python/issues/2085)).
+* (pip) Correctly use the `sdist` downloaded by the bazel downloader when using
+  `experimental_index_url` feature. Fixes
+  [#2091](https://github.com/bazelbuild/rules_python/issues/2090).
+* (gazelle) Make `gazelle_python_manifest.update` manual to avoid unnecessary
+  network behavior.
+* (bzlmod): The conflicting toolchains during `python` extension will no longer
+  cause warnings by default. In order to see the warnings for diagnostic purposes
+  set the env var `RULES_PYTHON_REPO_DEBUG_VERBOSITY` to one of `INFO`, `DEBUG` or `TRACE`.
+  Fixes [#1818](https://github.com/bazelbuild/rules_python/issues/1818).
+* (runfiles) Make runfiles lookups work for the situation of Bazel 7,
+  Python 3.9 (or earlier, where safepath isn't present), and the Rlocation call
+  in the same directory as the main file.
+  Fixes [#1631](https://github.com/bazelbuild/rules_python/issues/1631).
+
+### Added
+* (rules) `compile_pip_requirements` supports multiple requirements input files as `srcs`.
+* (rules) `PYTHONSAFEPATH` is inherited from the calling environment to allow
+  disabling it (Requires {obj}`--bootstrap_impl=script`)
+  ([#2060](https://github.com/bazelbuild/rules_python/issues/2060)).
+* (gazelle) Added `python_generation_mode_per_package_require_test_entry_point`
+  in order to better accommodate users who use a custom macro,
+  [`pytest-bazel`][pytest_bazel], [rules_python_pytest] or `rules_py`
+  [py_test_main] in order to integrate with `pytest`. Currently the default
+  flag value is set to `true` for backwards compatible behaviour, but in the
+  future the flag will be flipped to `false` by default.
+* (toolchains) New Python versions available: `3.12.4` using the [20240726] release.
+* (pypi) Support env markers in requirements files. Note, that this means that
+  if your requirements files contain env markers, the Python interpreter will
+  need to be run during bzlmod phase to evaluate them. This may incur
+  downloading an interpreter (for hermetic-based builds) or cause non-hermetic
+  behavior (if using a system Python).
+
+[rules_python_pytest]: https://github.com/caseyduquettesc/rules_python_pytest
+[py_test_main]: https://docs.aspect.build/rulesets/aspect_rules_py/docs/rules/#py_pytest_main
+[pytest_bazel]: https://pypi.org/project/pytest-bazel
+[20240726]: https://github.com/indygreg/python-build-standalone/releases/tag/20240726
+
+
+## [0.34.0] - 2024-07-04
+
+[0.34.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.34.0
+
+### Changed
+* `protobuf`/`com_google_protobuf` dependency bumped to `v24.4`
+* (bzlmod): optimize the creation of config settings used in pip to
+  reduce the total number of targets in the hub repo.
+* (toolchains) The exec tools toolchain now finds its interpreter by reusing
+  the regular interpreter toolchain. This avoids having to duplicate specifying
+  where the runtime for the exec tools toolchain is.
+* (toolchains) ({obj}`//python:autodetecting_toolchain`) is deprecated. It is
+  replaced by {obj}`//python/runtime_env_toolchains:all`. The old target will be
+  removed in a future release.
+
+### Fixed
+* (bzlmod): When using `experimental_index_url` the `all_requirements`,
+  `all_whl_requirements` and `all_data_requirements` will now only include
+  common packages that are available on all target platforms. This is to ensure
+  that packages that are only present for some platforms are pulled only via
+  the `deps` of the materialized `py_library`. If you would like to include
+  platform specific packages, using a `select` statement with references to the
+  specific package will still work (e.g.
+  ```
+  my_attr = all_requirements + select(
+      {
+          "@platforms//os:linux": ["@pypi//foo_available_only_on_linux"],
+          "//conditions:default": [],
+      }
+  )
+  ```
+* (bzlmod): Targets in `all_requirements` now use the same form as targets returned by the `requirement` macro.
+* (rules) Auto exec groups are enabled. This allows actions run by the rules,
+  such as precompiling, to pick an execution platform separately from what
+  other toolchains support.
+* (providers) {obj}`PyRuntimeInfo` doesn't require passing the
+  `interpreter_version_info` arg.
+* (bzlmod) Correctly pass `isolated`, `quiet` and `timeout` values to `whl_library`
+  and drop the defaults from the lock file.
+* (whl_library) Correctly handle arch-specific dependencies when we encounter a
+  platform specific wheel and use `experimental_target_platforms`.
+  Fixes [#1996](https://github.com/bazelbuild/rules_python/issues/1996).
+* (rules) The first element of the default outputs is now the executable again.
+* (pip) Fixed crash when pypi packages lacked a sha (e.g. yanked packages)
+
+### Added
+* (toolchains) {obj}`//python/runtime_env_toolchains:all`, which is a drop-in
+  replacement for the "autodetecting" toolchain.
+* (gazelle) Added new `python_label_convention` and `python_label_normalization` directives. These directive
+  allows altering default Gazelle label format to third-party dependencies useful for re-using Gazelle plugin
+  with other rules, including `rules_pycross`. See [#1939](https://github.com/bazelbuild/rules_python/issues/1939).
+
+### Removed
+* (pip): Removes the `entrypoint` macro that was replaced by `py_console_script_binary` in 0.26.0.
+
+## [0.33.2] - 2024-06-13
+
+[0.33.2]: https://github.com/bazelbuild/rules_python/releases/tag/0.33.2
+
+### Fixed
+* (toolchains) The {obj}`exec_tools_toolchain_type` is disabled by default.
+  To enable it, set {obj}`--//python/config_settings:exec_tools_toolchain=enabled`.
+  This toolchain must be enabled for precompilation to work. This toolchain will
+  be enabled by default in a future release.
+  Fixes [#1967](https://github.com/bazelbuild/rules_python/issues/1967).
+
+## [0.33.1] - 2024-06-13
+
+[0.33.1]: https://github.com/bazelbuild/rules_python/releases/tag/0.33.1
+
+### Fixed
+* (py_binary) Fix building of zip file when using `--build_python_zip`
+  argument. Fixes [#1954](https://github.com/bazelbuild/rules_python/issues/1954).
+
+## [0.33.0] - 2024-06-12
+
+[0.33.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.33.0
+
+### Changed
+* (deps) Upgrade the `pip_install` dependencies to pick up a new version of pip.
+* (toolchains) Optional toolchain dependency: `py_binary`, `py_test`, and
+  `py_library` now depend on the `//python:exec_tools_toolchain_type` for build
+  tools.
+* (deps): Bumped `bazel_skylib` to 1.6.1.
+* (bzlmod): The `python` and internal `rules_python` extensions have been
+  marked as `reproducible` and will not include any lock file entries from now
+  on.
+* (gazelle): Remove gazelle plugin's python deps and make it hermetic.
+  Introduced a new Go-based helper leveraging tree-sitter for syntax analysis.
+  Implemented the use of `pypi/stdlib-list` for standard library module verification.
+* (pip.parse): Do not ignore yanked packages when using `experimental_index_url`.
+  This is to mimic what `uv` is doing. We will print a warning instead.
+* (pip.parse): Add references to all supported wheels when using `experimental_index_url`
+  to allow correctly fetching the wheels for the right platform. See the
+  updated docs on how to use the feature. This is work towards addressing
+  [#735](https://github.com/bazelbuild/rules_python/issues/735) and
+  [#260](https://github.com/bazelbuild/rules_python/issues/260). The spoke
+  repository names when using this flag will have a structure of
+  `{pip_hub_prefix}_{wheel_name}_{py_tag}_{abi_tag}_{platform_tag}_{sha256}`,
+  which is an implementation detail which should not be relied on and is there
+  purely for better debugging experience.
+* (bzlmod) The `pythons_hub//:interpreters.bzl` no longer has platform-specific
+  labels which were left there for compatibility reasons. Move to
+  `python_{version}_host` keys if you would like to have access to a Python
+  interpreter that can be used in a repository rule context.
+
+### Fixed
+* (gazelle) Remove `visibility` from `NonEmptyAttr`.
+  Now empty(have no `deps/main/srcs/imports` attr) `py_library/test/binary` rules will
+  be automatically deleted correctly. For example, if `python_generation_mode`
+  is set to package, when `__init__.py` is deleted, the `py_library` generated
+  for this package before will be deleted automatically.
+* (whl_library): Use `is_python_config_setting` to correctly handle multi-python
+  version dependency select statements when the `experimental_target_platforms`
+  includes the Python ABI. The default python version case within the select is
+  also now handled correctly, stabilizing the implementation.
+* (gazelle) Fix Gazelle failing on Windows with
+  "panic: runtime error: invalid memory address or nil pointer dereference"
+* (bzlmod) remove `pip.parse(annotations)` attribute as it is unused and has been
+  replaced by whl_modifications.
+* (pip) Correctly select wheels when the python tag includes minor versions.
+  See ([#1930](https://github.com/bazelbuild/rules_python/issues/1930))
+* (pip.parse): The lock file is now reproducible on any host platform if the
+  `experimental_index_url` is not used by any of the modules in the dependency
+  chain. To make the lock file identical on each `os` and `arch`, please use
+  the `experimental_index_url` feature which will fetch metadata from PyPI or a
+  different private index and write the contents to the lock file. Fixes
+  [#1643](https://github.com/bazelbuild/rules_python/issues/1643).
+* (pip.parse): Install `yanked` packages and print a warning instead of
+  ignoring them. This better matches the behaviour of `uv pip install`.
+* (toolchains): Now matching of the default hermetic toolchain is more robust
+  and explicit and should fix rare edge-cases where the host toolchain
+  autodetection would match a different toolchain than expected. This may yield
+  to toolchain selection failures when the python toolchain is not registered,
+  but is requested via `//python/config_settings:python_version` flag setting.
+* (doc) Fix the `WORKSPACE` requirement vendoring example. Fixes
+  [#1918](https://github.com/bazelbuild/rules_python/issues/1918).
+
+### Added
+* (rules) Precompiling Python source at build time is available, but is
+  disabled by default, for now. Set
+  `@rules_python//python/config_settings:precompile=enabled` to enable it
+  by default. A subsequent release will enable it by default. See the
+  [Precompiling docs][precompile-docs] and API reference docs for more
+  information on precompiling. Note this requires Bazel 7+ and the Pystar rule
+  implementation enabled.
+  ([#1761](https://github.com/bazelbuild/rules_python/issues/1761))
+* (rules) Attributes and flags to control precompile behavior: `precompile`,
+  `precompile_optimize_level`, `precompile_source_retention`,
+  `precompile_invalidation_mode`, and `pyc_collection`
+* (toolchains) The target runtime toolchain (`//python:toolchain_type`) has
+  two new optional attributes: `pyc_tag` (tells the pyc filename infix to use) and
+  `implementation_name` (tells the Python implementation name).
+* (toolchains) A toolchain type for build tools has been added:
+  `//python:exec_tools_toolchain_type`.
+* (providers) `PyInfo` has two new attributes: `direct_pyc_files` and
+  `transitive_pyc_files`, which tell the pyc files a target makes available
+  directly and transitively, respectively.
+* `//python:features.bzl` added to allow easy feature-detection in the future.
+* (pip) Allow specifying the requirements by (os, arch) and add extra
+  validations when parsing the inputs. This is a non-breaking change for most
+  users unless they have been passing multiple `requirements_*` files together
+  with `extra_pip_args = ["--platform=manylinux_2_4_x86_64"]`, that was an
+  invalid usage previously but we were not failing the build. From now on this
+  is explicitly disallowed.
+* (toolchains) Added riscv64 platform definition for python toolchains.
+* (gazelle) The `python_visibility` directive now supports the `$python_root$`
+  placeholder, just like the `python_default_visibility` directive does.
+* (rules) A new bootstrap implementation that doesn't require a system Python
+  is available. It can be enabled by setting
+  {obj}`--@rules_python//python/config_settings:bootstrap_impl=script`. It
+  will become the default in a subsequent release.
+  ([#691](https://github.com/bazelbuild/rules_python/issues/691))
+* (providers) `PyRuntimeInfo` has two new attributes:
+  {obj}`PyRuntimeInfo.stage2_bootstrap_template` and
+  {obj}`PyRuntimeInfo.zip_main_template`.
+* (toolchains) A replacement for the Bazel-builtin autodetecting toolchain is
+  available. The `//python:autodetecting_toolchain` alias now uses it.
+* (pip): Support fetching and using the wheels for other platforms. This
+  supports customizing whether the linux wheels are pulled for `musl` or
+  `glibc`, whether `universal2` or arch-specific MacOS wheels are preferred and
+  it also allows to select a particular `libc` version. All of this is done via
+  the `string_flags` in `@rules_python//python/config_settings`. If there are
+  no wheels that are supported for the target platform, `rules_python` will
+  fallback onto building the `sdist` from source. This behaviour can be
+  disabled if desired using one of the available string flags as well.
+* (whl_filegroup) Added a new `whl_filegroup` rule to extract files from a wheel file.
+  This is useful to extract headers for use in a `cc_library`.
+
+[precompile-docs]: /precompiling
+
+## [0.32.2] - 2024-05-14
+
+[0.32.2]: https://github.com/bazelbuild/rules_python/releases/tag/0.32.2
 
 ### Fixed
 
+* Workaround existence of infinite symlink loops on case insensitive filesystems when targeting linux platforms with recent Python toolchains. Works around an upstream [issue][indygreg-231]. Fixes [#1800][rules_python_1800].
+
+[indygreg-231]: https://github.com/indygreg/python-build-standalone/issues/231
+[rules_python_1800]: https://github.com/bazelbuild/rules_python/issues/1800
+
+## [0.32.0] - 2024-05-12
+
+[0.32.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.32.0
+
+### Changed
+
+* (bzlmod): The `MODULE.bazel.lock` `whl_library` rule attributes are now
+  sorted in the attributes section. We are also removing values that are not
+  default in order to reduce the size of the lock file.
+* (coverage) Bump `coverage.py` to [7.4.3](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst#version-743--2024-02-23).
+* (deps): Bumped `bazel_features` to 1.9.1 to detect optional support for
+  non-blocking downloads.
+* (deps): Updated `pip_tools` to >= 7.4.0
+* (toolchains): Change some old toolchain versions to use [20240224] release to
+  include security fixes `3.8.18`, `3.9.18` and `3.10.13`
+* (toolchains): Bump default toolchain versions to:
+    * `3.8 -> 3.8.19`
+    * `3.9 -> 3.9.19`
+    * `3.10 -> 3.10.14`
+    * `3.11 -> 3.11.9`
+    * `3.12 -> 3.12.3`
+
+### Fixed
+
+* (whl_library): Fix the experimental_target_platforms overriding for platform
+  specific wheels when the wheels are for any python interpreter version. Fixes
+  [#1810](https://github.com/bazelbuild/rules_python/issues/1810).
+* (whl_library): Stop generating duplicate dependencies when encountering
+  duplicates in the METADATA. Fixes
+  [#1873](https://github.com/bazelbuild/rules_python/issues/1873).
+* (gazelle) In `project` or `package` generation modes, do not generate `py_test`
+  rules when there are no test files and do not set `main = "__test__.py"` when
+  that file doesn't exist.
+* (whl_library) The group redirection is only added when the package is part of
+  the group potentially fixing aspects that want to traverse a `py_library` graph.
+  Fixes [#1760](https://github.com/bazelbuild/rules_python/issues/1760).
+* (bzlmod) Setting a particular micro version for the interpreter and the
+  `pip.parse` extension is now possible, see the
+  `examples/pip_parse/MODULE.bazel` for how to do it.
+  See [#1371](https://github.com/bazelbuild/rules_python/issues/1371).
+* (refactor) The pre-commit developer workflow should now pass `isort` and `black`
+  checks (see [#1674](https://github.com/bazelbuild/rules_python/issues/1674)).
+
 ### Added
 
+* (toolchains) Added armv7 platform definition for python toolchains.
+* (toolchains) New Python versions available: `3.11.8`, `3.12.2` using the [20240224] release.
+* (toolchains) New Python versions available: `3.8.19`, `3.9.19`, `3.10.14`, `3.11.9`, `3.12.3` using
+  the [20240415] release.
+* (gazelle) Added a new `python_visibility` directive to control visibility
+  of generated targets by appending additional visibility labels.
+* (gazelle) Added a new `python_default_visibility` directive to control the
+  _default_ visibility of generated targets. See the [docs][python_default_visibility]
+  for details.
+* (gazelle) Added a new `python_test_file_pattern` directive. This directive tells
+  gazelle which python files should be mapped to the `py_test` rule. See the
+  [original issue][test_file_pattern_issue] and the [docs][test_file_pattern_docs]
+  for details.
+* (wheel) Add support for `data_files` attributes in py_wheel rule
+  ([#1777](https://github.com/bazelbuild/rules_python/issues/1777))
+* (py_wheel) `bzlmod` installations now provide a `twine` setup for the default
+  Python toolchain in `rules_python` for version 3.11.
+* (bzlmod) New `experimental_index_url`, `experimental_extra_index_urls` and
+  `experimental_index_url_overrides` to `pip.parse` for using the bazel
+  downloader. If you see any issues, report in
+  [#1357](https://github.com/bazelbuild/rules_python/issues/1357). The URLs for
+  the whl and sdist files will be written to the lock file. Controlling whether
+  the downloading of metadata is done in parallel can be done using
+  `parallel_download` attribute.
+* (gazelle) Add a new annotation `include_dep`. Also add documentation for
+  annotations to `gazelle/README.md`.
+* (deps): `rules_python` depends now on `rules_cc` 0.0.9
+* (pip_parse): A new flag `use_hub_alias_dependencies` has been added that is going
+  to become default in the next release. This makes use of `dep_template` flag
+  in the `whl_library` rule. This also affects the
+  `experimental_requirement_cycles` feature where the dependencies that are in
+  a group would be only accessible via the hub repo aliases. If you still
+  depend on legacy labels instead of the hub repo aliases and you use the
+  `experimental_requirement_cycles`, now is a good time to migrate.
+
+[python_default_visibility]: gazelle/README.md#directive-python_default_visibility
+[test_file_pattern_issue]: https://github.com/bazelbuild/rules_python/issues/1816
+[test_file_pattern_docs]: gazelle/README.md#directive-python_test_file_pattern
+[20240224]: https://github.com/indygreg/python-build-standalone/releases/tag/20240224
+[20240415]: https://github.com/indygreg/python-build-standalone/releases/tag/20240415
+
+
+## [0.31.0] - 2024-02-12
+
+[0.31.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.31.0
+
+### Changed
+
+* For Bazel 7, the core rules and providers are now implemented in rules_python
+  directly and the rules bundled with Bazel are not used. Bazel 6 and earlier
+  continue to use the Bazel builtin symbols. Of particular note, this means,
+  under Bazel 7, the builtin global symbol `PyInfo` is **not** the same as what
+  is loaded from rules_python. The same is true of `PyRuntimeInfo`.
+
+## [0.30.0] - 2024-02-12
+
+[0.30.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.30.0
+
+### Changed
+
+* (toolchains) Windows hosts always ignore pyc files in the downloaded runtimes.
+  This fixes issues due to pyc files being created at runtime and affecting the
+  definition of what files were considered part of the runtime.
+
+* (pip_parse) Added the `envsubst` parameter, which enables environment variable
+  substitutions in the `extra_pip_args` attribute.
+
+* (pip_repository) Added the `envsubst` parameter, which enables environment
+  variable substitutions in the `extra_pip_args` attribute.
+
+### Fixed
+
+* (bzlmod) pip.parse now does not fail with an empty `requirements.txt`.
+
+* (py_wheel) Wheels generated by `py_wheel` now preserve executable bits when
+  being extracted by `installer` and/or `pip`.
+
+* (coverage) During the running of lcov, the stdout/stderr was causing test
+  failures.  By default, suppress output when generating lcov.  This can be
+  overridden by setting 'VERBOSE_COVERAGE'.  This change only affects bazel
+  7.x.x and above.
+
+* (toolchain) Changed the `host_toolchain` to symlink all files to support
+  Windows host environments without symlink support.
+
+* (PyRuntimeInfo) Switch back to builtin PyRuntimeInfo for Bazel 6.4 and when
+  pystar is disabled. This fixes an error about `target ... does not have ...
+  PyRuntimeInfo`.
+  ([#1732](https://github.com/bazelbuild/rules_python/issues/1732))
+
+### Added
+
+* (py_wheel) Added `requires_file` and `extra_requires_files` attributes.
+
+* (whl_library) *experimental_target_platforms* now supports specifying the
+  Python version explicitly and the output `BUILD.bazel` file will be correct
+  irrespective of the python interpreter that is generating the file and
+  extracting the `whl` distribution. Multiple python target versions can be
+  specified and the code generation will generate version specific dependency
+  closures but that is not yet ready to be used and may break the build if
+  the default python version is not selected using
+  `common --@rules_python//python/config_settings:python_version=X.Y.Z`.
+
+* New Python versions available: `3.11.7`, `3.12.1` using
+  https://github.com/indygreg/python-build-standalone/releases/tag/20240107.
+
+* (toolchain) Allow setting `x.y` as the `python_version` parameter in
+  the version-aware `py_binary` and `py_test` rules. This allows users to
+  use the same rule import for testing with specific Python versions and
+  rely on toolchain configuration and how the latest version takes precedence
+  if e.g. `3.8` is selected. That also simplifies `.bazelrc` for any users
+  that set the default `python_version` string flag in that way.
+
+* (toolchain) The runtime's shared libraries (libpython.so et al) can be
+  accessed using `@rules_python//python/cc:current_py_cc_libs`. This uses
+  toolchain resolution, so the files are from the same runtime used to run a
+  target. If you were previously using e.g. `@python_3_11//:libpython`, then
+  switch to `:current_py_cc_libs` for looser coupling to the underlying runtime
+  repo implementation.
+
+* (repo rules) The environment variable `RULES_PYTHON_REPO_DEBUG=1` can be
+  set to make repository rules log detailed information about what they're
+  up to.
+
+* (coverage) Add support for python 3.12 and bump `coverage.py` to
+  7.4.1.
+
+
+## [0.29.0] - 2024-01-22
+
+[0.29.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.29.0
+
+### Changed
+
+* **BREAKING** The deprecated `incompatible_generate_aliases` feature flags
+  from `pip_parse` and `gazelle` got removed. They had been flipped to `True`
+  in 0.27.0 release.
+* **BREAKING** (wheel) The `incompatible_normalize_name` and
+  `incompatible_normalize_version` flags have been removed. They had been
+  flipped to `True` in 0.27.0 release.
+* (bzlmod) The pip hub repository now uses the newly introduced config settings
+  using the `X.Y` python version notation. This improves cross module
+  interoperability and allows to share wheels built by interpreters using
+  different patch versions.
+
+### Fixed
+
+* (bzlmod pip.parse) Use a platform-independent reference to the interpreter
+  pip uses. This reduces (but doesn't eliminate) the amount of
+  platform-specific content in `MODULE.bazel.lock` files; Follow
+  [#1643](https://github.com/bazelbuild/rules_python/issues/1643) for removing
+  platform-specific content in `MODULE.bazel.lock` files.
+
+* (wheel) The stamp variables inside the distribution name are no longer
+  lower-cased when normalizing under PEP440 conventions.
+
+### Added
+
+* (toolchains) `python_register_toolchains` now also generates a repository
+  that is suffixed with `_host`, that has a single label `:python` that is a
+  symlink to the python interpreter for the host platform. The intended use is
+  mainly in `repository_rule`, which are always run using `host` platform
+  Python. This means that `WORKSPACE` users can now copy the `requirements.bzl`
+  file for vendoring as seen in the updated `pip_parse_vendored` example.
+
+* (runfiles) `rules_python.python.runfiles.Runfiles` now has a static `Create`
+  method to make imports more ergonomic. Users should only need to import the
+  `Runfiles` object to locate runfiles.
+
+* (toolchains) `PyRuntimeInfo` now includes a `interpreter_version_info` field
+  that contains the static version information for the given interpreter.
+  This can be set via `py_runtime` when registering an interpreter toolchain,
+  and will be done automatically for the builtin interpreter versions registered via
+  `python_register_toolchains`.
+  Note that this is only available on the Starlark implementation of the provider.
+
+* (config_settings) Added `//python/config_settings:is_python_X.Y` config
+  settings to match on minor Python version. These settings match any `X.Y`
+  version instead of just an exact `X.Y.Z` version.
+
 ## [0.28.0] - 2024-01-07
 
 [0.28.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.28.0
@@ -57,6 +572,14 @@
   from the target are not supported yet.
   ([#1612](https://github.com/bazelbuild/rules_python/issues/1612))
 
+* (gazelle) When `python_generation_mode` is set to `file`, create one `py_binary`
+  target for each file with `if __name__ == "__main__"` instead of just one
+  `py_binary` for the whole module.
+
+* (gazelle) the Gazelle manifest integrity field is now optional. If the
+  `requirements` argument to `gazelle_python_manifest` is unset, no integrity
+  field will be generated.
+
 ### Fixed
 
 * (gazelle) The gazelle plugin helper was not working with Python toolchains 3.11
@@ -87,6 +610,9 @@
 * (toolchains) Workspace builds register the py cc toolchain (bzlmod already
   was). This makes e.g. `//python/cc:current_py_cc_headers` Just Work.
   ([#1669](https://github.com/bazelbuild/rules_python/issues/1669))
+* (bzlmod python.toolchain) The value of `ignore_root_user_error` is now decided
+  by the root module only.
+  ([#1658](https://github.com/bazelbuild/rules_python/issues/1658))
 
 ### Added
 
@@ -97,7 +623,6 @@
   attribute for every target in the package. This is enabled through a separate
   directive `python_generation_mode_per_file_include_init`.
 
-
 ## [0.27.0] - 2023-11-16
 
 [0.27.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.27.0
@@ -194,8 +719,6 @@
 * (utils) Added a `pip_utils` struct with a `normalize_name` function to allow users
   to find out how `rules_python` would normalize a PyPI distribution name.
 
-[0.27.0]: https://github.com/bazelbuild/rules_python/releases/tag/0.27.0
-
 ## [0.26.0] - 2023-10-06
 
 ### Changed
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 10d1149..cb123bf 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -175,6 +175,7 @@
   functionality, should also be filed in this repository but without the
   `core-rules` label.
 
+(breaking-changes)=
 ## Breaking Changes
 
 Breaking changes are generally permitted, but we follow a 3-step process for
diff --git a/DEVELOPING.md b/DEVELOPING.md
index 3c9e89d..a70d3b1 100644
--- a/DEVELOPING.md
+++ b/DEVELOPING.md
@@ -2,7 +2,7 @@
 
 ## Updating internal dependencies
 
-1. Modify the `./python/pip_install/tools/requirements.txt` file and run:
+1. Modify the `./python/private/pypi/requirements.txt` file and run:
    ```
    bazel run //tools/private/update_deps:update_pip_deps
    ```
diff --git a/METADATA b/METADATA
index 099312e..e534c8d 100644
--- a/METADATA
+++ b/METADATA
@@ -1,6 +1,6 @@
 # This project was upgraded with external_updater.
-# Usage: tools/external_updater/updater.sh update bazelbuild-rules_python
-# For more info, check https://cs.android.com/android/platform/superproject/+/main:tools/external_updater/README.md
+# Usage: tools/external_updater/updater.sh update external/bazelbuild-rules_python
+# For more info, check https://cs.android.com/android/platform/superproject/main/+/main:tools/external_updater/README.md
 
 name: "bazelbuild-rules_python"
 description: "A repository of Starlark implementation of Python rules in Bazel"
@@ -8,12 +8,12 @@
   license_type: NOTICE
   last_upgrade_date {
     year: 2024
-    month: 7
-    day: 17
+    month: 9
+    day: 19
   }
   identifier {
     type: "Git"
     value: "https://github.com/bazelbuild/rules_python"
-    version: "0.28.0"
+    version: "0.35.0"
   }
 }
diff --git a/MODULE.bazel b/MODULE.bazel
index e89b8ef..c4d0e5f 100644
--- a/MODULE.bazel
+++ b/MODULE.bazel
@@ -4,20 +4,18 @@
     compatibility_level = 1,
 )
 
-bazel_dep(name = "bazel_features", version = "1.1.1")
-bazel_dep(name = "bazel_skylib", version = "1.3.0")
+bazel_dep(name = "bazel_features", version = "1.9.1")
+bazel_dep(name = "bazel_skylib", version = "1.6.1")
+bazel_dep(name = "rules_cc", version = "0.0.9")
 bazel_dep(name = "platforms", version = "0.0.4")
 
 # Those are loaded only when using py_proto_library
-bazel_dep(name = "rules_proto", version = "5.3.0-21.7")
-bazel_dep(name = "protobuf", version = "21.7", repo_name = "com_google_protobuf")
+bazel_dep(name = "rules_proto", version = "6.0.0-rc1")
+bazel_dep(name = "protobuf", version = "24.4", repo_name = "com_google_protobuf")
 
-internal_deps = use_extension("@rules_python//python/private/bzlmod:internal_deps.bzl", "internal_deps")
-internal_deps.install()
+internal_deps = use_extension("//python/private:internal_deps.bzl", "internal_deps")
 use_repo(
     internal_deps,
-    "rules_python_internal",
-    # START: maintained by 'bazel run //tools/private:update_pip_deps'
     "pypi__build",
     "pypi__click",
     "pypi__colorama",
@@ -33,12 +31,12 @@
     "pypi__tomli",
     "pypi__wheel",
     "pypi__zipp",
-    # END: maintained by 'bazel run //tools/private:update_pip_deps'
+    "rules_python_internal",
 )
 
 # We need to do another use_extension call to expose the "pythons_hub"
 # repo.
-python = use_extension("@rules_python//python/extensions:python.bzl", "python")
+python = use_extension("//python/extensions:python.bzl", "python")
 
 # The default toolchain to use if nobody configures a toolchain.
 # NOTE: This is not a stable version. It is provided for convenience, but will
@@ -48,35 +46,63 @@
     is_default = True,
     python_version = "3.11",
 )
-use_repo(python, "pythons_hub")
+use_repo(python, "python_3_11", "python_versions", "pythons_hub")
 
 # This call registers the Python toolchains.
 register_toolchains("@pythons_hub//:all")
 
+#####################
+# Install twine for our own runfiles wheel publishing and allow bzlmod users to use it.
+
+pip = use_extension("//python/private/pypi:pip.bzl", "pip_internal")
+pip.parse(
+    hub_name = "rules_python_publish_deps",
+    python_version = "3.11",
+    requirements_by_platform = {
+        "//tools/publish:requirements.txt": "linux_*",
+        "//tools/publish:requirements_darwin.txt": "osx_*",
+        "//tools/publish:requirements_windows.txt": "windows_*",
+    },
+)
+use_repo(pip, "rules_python_publish_deps")
+
 # ===== DEV ONLY DEPS AND SETUP BELOW HERE =====
 bazel_dep(name = "stardoc", version = "0.6.2", dev_dependency = True, repo_name = "io_bazel_stardoc")
 bazel_dep(name = "rules_bazel_integration_test", version = "0.20.0", dev_dependency = True)
+bazel_dep(name = "rules_testing", version = "0.6.0", dev_dependency = True)
+
+# Extra gazelle plugin deps so that WORKSPACE.bzlmod can continue including it for e2e tests.
+# We use `WORKSPACE.bzlmod` because it is impossible to have dev-only local overrides.
+bazel_dep(name = "rules_go", version = "0.41.0", dev_dependency = True, repo_name = "io_bazel_rules_go")
+bazel_dep(name = "gazelle", version = "0.33.0", dev_dependency = True, repo_name = "bazel_gazelle")
+
+dev_python = use_extension(
+    "//python/extensions:python.bzl",
+    "python",
+    dev_dependency = True,
+)
+dev_python.rules_python_private_testing(
+    register_all_versions = True,
+)
 
 dev_pip = use_extension(
-    "//python/extensions:pip.bzl",
-    "pip",
+    "//python/private/pypi:pip.bzl",
+    "pip_internal",
     dev_dependency = True,
 )
 dev_pip.parse(
-    experimental_requirement_cycles = {
-        "sphinx": [
-            "sphinx",
-            "sphinxcontrib-serializinghtml",
-            "sphinxcontrib-qthelp",
-            "sphinxcontrib-htmlhelp",
-            "sphinxcontrib-devhelp",
-            "sphinxcontrib-applehelp",
-        ],
-    },
     hub_name = "dev_pip",
     python_version = "3.11",
     requirements_lock = "//docs/sphinx:requirements.txt",
 )
+dev_pip.parse(
+    hub_name = "pypiserver",
+    python_version = "3.11",
+    requirements_lock = "//examples/wheel:requirements_server.txt",
+)
+use_repo(dev_pip, "dev_pip", "pypiserver")
+
+# Bazel integration test setup below
 
 bazel_binaries = use_extension(
     "@rules_bazel_integration_test//:extensions.bzl",
@@ -101,3 +127,17 @@
     "build_bazel_bazel_rolling",
     "build_bazel_bazel_self",
 )
+
+# EXPERIMENTAL: This is experimental and may be removed without notice
+uv = use_extension(
+    "//python/uv:extensions.bzl",
+    "uv",
+    dev_dependency = True,
+)
+uv.toolchain(uv_version = "0.2.23")
+use_repo(uv, "uv_toolchains")
+
+register_toolchains(
+    "@uv_toolchains//:all",
+    dev_dependency = True,
+)
diff --git a/README.md b/README.md
index 546af97..d890d70 100644
--- a/README.md
+++ b/README.md
@@ -13,13 +13,9 @@
 
 Examples live in the [examples](examples) directory.
 
-Currently, the core rules build into the Bazel binary, and the symbols in this
-repository are simple aliases. However, we are migrating the rules to Starlark and removing them from the Bazel binary. Therefore, the future-proof way to depend on Python rules is via this repository. See[`Migrating from the Bundled Rules`](#Migrating-from-the-bundled-rules) below.
-
-The core rules are stable. Their implementation in Bazel is subject to Bazel's
+The core rules are stable. Their implementation is subject to Bazel's
 [backward compatibility policy](https://docs.bazel.build/versions/master/backward-compatibility.html).
-Once migrated to rules_python, they may evolve at a different
-rate, but this repository will still follow [semantic versioning](https://semver.org).
+This repository aims to follow [semantic versioning](https://semver.org).
 
 The Bazel community maintains this repository. Neither Google nor the Bazel team provides support for the code. However, this repository is part of the test suite used to vet new Bazel releases. See [How to contribute](CONTRIBUTING.md) page for information on our development workflow.
 
diff --git a/WORKSPACE b/WORKSPACE
index 5631dce..695b0e9 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -42,12 +42,13 @@
 rules_python_internal_setup()
 
 load("//python:repositories.bzl", "python_register_multi_toolchains")
-load("//python:versions.bzl", "MINOR_MAPPING")
+load("//python:versions.bzl", "MINOR_MAPPING", "TOOL_VERSIONS")
 
 python_register_multi_toolchains(
     name = "python",
     default_version = MINOR_MAPPING.values()[-2],
-    python_versions = MINOR_MAPPING.values(),
+    # Integration tests verify each version, so register all of them.
+    python_versions = TOOL_VERSIONS.keys(),
 )
 
 load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file")
@@ -85,7 +86,7 @@
 _py_gazelle_deps()
 
 # This interpreter is used for various rules_python dev-time tools
-load("@python//3.11.6:defs.bzl", "interpreter")
+load("@python//3.11.9:defs.bzl", "interpreter")
 
 #####################
 # Install twine for our own runfiles wheel publishing.
@@ -94,33 +95,32 @@
 load("@rules_python//python:pip.bzl", "pip_parse")
 
 pip_parse(
-    name = "publish_deps",
+    name = "rules_python_publish_deps",
     python_interpreter_target = interpreter,
     requirements_darwin = "//tools/publish:requirements_darwin.txt",
     requirements_lock = "//tools/publish:requirements.txt",
     requirements_windows = "//tools/publish:requirements_windows.txt",
 )
 
-load("@publish_deps//:requirements.bzl", "install_deps")
+load("@rules_python_publish_deps//:requirements.bzl", "install_deps")
 
 install_deps()
 
+pip_parse(
+    name = "pypiserver",
+    python_interpreter_target = interpreter,
+    requirements_lock = "//examples/wheel:requirements_server.txt",
+)
+
+load("@pypiserver//:requirements.bzl", install_pypiserver = "install_deps")
+
+install_pypiserver()
+
 #####################
 # Install sphinx for doc generation.
 
 pip_parse(
     name = "dev_pip",
-    experimental_requirement_cycles = {
-        "sphinx": [
-            "sphinx",
-            "sphinxcontrib-serializinghtml",
-            "sphinxcontrib-qthelp",
-            "sphinxcontrib-htmlhelp",
-            "sphinxcontrib-devhelp",
-            "sphinxcontrib-applehelp",
-        ],
-    },
-    incompatible_generate_aliases = True,
     python_interpreter_target = interpreter,
     requirements_lock = "//docs/sphinx:requirements.txt",
 )
diff --git a/WORKSPACE.bzlmod b/WORKSPACE.bzlmod
new file mode 100644
index 0000000..ca89afe
--- /dev/null
+++ b/WORKSPACE.bzlmod
@@ -0,0 +1,62 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains everything that is needed when using bzlmod
+workspace(name = "rules_python")
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file")
+
+# Used for Bazel CI
+http_archive(
+    name = "bazelci_rules",
+    sha256 = "eca21884e6f66a88c358e580fd67a6b148d30ab57b1680f62a96c00f9bc6a07e",
+    strip_prefix = "bazelci_rules-1.0.0",
+    url = "https://github.com/bazelbuild/continuous-integration/releases/download/rules-1.0.0/bazelci_rules-1.0.0.tar.gz",
+)
+
+load("@bazelci_rules//:rbe_repo.bzl", "rbe_preconfig")
+
+# Creates a default toolchain config for RBE.
+# Use this as is if you are using the rbe_ubuntu16_04 container,
+# otherwise refer to RBE docs.
+rbe_preconfig(
+    name = "buildkite_config",
+    toolchain = "ubuntu1804-bazel-java11",
+)
+
+# Add gazelle plugin so that we can run the gazelle example as an e2e integration
+# test and include the distribution files.
+local_repository(
+    name = "rules_python_gazelle_plugin",
+    path = "gazelle",
+)
+
+#####################
+
+# This wheel is purely here to validate the wheel extraction code. It's not
+# intended for anything else.
+http_file(
+    name = "wheel_for_testing",
+    downloaded_file_path = "numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+    sha256 = "0d60fbae8e0019865fc4784745814cff1c421df5afee233db6d88ab4f14655a2",
+    urls = [
+        "https://files.pythonhosted.org/packages/50/67/3e966d99a07d60a21a21d7ec016e9e4c2642a86fea251ec68677daf71d4d/numpy-1.25.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+    ],
+)
+
+# rules_proto expects //external:python_headers to point at the python headers.
+bind(
+    name = "python_headers",
+    actual = "//python/cc:current_py_cc_headers",
+)
diff --git a/docs/sphinx/BUILD.bazel b/docs/sphinx/BUILD.bazel
index 8912f2c..947ebba 100644
--- a/docs/sphinx/BUILD.bazel
+++ b/docs/sphinx/BUILD.bazel
@@ -12,12 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+load("@bazel_skylib//rules:write_file.bzl", "write_file")
 load("@dev_pip//:requirements.bzl", "requirement")
-load("//python:pip.bzl", "compile_pip_requirements")
+load("//python:py_binary.bzl", "py_binary")
 load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")  # buildifier: disable=bzl-visibility
 load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")  # buildifier: disable=bzl-visibility
 load("//sphinxdocs:readthedocs.bzl", "readthedocs_install")
-load("//sphinxdocs:sphinx.bzl", "sphinx_build_binary", "sphinx_docs", "sphinx_inventory")
+load("//sphinxdocs:sphinx.bzl", "sphinx_build_binary", "sphinx_docs")
 load("//sphinxdocs:sphinx_stardoc.bzl", "sphinx_stardocs")
 
 # We only build for Linux and Mac because:
@@ -30,7 +31,7 @@
     "@platforms//os:linux": [],
     "@platforms//os:macos": [],
     "//conditions:default": ["@platforms//:incompatible"],
-})
+}) if IS_BAZEL_7_OR_HIGHER else ["@platforms//:incompatible"]
 
 # See README.md for instructions. Short version:
 # * `bazel run //docs/sphinx:docs.serve` in a separate terminal
@@ -38,7 +39,6 @@
 sphinx_docs(
     name = "docs",
     srcs = [
-        ":bazel_inventory",
         ":bzl_api_docs",
     ] + glob(
         include = [
@@ -60,6 +60,7 @@
     renamed_srcs = {
         "//:CHANGELOG.md": "changelog.md",
         "//:CONTRIBUTING.md": "contributing.md",
+        "//sphinxdocs/inventories:bazel_inventory": "bazel_inventory.inv",
     },
     sphinx = ":sphinx-build",
     strip_prefix = package_name() + "/",
@@ -67,32 +68,41 @@
     target_compatible_with = _TARGET_COMPATIBLE_WITH,
 )
 
-sphinx_inventory(
-    name = "bazel_inventory",
-    src = "bazel_inventory.txt",
-)
-
 sphinx_stardocs(
     name = "bzl_api_docs",
     docs = {
-        "api/cc/py_cc_toolchain.md": dict(
+        "api/python/cc/py_cc_toolchain.md": dict(
             dep = "//python/private:py_cc_toolchain_bzl",
             input = "//python/private:py_cc_toolchain_rule.bzl",
             public_load_path = "//python/cc:py_cc_toolchain.bzl",
         ),
-        "api/cc/py_cc_toolchain_info.md": "//python/cc:py_cc_toolchain_info_bzl",
-        "api/defs.md": "//python:defs_bzl",
-        "api/entry_points/py_console_script_binary.md": "//python/entry_points:py_console_script_binary_bzl",
-        "api/packaging.md": "//python:packaging_bzl",
-        "api/pip.md": "//python:pip_bzl",
+        "api/python/cc/py_cc_toolchain_info.md": "//python/cc:py_cc_toolchain_info_bzl",
+        "api/python/defs.md": "//python:defs_bzl",
+        "api/python/entry_points/py_console_script_binary.md": "//python/entry_points:py_console_script_binary_bzl",
+        "api/python/packaging.md": "//python:packaging_bzl",
+        "api/python/pip.md": "//python:pip_bzl",
+        "api/python/private/common/py_binary_rule_bazel.md": "//python/private/common:py_binary_rule_bazel_bzl",
+        "api/python/private/common/py_library_rule_bazel.md": "//python/private/common:py_library_rule_bazel_bzl",
+        "api/python/private/common/py_runtime_rule.md": "//python/private/common:py_runtime_rule_bzl",
+        "api/python/private/common/py_test_rule_bazel.md": "//python/private/common:py_test_rule_bazel_bzl",
+        "api/python/py_binary.md": "//python:py_binary_bzl",
+        "api/python/py_cc_link_params_info.md": "//python:py_cc_link_params_info_bzl",
+        "api/python/py_library.md": "//python:py_library_bzl",
+        "api/python/py_runtime.md": "//python:py_runtime_bzl",
+        "api/python/py_runtime_info.md": "//python:py_runtime_info_bzl",
+        "api/python/py_runtime_pair.md": dict(
+            dep = "//python/private:py_runtime_pair_rule_bzl",
+            input = "//python/private:py_runtime_pair_rule.bzl",
+            public_load_path = "//python:py_runtime_pair.bzl",
+        ),
+        "api/python/py_test.md": "//python:py_test_bzl",
     } | ({
         # Bazel 6 + Stardoc isn't able to parse something about the python bzlmod extension
-        "api/extensions/python.md": "//python/extensions:python_bzl",
+        "api/python/extensions/python.md": "//python/extensions:python_bzl",
     } if IS_BAZEL_7_OR_HIGHER else {}) | ({
         # This depends on @pythons_hub, which is only created under bzlmod,
-        "api/extensions/pip.md": "//python/extensions:pip_bzl",
+        "api/python/extensions/pip.md": "//python/extensions:pip_bzl",
     } if IS_BAZEL_7_OR_HIGHER and BZLMOD_ENABLED else {}),
-    footer = "_stardoc_footer.md",
     tags = ["docs"],
     target_compatible_with = _TARGET_COMPATIBLE_WITH,
 )
@@ -111,13 +121,74 @@
         requirement("sphinx_rtd_theme"),
         requirement("myst_parser"),
         requirement("readthedocs_sphinx_ext"),
+        requirement("typing_extensions"),
+        "//sphinxdocs/src/sphinx_bzl",
     ],
 )
 
+_REQUIREMENTS_TARGET_COMPATIBLE_WITH = select({
+    "@platforms//os:linux": [],
+    "@platforms//os:macos": [],
+    "@platforms//os:windows": [],
+    "//conditions:default": ["@platforms//:incompatible"],
+}) if BZLMOD_ENABLED else ["@platforms//:incompatible"]
+
 # Run bazel run //docs/sphinx:requirements.update
-compile_pip_requirements(
+genrule(
     name = "requirements",
-    src = "pyproject.toml",
-    requirements_txt = "requirements.txt",
-    target_compatible_with = _TARGET_COMPATIBLE_WITH,
+    srcs = ["pyproject.toml"],
+    outs = ["_requirements.txt"],
+    cmd = "$(UV_BIN) pip compile " + " ".join([
+        "--custom-compile-command='bazel run //docs/sphinx:requirements.update'",
+        "--generate-hashes",
+        "--universal",
+        "--emit-index-url",
+        "--no-strip-extras",
+        "--no-build",
+        "--python=$(PYTHON3)",
+        "$<",
+        "--output-file=$@",
+        # Always try upgrading
+        "--upgrade",
+    ]),
+    tags = [
+        "local",
+        "manual",
+        "no-cache",
+    ],
+    target_compatible_with = _REQUIREMENTS_TARGET_COMPATIBLE_WITH,
+    toolchains = [
+        "//python/uv:current_toolchain",
+        "//python:current_py_toolchain",
+    ],
+)
+
+# Write a script that can be used for updating the in-tree version of the
+# requirements file
+write_file(
+    name = "gen_update_requirements",
+    out = "requirements.update.py",
+    content = [
+        "from os import environ",
+        "from pathlib import Path",
+        "from sys import stderr",
+        "",
+        'src = Path(environ["REQUIREMENTS_FILE"])',
+        'dst = Path(environ["BUILD_WORKSPACE_DIRECTORY"]) / "docs" / "sphinx" / "requirements.txt"',
+        'print(f"Writing requirements contents from {src} to {dst}", file=stderr)',
+        "dst.write_text(src.read_text())",
+        'print("Success!", file=stderr)',
+    ],
+    target_compatible_with = _REQUIREMENTS_TARGET_COMPATIBLE_WITH,
+)
+
+py_binary(
+    name = "requirements.update",
+    srcs = ["requirements.update.py"],
+    data = [":requirements"],
+    env = {
+        "REQUIREMENTS_FILE": "$(location :requirements)",
+    },
+    tags = ["manual"],
+    target_compatible_with = _REQUIREMENTS_TARGET_COMPATIBLE_WITH,
 )
diff --git a/docs/sphinx/api/python/cc/index.md b/docs/sphinx/api/python/cc/index.md
new file mode 100644
index 0000000..acaaf4f
--- /dev/null
+++ b/docs/sphinx/api/python/cc/index.md
@@ -0,0 +1,27 @@
+:::{bzl:currentfile} //python/cc:BUILD.bazel
+:::
+# //python/cc
+
+:::{bzl:target} current_py_cc_headers
+
+A convenience target that provides the Python headers. It uses toolchain
+resolution to find the headers for the Python runtime matching the interpreter
+that will be used. This basically forwards the underlying
+`cc_library(name="python_headers")` target defined in the `@python_X_Y` repo.
+
+This target provides:
+
+* `CcInfo`: The C++ information about the Python headers.
+:::
+
+:::{bzl:target} current_py_cc_libs
+
+A convenience target that provides the Python libraries. It uses toolchain
+resolution to find the libraries for the Python runtime matching the interpreter
+that will be used. This basically forwards the underlying
+`cc_library(name="libpython")` target defined in the `@python_X_Y` repo.
+
+This target provides:
+
+* `CcInfo`: The C++ information about the Python libraries.
+:::
diff --git a/docs/sphinx/api/python/config_settings/index.md b/docs/sphinx/api/python/config_settings/index.md
new file mode 100644
index 0000000..50647ab
--- /dev/null
+++ b/docs/sphinx/api/python/config_settings/index.md
@@ -0,0 +1,197 @@
+:::{default-domain} bzl
+:::
+:::{bzl:currentfile} //python/config_settings:BUILD.bazel
+:::
+
+# //python/config_settings
+
+:::{bzl:flag} python_version
+Determines the default hermetic Python toolchain version. This can be set to
+one of the values that `rules_python` maintains.
+:::
+
+::::{bzl:flag} exec_tools_toolchain
+Determines if the {obj}`exec_tools_toolchain_type` toolchain is enabled.
+
+:::{note}
+* Note that this only affects the rules_python generated toolchains.
+:::
+
+Values:
+
+* `enabled`: Allow matching of the registered toolchains at build time.
+* `disabled`: Prevent the toolchain from being matched at build time.
+
+:::{versionadded} 0.33.2
+:::
+::::
+
+::::{bzl:flag} precompile
+Determines if Python source files should be compiled at build time.
+
+:::{note}
+The flag value is overridden by the target level `precompile` attribute,
+except for the case of `force_enabled` and `forced_disabled`.
+:::
+
+Values:
+
+* `auto`: Automatically decide the effective value based on environment,
+  target platform, etc.
+* `enabled`: Compile Python source files at build time. Note that
+  {bzl:obj}`--precompile_add_to_runfiles` affects how the compiled files are included into
+  a downstream binary.
+* `disabled`: Don't compile Python source files at build time.
+* `if_generated_source`: Compile Python source files, but only if they're a
+  generated file.
+* `force_enabled`: Like `enabled`, except overrides target-level setting. This
+  is mostly useful for development, testing enabling precompilation more
+  broadly, or as an escape hatch if build-time compiling is not available.
+* `force_disabled`: Like `disabled`, except overrides target-level setting. This
+  is useful useful for development, testing enabling precompilation more
+  broadly, or as an escape hatch if build-time compiling is not available.
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} precompile_source_retention
+Determines, when a source file is compiled, if the source file is kept
+in the resulting output or not.
+
+:::{note}
+This flag is overridden by the target level `precompile_source_retention`
+attribute.
+:::
+
+Values:
+
+* `keep_source`: Include the original Python source.
+* `omit_source`: Don't include the original py source.
+* `omit_if_generated_source`: Keep the original source if it's a regular source
+  file, but omit it if it's a generated file.
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} precompile_add_to_runfiles
+Determines if a target adds its compiled files to its runfiles.
+
+When a target compiles its files, but doesn't add them to its own runfiles, it
+relies on a downstream target to retrieve them from
+{bzl:obj}`PyInfo.transitive_pyc_files`
+
+Values:
+* `always`: Always include the compiled files in the target's runfiles.
+* `decided_elsewhere`: Don't include the compiled files in the target's
+  runfiles; they are still added to {bzl:obj}`PyInfo.transitive_pyc_files`. See
+  also: {bzl:obj}`py_binary.pyc_collection` attribute. This is useful for allowing
+  incrementally enabling precompilation on a per-binary basis.
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} pyc_collection
+Determine if `py_binary` collects transitive pyc files.
+
+:::{note}
+This flag is overridden by the target level `pyc_collection` attribute.
+:::
+
+Values:
+* `include_pyc`: Include `PyInfo.transitive_pyc_files` as part of the binary.
+* `disabled`: Don't include `PyInfo.transitive_pyc_files` as part of the binary.
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} py_linux_libc
+Set what libc is used for the target platform. This will affect which whl binaries will be pulled and what toolchain will be auto-detected. Currently `rules_python` only supplies toolchains compatible with `glibc`.
+
+Values:
+* `glibc`: Use `glibc`, default.
+* `muslc`: Use `muslc`.
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} pip_whl
+Set what distributions are used in the `pip` integration.
+
+Values:
+* `auto`: Prefer `whl` distributions if they are compatible with a target
+  platform, but fallback to `sdist`. This is the default.
+* `only`: Only use `whl` distributions and error out if it is not available.
+* `no`: Only use `sdist` distributions. The wheels will be built non-hermetically in the `whl_library` repository rule.
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} pip_whl_osx_arch
+Set what wheel types we should prefer when building on the OSX platform.
+
+Values:
+* `arch`: Prefer architecture specific wheels.
+* `universal`: Prefer universal wheels that usually are bigger and contain binaries for both Intel and ARM architectures in the same wheel.
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} pip_whl_glibc_version
+Set the minimum `glibc` version that the `py_binary` using `whl` distributions from a PyPI index should support.
+
+Values:
+* `""`: Select the lowest available version of each wheel giving you the maximum compatibility. This is the default.
+* `X.Y`: The string representation of a `glibc` version. The allowed values depend on the `requirements.txt` lock file contents.
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} pip_whl_muslc_version
+Set the minimum `muslc` version that the `py_binary` using `whl` distributions from a PyPI index should support.
+
+Values:
+* `""`: Select the lowest available version of each wheel giving you the maximum compatibility. This is the default.
+* `X.Y`: The string representation of a `muslc` version. The allowed values depend on the `requirements.txt` lock file contents.
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} pip_whl_osx_version
+Set the minimum `osx` version that the `py_binary` using `whl` distributions from a PyPI index should support.
+
+Values:
+* `""`: Select the lowest available version of each wheel giving you the maximum compatibility. This is the default.
+* `X.Y`: The string representation of the MacOS version. The allowed values depend on the `requirements.txt` lock file contents.
+
+:::{versionadded} 0.33.0
+:::
+::::
+
+::::{bzl:flag} bootstrap_impl
+Determine how programs implement their startup process.
+
+Values:
+* `system_python`: Use a bootstrap that requires a system Python available
+  in order to start programs. This requires
+  {obj}`PyRuntimeInfo.bootstrap_template` to be a Python program.
+* `script`: Use a bootstrap that uses an arbitrary executable script (usually a
+  shell script) instead of requiring it be a Python program.
+
+:::{note}
+The `script` bootstrap requires the toolchain to provide the `PyRuntimeInfo`
+provider from `rules_python`. This loosely translates to using Bazel 7+ with a
+toolchain created by rules_python. Most notably, WORKSPACE builds default to
+using a legacy toolchain built into Bazel itself which doesn't support the
+script bootstrap. If not available, the `system_python` bootstrap will be used
+instead.
+:::
+
+:::{seealso}
+{obj}`PyRuntimeInfo.bootstrap_template` and
+{obj}`PyRuntimeInfo.stage2_bootstrap_template`
+:::
+
+:::{versionadded} 0.33.0
+:::
+
+::::
diff --git a/docs/sphinx/api/python/index.md b/docs/sphinx/api/python/index.md
new file mode 100644
index 0000000..6c79447
--- /dev/null
+++ b/docs/sphinx/api/python/index.md
@@ -0,0 +1,36 @@
+:::{default-domain} bzl
+:::
+:::{bzl:currentfile} //python:BUILD.bazel
+:::
+
+# //python
+
+:::{bzl:target} toolchain_type
+
+Identifier for the toolchain type for the target platform.
+:::
+
+:::{bzl:target} exec_tools_toolchain_type
+
+Identifier for the toolchain type for exec tools used to build Python targets.
+:::
+
+:::{bzl:target} current_py_toolchain
+
+Helper target to resolve to the consumer's current Python toolchain. This target
+provides:
+
+* `PyRuntimeInfo`: The consuming target's target toolchain information
+
+:::
+
+::::{target} autodetecting_toolchain
+
+Legacy toolchain; despite its name, it doesn't autodetect anything.
+
+:::{deprecated} 0.34.0
+
+Use {obj}`@rules_python//python/runtime_env_toolchain:all` instead.
+:::
+::::
+
diff --git a/docs/sphinx/api/python/runtime_env_toolchains/index.md b/docs/sphinx/api/python/runtime_env_toolchains/index.md
new file mode 100644
index 0000000..ef31f08
--- /dev/null
+++ b/docs/sphinx/api/python/runtime_env_toolchains/index.md
@@ -0,0 +1,38 @@
+:::{default-domain} bzl
+:::
+:::{bzl:currentfile} //python/runtime_env_toolchain:BUILD.bazel
+:::
+
+# //python/runtime_env_toolchain
+
+::::{target} all
+
+A set of toolchains that invoke `python3` from the runtime environment.
+
+Note that this toolchain provides no build-time information, which makes it of
+limited utility. This is because the invocation of `python3` is done when a
+program is run, not at build time.
+
+This is only provided to aid migration off the builtin Bazel toolchain 
+(`@bazel_tools//python:autodetecting_toolchain`), and is largely only applicable
+to WORKSPACE builds.
+
+To use this target, register it as a toolchain in WORKSPACE or MODULE.bazel:
+
+```starlark
+register_toolchains("@rules_python//python/runtime_env_toolchains:all")
+```
+
+The benefit of this target over the legacy targets is this defines additional
+toolchain types that rules_python needs. This prevents toolchain resolution from
+continuing to search elsewhere (e.g. potentially incurring a download of the
+hermetic runtimes when they won't be used).
+
+:::{deprecated} 0.34.0
+
+Switch to using a hermetic toolchain or manual toolchain configuration instead.
+:::
+
+:::{versionadded} 0.34.0
+:::
+::::
diff --git a/docs/sphinx/api/tools/precompiler/index.md b/docs/sphinx/api/tools/precompiler/index.md
new file mode 100644
index 0000000..1a47651
--- /dev/null
+++ b/docs/sphinx/api/tools/precompiler/index.md
@@ -0,0 +1,15 @@
+:::{bzl:currentfile} //tools/precompiler:BUILD.bazel
+:::
+
+# //tools/precompiler
+
+:::{bzl:flag} execution_requirements
+Determines the execution requirements `//tools/precompiler:precompiler` uses.
+
+This is a repeatable string_list flag. The values are `key=value` entries, each
+of which are added to the execution requirements for the `PyCompile` action to
+generate pyc files.
+
+Customizing this flag mostly allows controlling whether Bazel runs the
+precompiler as a regular worker, persistent worker, or regular action.
+:::
diff --git a/docs/sphinx/bazel_inventory.txt b/docs/sphinx/bazel_inventory.txt
deleted file mode 100644
index 869e66a..0000000
--- a/docs/sphinx/bazel_inventory.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-# Sphinx inventory version 2
-# Project: Bazel
-# Version: 7.0.0
-# The remainder of this file is compressed using zlib
-Action bzl:obj 1 rules/lib/Action -
-File bzl:obj 1 rules/lib/File -
-Label bzl:obj 1 rules/lib/Label -
-Target bzl:obj 1 rules/lib/builtins/Target -
-bool bzl:obj 1 rules/lib/bool -
-depset bzl:obj 1 rules/lib/depset -
-dict bzl:obj 1 rules/lib/dict -
-label bzl:doc 1 concepts/labels -
-list bzl:obj: 1 rules/lib/list -
-python bzl:doc 1 reference/be/python -
-str bzl:obj 1 rules/lib/string -
-struct bzl:obj 1 rules/lib/builtins/struct -
-target-name bzl:doc 1 concepts/labels#target-names -
diff --git a/docs/sphinx/conf.py b/docs/sphinx/conf.py
index e9af97a..b315577 100644
--- a/docs/sphinx/conf.py
+++ b/docs/sphinx/conf.py
@@ -27,6 +27,7 @@
     "sphinx.ext.intersphinx",
     "myst_parser",
     "sphinx_rtd_theme",  # Necessary to get jquery to make flyout work
+    "sphinx_bzl.bzl",
 ]
 
 # Adapted from the template code:
@@ -89,6 +90,10 @@
 
 myst_substitutions = {}
 
+# --- sphinx_stardoc configuration
+
+bzl_default_repository_name = "@rules_python"
+
 # -- Options for HTML output
 # See https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
 # For additional html settings
diff --git a/docs/sphinx/environment-variables.md b/docs/sphinx/environment-variables.md
new file mode 100644
index 0000000..2a00529
--- /dev/null
+++ b/docs/sphinx/environment-variables.md
@@ -0,0 +1,48 @@
+# Environment Variables
+
+:::{envvar} RULES_PYTHON_REPO_DEBUG
+
+When `1`, repository rules will print debug information about what they're
+doing. This is mostly useful for development to debug errors.
+:::
+
+:::{envvar} RULES_PYTHON_REPO_DEBUG_VERBOSITY
+
+Determines the verbosity of logging output for repo rules. Valid values:
+
+* `DEBUG`
+* `INFO`
+* `TRACE`
+:::
+
+:::{envvar} RULES_PYTHON_PIP_ISOLATED
+
+Determines if `--isolated` is used with pip.
+
+Valid values:
+* `0` and `false` mean to not use isolated mode
+* Other non-empty values mean to use isolated mode.
+:::
+
+:::{envvar} RULES_PYTHON_BZLMOD_DEBUG
+
+When `1`, bzlmod extensions will print debug information about what they're
+doing. This is mostly useful for development to debug errors.
+:::
+
+:::{envvar} RULES_PYTHON_ENABLE_PYSTAR
+
+When `1`, the rules_python Starlark implementation of the core rules is used
+instead of the Bazel-builtin rules. Note this requires Bazel 7+.
+:::
+
+:::{envvar} RULES_PYTHON_BOOTSTRAP_VERBOSE
+
+When `1`, debug information about bootstrapping of a program is printed to
+stderr.
+:::
+
+:::{envvar} VERBOSE_COVERAGE
+
+When `1`, debug information about coverage behavior is printed to stderr.
+:::
diff --git a/docs/sphinx/getting-started.md b/docs/sphinx/getting-started.md
index d7542fa..45d1962 100644
--- a/docs/sphinx/getting-started.md
+++ b/docs/sphinx/getting-started.md
@@ -1,13 +1,18 @@
 # Getting started
 
-The following two sections cover using `rules_python` with bzlmod and
-the older way of configuring bazel with a `WORKSPACE` file.
+This doc is a simplified guide to help you get started quickly. It provides
+a simplified introduction to having a working Python program for both bzlmod
+and the older way of using `WORKSPACE`.
 
+It assumes you have a `requirements.txt` file with your PyPI dependencies.
+
+For more detailed information about configuring `rules_python`, see:
+* [Configuring the runtime](toolchains)
+* [Configuring third party dependencies (pip/pypi)](pypi-dependencies)
+* [API docs](api/index)
 
 ## Using bzlmod
 
-**IMPORTANT: bzlmod support is still in Beta; APIs are subject to change.**
-
 The first step to using rules_python with bzlmod is to add the dependency to
 your MODULE.bazel file:
 
@@ -15,99 +20,20 @@
 # Update the version "0.0.0" to the release found here:
 # https://github.com/bazelbuild/rules_python/releases.
 bazel_dep(name = "rules_python", version = "0.0.0")
-```
 
-Once added, you can load the rules and use them:
-
-```starlark
-load("@rules_python//python:py_binary.bzl", "py_binary")
-
-py_binary(...)
-```
-
-Depending on what you're doing, you likely want to do some additional
-configuration to control what Python version is used; read the following
-sections for how to do that.
-
-### Toolchain registration with bzlmod
-
-A default toolchain is automatically configured depending on
-`rules_python`. Note, however, the version used tracks the most recent Python
-release and will change often.
-
-If you want to use a specific Python version for your programs, then how
-to do so depends on if you're configuring the root module or not. The root
-module is special because it can set the *default* Python version, which
-is used by the version-unaware rules (e.g. `//python:py_binary.bzl` et al). For
-submodules, it's recommended to use the version-aware rules to pin your programs
-to a specific Python version so they don't accidentally run with a different
-version configured by the root module.
-
-#### Configuring and using the default Python version
-
-To specify what the default Python version is, set `is_default = True` when
-calling `python.toolchain()`. This can only be done by the root module; it is
-silently ignored if a submodule does it. Similarly, using the version-unaware
-rules (which always use the default Python version) should only be done by the
-root module. If submodules use them, then they may run with a different Python
-version than they expect.
-
-```starlark
-python = use_extension("@rules_python//python/extensions:python.bzl", "python")
-
-python.toolchain(
+pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
+pip.parse(
+    hub_name = "my_deps",
     python_version = "3.11",
-    is_default = True,
+    requirements_lock = "//:requirements.txt",
 )
+use_repo(pip, "my_deps")
 ```
 
-Then use the base rules from e.g. `//python:py_binary.bzl`.
-
-#### Pinning to a Python version
-
-Pinning to a version allows targets to force that a specific Python version is
-used, even if the root module configures a different version as a default. This
-is most useful for two cases:
-
-1. For submodules to ensure they run with the appropriate Python version
-2. To allow incremental, per-target, upgrading to newer Python versions,
-   typically in a mono-repo situation.
-
-To configure a submodule with the version-aware rules, request the particular
-version you need, then use the `@python_versions` repo to use the rules that
-force specific versions:
-
-```starlark
-python = use_extension("@rules_python//python/extensions:python.bzl", "python")
-
-python.toolchain(
-    python_version = "3.11",
-)
-use_repo(python, "python_versions")
-```
-
-Then use e.g. `load("@python_versions//3.11:defs.bzl", "py_binary")` to use
-the rules that force that particular version. Multiple versions can be specified
-and use within a single build.
-
-For more documentation, see the bzlmod examples under the {gh-path}`examples`
-folder.  Look for the examples that contain a `MODULE.bazel` file.
-
-#### Other toolchain details
-
-The `python.toolchain()` call makes its contents available under a repo named
-`python_X_Y`, where X and Y are the major and minor versions. For example,
-`python.toolchain(python_version="3.11")` creates the repo `@python_3_11`.
-Remember to call `use_repo()` to make repos visible to your module:
-`use_repo(python, "python_3_11")`
-
 ## Using a WORKSPACE file
 
-To import rules_python in your project, you first need to add it to your
-`WORKSPACE` file, using the snippet provided in the
-[release you choose](https://github.com/bazelbuild/rules_python/releases)
-
-To depend on a particular unreleased version, you can do the following:
+Using WORKSPACE is deprecated, but still supported, and a bit more involved than
+using Bzlmod. Here is a simplified setup to download the prebuilt runtimes.
 
 ```starlark
 load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
@@ -130,13 +56,7 @@
 load("@rules_python//python:repositories.bzl", "py_repositories")
 
 py_repositories()
-```
 
-### Toolchain registration
-
-To register a hermetic Python toolchain rather than rely on a system-installed interpreter for runtime execution, you can add to the `WORKSPACE` file:
-
-```starlark
 load("@rules_python//python:repositories.bzl", "python_register_toolchains")
 
 python_register_toolchains(
@@ -157,19 +77,10 @@
 )
 ```
 
-After registration, your Python targets will use the toolchain's interpreter during execution, but a system-installed interpreter
-is still used to 'bootstrap' Python targets (see https://github.com/bazelbuild/rules_python/issues/691).
-You may also find some quirks while using this toolchain. Please refer to [python-build-standalone documentation's _Quirks_ section](https://python-build-standalone.readthedocs.io/en/latest/quirks.html).
-
-## Toolchain usage in other rules
-
-Python toolchains can be utilized in other bazel rules, such as `genrule()`, by adding the `toolchains=["@rules_python//python:current_py_toolchain"]` attribute. You can obtain the path to the Python interpreter using the `$(PYTHON2)` and `$(PYTHON3)` ["Make" Variables](https://bazel.build/reference/be/make-variables). See the
-{gh-path}`test_current_py_toolchain <tests/load_from_macro/BUILD.bazel>` target for an example.
-
 ## "Hello World"
 
-Once you've imported the rule set into your `WORKSPACE` using any of these
-methods, you can then load the core rules in your `BUILD` files with the following:
+Once you've imported the rule set using either Bzlmod or WORKSPACE, you can then
+load the core rules in your `BUILD` files with the following:
 
 ```starlark
 load("@rules_python//python:defs.bzl", "py_binary")
@@ -177,5 +88,9 @@
 py_binary(
   name = "main",
   srcs = ["main.py"],
+  deps = [
+      "@my_deps//foo",
+      "@my_deps//bar",
+  ]
 )
 ```
diff --git a/docs/sphinx/glossary.md b/docs/sphinx/glossary.md
index f54034d..9afbcff 100644
--- a/docs/sphinx/glossary.md
+++ b/docs/sphinx/glossary.md
@@ -7,6 +7,30 @@
   [Common attributes](https://bazel.build/reference/be/common-definitions#common-attributes)
   for a complete listing
 
+in-build runtime
+: An in-build runtime is one where the Python runtime, and all its files, are
+known to the build system and a Python binary includes all the necessary parts
+of the runtime in its runfiles. Such runtimes may be remotely downloaded, part
+of your source control, or mapped in from local files by repositories.
+
+The main advantage of in-build runtimes is they ensure you know what Python
+runtime will be used, since it's part of the build itself and included in
+the resulting binary. The main disadvantage is the additional work it adds to
+building. The whole Python runtime is included in a Python binary's runfiles,
+which can be a significant number of files.
+
+platform runtime
+: A platform runtime is a Python runtime that is assumed to be installed on the
+system where a Python binary runs, wherever that may be. For example, using `/usr/bin/python3`
+as the interpreter is a platform runtime -- it assumes that, wherever the binary
+runs (your local machine, a remote worker, within a container, etc), that path
+is available. Such runtimes are _not_ part of a binary's runfiles.
+
+The main advantage of platform runtimes is they are lightweight insofar as
+building the binary is concerned. All Bazel has to do is pass along a string
+path to the interpreter. The disadvantage is, if you don't control the systems
+being run on, you may get different Python installations than expected.
+
 rule callable
 : A function that behaves like a rule. This includes, but is not is not
   limited to:
@@ -26,3 +50,4 @@
 nonconfigurable
 : A nonconfigurable value cannot use `select`. See Bazel's
   [configurable attributes](https://bazel.build/reference/be/common-definitions#configurable-attributes) documentation.
+
diff --git a/docs/sphinx/index.md b/docs/sphinx/index.md
index 0a9c70f..8405eac 100644
--- a/docs/sphinx/index.md
+++ b/docs/sphinx/index.md
@@ -57,13 +57,16 @@
 self
 getting-started
 pypi-dependencies
+toolchains
 pip
 coverage
+precompiling
 gazelle
 Contributing <contributing>
 support
 Changelog <changelog>
 api/index
+environment-variables
 glossary
 genindex
 ```
diff --git a/docs/sphinx/pip.md b/docs/sphinx/pip.md
index 34248d2..43d8fc4 100644
--- a/docs/sphinx/pip.md
+++ b/docs/sphinx/pip.md
@@ -1,84 +1,4 @@
 (pip-integration)=
 # Pip Integration
 
-To pull in dependencies from PyPI, the `pip_parse` function is used, which
-invokes `pip` to download and install dependencies from PyPI.
-
-In your WORKSPACE file:
-
-```starlark
-load("@rules_python//python:pip.bzl", "pip_parse")
-
-pip_parse(
-    name = "pip_deps",
-    requirements_lock = ":requirements.txt",
-)
-
-load("@pip_deps//:requirements.bzl", "install_deps")
-
-install_deps()
-```
-
-You can then reference installed dependencies from a `BUILD` file with:
-
-```starlark
-load("@pip_deps//:requirements.bzl", "requirement")
-
-py_library(
-    name = "bar",
-    ...
-    deps = [
-        "//my/other:dep",
-        requirement("requests"),
-        requirement("numpy"),
-    ],
-)
-```
-
-In addition to the `requirement` macro, which is used to access the generated `py_library`
-target generated from a package's wheel, The generated `requirements.bzl` file contains
-functionality for exposing [entry points][whl_ep] as `py_binary` targets as well.
-
-[whl_ep]: https://packaging.python.org/specifications/entry-points/
-
-```starlark
-load("@pip_deps//:requirements.bzl", "entry_point")
-
-alias(
-    name = "pip-compile",
-    actual = entry_point(
-        pkg = "pip-tools",
-        script = "pip-compile",
-    ),
-)
-```
-
-Note that for packages whose name and script are the same, only the name of the package
-is needed when calling the `entry_point` macro.
-
-```starlark
-load("@pip_deps//:requirements.bzl", "entry_point")
-
-alias(
-    name = "flake8",
-    actual = entry_point("flake8"),
-)
-```
-
-(vendoring-requirements)=
-## Vendoring the requirements.bzl file
-
-In some cases you may not want to generate the requirements.bzl file as a repository rule
-while Bazel is fetching dependencies. For example, if you produce a reusable Bazel module
-such as a ruleset, you may want to include the requirements.bzl file rather than make your users
-install the WORKSPACE setup to generate it.
-See https://github.com/bazelbuild/rules_python/issues/608
-
-This is the same workflow as Gazelle, which creates `go_repository` rules with
-[`update-repos`](https://github.com/bazelbuild/bazel-gazelle#update-repos)
-
-To do this, use the "write to source file" pattern documented in
-https://blog.aspect.dev/bazel-can-write-to-the-source-folder
-to put a copy of the generated requirements.bzl into your project.
-Then load the requirements.bzl file directly rather than from the generated repository.
-See the example in rules_python/examples/pip_parse_vendored.
+See [PyPI dependencies](./pypi-dependencies).
diff --git a/docs/sphinx/precompiling.md b/docs/sphinx/precompiling.md
new file mode 100644
index 0000000..52678e6
--- /dev/null
+++ b/docs/sphinx/precompiling.md
@@ -0,0 +1,94 @@
+# Precompiling
+
+Precompiling is compiling Python source files (`.py` files) into byte code
+(`.pyc` files) at build time instead of runtime. Doing it at build time can
+improve performance by skipping that work at runtime.
+
+Precompiling is disabled by default, so you must enable it using flags or
+attributes to use it.
+
+## Overhead of precompiling
+
+While precompiling helps runtime performance, it has two main costs:
+1. Increasing the size (count and disk usage) of runfiles. It approximately
+   doubles the count of the runfiles because for every `.py` file, there is also
+   a `.pyc` file. Compiled files are generally around the same size as the
+   source files, so it approximately doubles the disk usage.
+2. Precompiling requires running an extra action at build time. While
+   compiling itself isn't that expensive, the overhead can become noticeable
+   as more files need to be compiled.
+
+## Binary-level opt-in
+
+Because of the costs of precompiling, it may not be feasible to globally enable it
+for your repo for everything. For example, some binaries may be
+particularly large, and doubling the number of runfiles isn't doable.
+
+If this is the case, there's an alternative way to more selectively and
+incrementally control precompiling on a per-binary basis.
+
+To use this approach, the two basic steps are:
+1. Disable pyc files from being automatically added to runfiles:
+   {bzl:obj}`--@rules_python//python/config_settings:precompile_add_to_runfiles=decided_elsewhere`,
+2. Set the `pyc_collection` attribute on the binaries/tests that should or should
+   not use precompiling.
+
+The default for the `pyc_collection` attribute is controlled by the flag
+{bzl:obj}`--@rules_python//python/config_settings:pyc_collection`, so you
+can use an opt-in or opt-out approach by setting its value:
+* targets must opt-out: `--@rules_python//python/config_settings:pyc_collection=include_pyc`
+* targets must opt-in: `--@rules_python//python/config_settings:pyc_collection=disabled`
+
+## Advanced precompiler customization
+
+The default implementation of the precompiler is a persistent, multiplexed,
+sandbox-aware, cancellation-enabled, json-protocol worker that uses the same
+interpreter as the target toolchain. This works well for local builds, but may
+not work as well for remote execution builds. To customize the precompiler, two
+mechanisms are available:
+
+* The exec tools toolchain allows customizing the precompiler binary used with
+  the `precompiler` attribute. Arbitrary binaries are supported.
+* The execution requirements can be customized using
+  `--@rules_python//tools/precompiler:execution_requirements`. This is a list
+  flag that can be repeated. Each entry is a key=value that is added to the
+  execution requirements of the `PyCompile` action. Note that this flag
+  is specific to the rules_python precompiler. If a custom binary is used,
+  this flag will have to be propagated from the custom binary using the
+  `testing.ExecutionInfo` provider; refer to the `py_interpreter_program` rule for an example.
+
+The default precompiler implementation is an asynchronous/concurrent
+implementation. If you find it has bugs or hangs, please report them. In the
+meantime, the flag `--worker_extra_flag=PyCompile=--worker_impl=serial` can
+be used to switch to a synchronous/serial implementation that may not perform
+as well, but is less likely to have issues.
+
+The `execution_requirements` keys of most relevance are:
+* `supports-workers`: 1 or 0, to indicate if a regular persistent worker is
+  desired.
+* `supports-multiplex-workers`: 1 or 0, to indicate if a multiplexed persistent
+  worker is desired.
+* `requires-worker-protocol`: json or proto; the rules_python precompiler
+  currently only supports json.
+* `supports-multiplex-sandboxing`: 1 or 0, to indicate if sandboxing of the
+  worker is supported.
+* `supports-worker-cancellation`: 1 or 0, to indicate if requests to the worker
+  can be cancelled.
+
+Note that any execution requirements values can be specified in the flag.
+
+## Known issues, caveats, and idiosyncrasies
+
+* Precompiling requires Bazel 7+ with the Pystar rule implementation enabled.
+* Mixing rules_python PyInfo with Bazel builtin PyInfo will result in pyc files
+  being dropped.
+* Precompiled files may not be used in certain cases prior to Python 3.11. This
+  occurs due to Python adding the directory of the binary's main `.py` file, which
+  causes the module to be found in the workspace source directory instead of
+  within the binary's runfiles directory (where the pyc files are). This can
+  usually be worked around by removing `sys.path[0]` (or otherwise ensuring the
+  runfiles directory comes before the repos source directory in `sys.path`).
+* The pyc filename does not include the optimization level (e.g.
+  `foo.cpython-39.opt-2.pyc`). This works fine (it's all byte code), but also
+  means the interpreter `-O` argument can't be used -- doing so will cause the
+  interpreter to look for the non-existent `opt-N` named files.
diff --git a/docs/sphinx/pypi-dependencies.md b/docs/sphinx/pypi-dependencies.md
index 880945f..db017d2 100644
--- a/docs/sphinx/pypi-dependencies.md
+++ b/docs/sphinx/pypi-dependencies.md
@@ -1,3 +1,6 @@
+:::{default-domain} bzl
+:::
+
 # Using dependencies from PyPI
 
 Using PyPI packages (aka "pip install") involves two main steps.
@@ -25,7 +28,21 @@
 use_repo(pip, "my_deps")
 ```
 For more documentation, including how the rules can update/create a requirements
-file, see the bzlmod examples under the {gh-path}`examples` folder.
+file, see the bzlmod examples under the {gh-path}`examples` folder or the documentation
+for the {obj}`@rules_python//python/extensions:pip.bzl` extension.
+
+```{note}
+We are using a host-platform compatible toolchain by default to setup pip dependencies.
+During the setup phase, we create some symlinks, which may be inefficient on Windows
+by default. In that case use the following `.bazelrc` options to improve performance if
+you have admin privileges:
+
+    startup --windows_enable_symlinks
+
+This will enable symlinks on Windows and help with bootstrap performance of setting up the 
+hermetic host python interpreter on this platform. Linux and OSX users should see no
+difference.
+```
 
 ### Using a WORKSPACE file
 
@@ -47,16 +64,67 @@
 install_deps()
 ```
 
+(vendoring-requirements)=
+#### Vendoring the requirements.bzl file
+
+In some cases you may not want to generate the requirements.bzl file as a repository rule
+while Bazel is fetching dependencies. For example, if you produce a reusable Bazel module
+such as a ruleset, you may want to include the requirements.bzl file rather than make your users
+install the WORKSPACE setup to generate it.
+See https://github.com/bazelbuild/rules_python/issues/608
+
+This is the same workflow as Gazelle, which creates `go_repository` rules with
+[`update-repos`](https://github.com/bazelbuild/bazel-gazelle#update-repos)
+
+To do this, use the "write to source file" pattern documented in
+https://blog.aspect.dev/bazel-can-write-to-the-source-folder
+to put a copy of the generated requirements.bzl into your project.
+Then load the requirements.bzl file directly rather than from the generated repository.
+See the example in rules_python/examples/pip_parse_vendored.
+
+(per-os-arch-requirements)=
+### Requirements for a specific OS/Architecture
+
+In some cases you may need to use different requirements files for different OS, Arch combinations. This is enabled via the `requirements_by_platform` attribute in `pip.parse` extension and the `pip_parse` repository rule. The keys of the dictionary are labels to the file and the values are a list of comma separated target (os, arch) tuples.
+
+For example:
+```starlark
+    # ...
+    requirements_by_platform = {
+        "requirements_linux_x86_64.txt": "linux_x86_64",
+        "requirements_osx.txt": "osx_*",
+        "requirements_linux_exotic.txt": "linux_exotic",
+        "requirements_some_platforms.txt": "linux_aarch64,windows_*",
+    },
+    # For the list of standard platforms that the rules_python has toolchains for, default to
+    # the following requirements file.
+    requirements_lock = "requirements_lock.txt",
+```
+
+In case of duplicate platforms, `rules_python` will raise an error as there has
+to be an unambiguous mapping of the requirement files to the (os, arch) tuples.
+
+An alternative way is to use per-OS requirement attributes.
+```starlark
+    # ...
+    requirements_windows = "requirements_windows.txt",
+    requirements_darwin = "requirements_darwin.txt",
+    # For the remaining platforms (which is basically only linux OS), use this file.
+    requirements_lock = "requirements_lock.txt",
+)
+```
+
 ### pip rules
 
-Note that since `pip_parse` is a repository rule and therefore executes pip at
-WORKSPACE-evaluation time, Bazel has no information about the Python toolchain
-and cannot enforce that the interpreter used to invoke pip matches the
-interpreter used to run `py_binary` targets. By default, `pip_parse` uses the
-system command `"python3"`. To override this, pass in the `python_interpreter`
-attribute or `python_interpreter_target` attribute to `pip_parse`.
+Note that since `pip_parse` and `pip.parse` are executed at evaluation time,
+Bazel has no information about the Python toolchain and cannot enforce that the
+interpreter used to invoke `pip` matches the interpreter used to run
+`py_binary` targets. By default, `pip_parse` uses the system command
+`"python3"`. To override this, pass in the `python_interpreter` attribute or
+`python_interpreter_target` attribute to `pip_parse`. The `pip.parse` `bzlmod` extension
+by default uses the hermetic python toolchain for the host platform.
 
-You can have multiple `pip_parse`s in the same workspace.  Or use the pip
+You can have multiple `pip_parse`s in the same workspace, or use the pip
 extension multiple times when using bzlmod. This configuration will create
 multiple external repos that have no relation to one another and may result in
 downloading the same wheels numerous times.
@@ -99,7 +167,7 @@
 labels directly instead. For `pip_parse`, the labels are of the following form:
 
 ```starlark
-@{name}_{package}//:pkg
+@{name}//{package}
 ```
 
 Here `name` is the `name` attribute that was passed to `pip_parse` and
@@ -109,30 +177,67 @@
 buildozer command:
 
 ```shell
-buildozer 'substitute deps @old_([^/]+)//:pkg @new_${1}//:pkg' //...:*
+buildozer 'substitute deps @old//([^/]+) @new//${1}' //...:*
 ```
 
 [requirements-drawbacks]: https://github.com/bazelbuild/rules_python/issues/414
 
+### Entry points
+
+If you would like to access [entry points][whl_ep], see the `py_console_script_binary` rule documentation,
+which can help you create a `py_binary` target for a particular console script exposed by a package.
+
+[whl_ep]: https://packaging.python.org/specifications/entry-points/
+
 ### 'Extras' dependencies
 
 Any 'extras' specified in the requirements lock file will be automatically added
 as transitive dependencies of the package. In the example above, you'd just put
-`requirement("useful_dep")`.
+`requirement("useful_dep")` or `@pypi//useful_dep`.
 
-### Packaging cycles
+### Consuming Wheel Dists Directly
 
-Sometimes PyPi packages contain dependency cycles -- for instance `sphinx`
-depends on `sphinxcontrib-serializinghtml`. When using them as `requirement()`s,
-ala
+If you need to depend on the wheel dists themselves, for instance, to pass them
+to some other packaging tool, you can get a handle to them with the
+`whl_requirement` macro. For example:
+
+```starlark
+load("@pypi//:requirements.bzl", "whl_requirement")
+
+filegroup(
+    name = "whl_files",
+    data = [
+        # This is equivalent to "@pypi//boto3:whl"
+        whl_requirement("boto3"),
+    ]
+)
+```
+
+### Creating a filegroup of files within a whl
+
+The rule {obj}`whl_filegroup` exists as an easy way to extract the necessary files
+from a whl file without the need to modify the `BUILD.bazel` contents of the
+whl repositories generated via `pip_repository`. Use it similarly to the `filegroup`
+above. See the API docs for more information.
+
+(advance-topics)=
+## Advanced topics
+
+(circular-deps)=
+### Circular dependencies
+
+Sometimes PyPI packages contain dependency cycles -- for instance a particular
+version of `sphinx` (this is no longer the case in the latest version as of
+2024-06-02) depends on `sphinxcontrib-serializinghtml`. When using them as
+`requirement()`s, ala
 
 ```
 py_binary(
-  name = "doctool",
-  ...
-  deps = [
-    requirement("sphinx"),
-   ]
+    name = "doctool",
+    ...
+    deps = [
+        requirement("sphinx"),
+    ],
 )
 ```
 
@@ -154,15 +259,15 @@
 transparently fix the cycles for you and provide the cyclic dependencies
 simultaneously.
 
-```
+```starlark
 pip_parse(
-  ...
-  experimental_requirement_cycles = {
-    "sphinx": [
-      "sphinx",
-      "sphinxcontrib-serializinghtml",
-    ]
-  },
+    ...
+    experimental_requirement_cycles = {
+        "sphinx": [
+            "sphinx",
+            "sphinxcontrib-serializinghtml",
+        ]
+    },
 )
 ```
 
@@ -171,17 +276,17 @@
 of its optional dependencies, which means those optional dependencies must all
 be a part of the `airflow` cycle. For instance --
 
-```
+```starlark
 pip_parse(
-  ...
-  experimental_requirement_cycles = {
-    "airflow": [
-      "apache-airflow",
-      "apache-airflow-providers-common-sql",
-      "apache-airflow-providers-postgres",
-      "apache-airflow-providers-sqlite",
-    ]
-  }
+    ...
+    experimental_requirement_cycles = {
+        "airflow": [
+            "apache-airflow",
+            "apache-airflow-providers-common-sql",
+            "apache-airflow-providers-postgres",
+            "apache-airflow-providers-sqlite",
+        ]
+    }
 )
 ```
 
@@ -201,17 +306,98 @@
 `apache-airflow-providers-postgres` not explicitly depend on `apache-airflow` or
 perhaps `apache-airflow-providers-common-sql`.
 
-## Consuming Wheel Dists Directly
 
-If you need to depend on the wheel dists themselves, for instance, to pass them
-to some other packaging tool, you can get a handle to them with the
-`whl_requirement` macro. For example:
+(bazel-downloader)=
+### Bazel downloader and multi-platform wheel hub repository.
 
-```starlark
-filegroup(
-    name = "whl_files",
-    data = [
-        whl_requirement("boto3"),
-    ]
-)
+The `bzlmod` `pip.parse` call supports pulling information from `PyPI` (or a
+compatible mirror) and it will ensure that the [bazel
+downloader][bazel_downloader] is used for downloading the wheels. This allows
+the users to use the [credential helper](#credential-helper) to authenticate
+with the mirror and it also ensures that the distribution downloads are cached.
+It also avoids using `pip` altogether and results in much faster dependency
+fetching.
+
+This can be enabled by `experimental_index_url` and related flags as shown in
+the {gh-path}`examples/bzlmod/MODULE.bazel` example.
+
+When using this feature during the `pip` extension evaluation you will see the accessed indexes similar to below:
+```console
+Loading: 0 packages loaded
+    currently loading: docs/sphinx
+    Fetching module extension pip in @@//python/extensions:pip.bzl; starting
+    Fetching https://pypi.org/simple/twine/
 ```
+
+This does not mean that `rules_python` is fetching the wheels eagerly, but it
+rather means that it is calling the PyPI server to get the Simple API response
+to get the list of all available source and wheel distributions. Once it has
+got all of the available distributions, it will select the right ones depending
+on the `sha256` values in your `requirements_lock.txt` file. The compatible
+distribution URLs will be then written to the `MODULE.bazel.lock` file. Currently
+users wishing to use the lock file with `rules_python` with this feature have
+to set an environment variable `RULES_PYTHON_OS_ARCH_LOCK_FILE=0` which will
+become default in the next release.
+
+Fetching the distribution information from the PyPI allows `rules_python` to
+know which `whl` should be used on which target platform and it will determine
+that by parsing the `whl` filename based on [PEP600], [PEP656] standards. This
+allows the user to configure the behaviour by using the following publicly
+available flags:
+* {obj}`--@rules_python//python/config_settings:py_linux_libc` for selecting the Linux libc variant.
+* {obj}`--@rules_python//python/config_settings:pip_whl` for selecting `whl` distribution preference.
+* {obj}`--@rules_python//python/config_settings:pip_whl_osx_arch` for selecting MacOS wheel preference.
+* {obj}`--@rules_python//python/config_settings:pip_whl_glibc_version` for selecting the GLIBC version compatibility.
+* {obj}`--@rules_python//python/config_settings:pip_whl_muslc_version` for selecting the musl version compatibility.
+* {obj}`--@rules_python//python/config_settings:pip_whl_osx_version` for selecting MacOS version compatibility.
+
+[bazel_downloader]: https://bazel.build/rules/lib/builtins/repository_ctx#download
+[pep600]: https://peps.python.org/pep-0600/
+[pep656]: https://peps.python.org/pep-0656/
+
+(credential-helper)=
+### Credential Helper
+
+The "use Bazel downloader for python wheels" experimental feature includes support for the Bazel
+[Credential Helper][cred-helper-design].
+
+Your python artifact registry may provide a credential helper for you. Refer to your index's docs
+to see if one is provided.
+
+See the [Credential Helper Spec][cred-helper-spec] for details.
+
+[cred-helper-design]: https://github.com/bazelbuild/proposals/blob/main/designs/2022-06-07-bazel-credential-helpers.md
+[cred-helper-spec]: https://github.com/EngFlow/credential-helper-spec/blob/main/spec.md
+
+
+#### Basic Example:
+
+The simplest form of a credential helper is a bash script that accepts an arg and spits out JSON to
+stdout. For a service like Google Artifact Registry that uses ['Basic' HTTP Auth][rfc7617] and does
+not provide a credential helper that conforms to the [spec][cred-helper-spec], the script might
+look like:
+
+```bash
+#!/bin/bash
+# cred_helper.sh
+ARG=$1  # but we don't do anything with it as it's always "get"
+
+# formatting is optional
+echo '{'
+echo '  "headers": {'
+echo '    "Authorization": ["Basic dGVzdDoxMjPCow=="]'
+echo '  }'
+echo '}'
+```
+
+Configure Bazel to use this credential helper for your python index `example.com`:
+
+```
+# .bazelrc
+build --credential_helper=example.com=/full/path/to/cred_helper.sh
+```
+
+Bazel will call this file like `cred_helper.sh get` and use the returned JSON to inject headers
+into whatever HTTP(S) request it performs against `example.com`.
+
+[rfc7617]: https://datatracker.ietf.org/doc/html/rfc7617
diff --git a/docs/sphinx/pyproject.toml b/docs/sphinx/pyproject.toml
index d36c9f2..03279c5 100644
--- a/docs/sphinx/pyproject.toml
+++ b/docs/sphinx/pyproject.toml
@@ -10,4 +10,5 @@
     "sphinx_rtd_theme",
     "readthedocs-sphinx-ext",
     "absl-py",
+    "typing-extensions"
 ]
diff --git a/docs/sphinx/requirements.txt b/docs/sphinx/requirements.txt
index 85c61f3..e0d3bba 100644
--- a/docs/sphinx/requirements.txt
+++ b/docs/sphinx/requirements.txt
@@ -1,24 +1,22 @@
-#
-# This file is autogenerated by pip-compile with Python 3.11
-# by the following command:
-#
+# This file was autogenerated by uv via the following command:
 #    bazel run //docs/sphinx:requirements.update
-#
-absl-py==2.0.0 \
-    --hash=sha256:9a28abb62774ae4e8edbe2dd4c49ffcd45a6a848952a5eccc6a49f3f0fc1e2f3 \
-    --hash=sha256:d9690211c5fcfefcdd1a45470ac2b5c5acd45241c3af71eed96bc5441746c0d5
+--index-url https://pypi.org/simple
+
+absl-py==2.1.0 \
+    --hash=sha256:526a04eadab8b4ee719ce68f204172ead1027549089702d99b9059f129ff1308 \
+    --hash=sha256:7820790efbb316739cde8b4e19357243fc3608a152024288513dd968d7d959ff
     # via rules-python-docs (docs/sphinx/pyproject.toml)
-alabaster==0.7.13 \
-    --hash=sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3 \
-    --hash=sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2
+alabaster==0.7.16 \
+    --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \
+    --hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92
     # via sphinx
-babel==2.13.1 \
-    --hash=sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900 \
-    --hash=sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed
+babel==2.15.0 \
+    --hash=sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb \
+    --hash=sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413
     # via sphinx
-certifi==2023.11.17 \
-    --hash=sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1 \
-    --hash=sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474
+certifi==2024.7.4 \
+    --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \
+    --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90
     # via requests
 charset-normalizer==3.3.2 \
     --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \
@@ -112,6 +110,10 @@
     --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \
     --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561
     # via requests
+colorama==0.4.6 ; sys_platform == 'win32' \
+    --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
+    --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
+    # via sphinx
 docutils==0.20.1 \
     --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \
     --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b
@@ -119,17 +121,17 @@
     #   myst-parser
     #   sphinx
     #   sphinx-rtd-theme
-idna==3.6 \
-    --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
-    --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
+idna==3.7 \
+    --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \
+    --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0
     # via requests
 imagesize==1.4.1 \
     --hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \
     --hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a
     # via sphinx
-jinja2==3.1.2 \
-    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
-    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
+jinja2==3.1.4 \
+    --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
+    --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
     # via
     #   myst-parser
     #   readthedocs-sphinx-ext
@@ -140,89 +142,89 @@
     # via
     #   mdit-py-plugins
     #   myst-parser
-markupsafe==2.1.3 \
-    --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
-    --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \
-    --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \
-    --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \
-    --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \
-    --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \
-    --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \
-    --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \
-    --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \
-    --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \
-    --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \
-    --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \
-    --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \
-    --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \
-    --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \
-    --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \
-    --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \
-    --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \
-    --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \
-    --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \
-    --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \
-    --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \
-    --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \
-    --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \
-    --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \
-    --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \
-    --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \
-    --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \
-    --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \
-    --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \
-    --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \
-    --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \
-    --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \
-    --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \
-    --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \
-    --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \
-    --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \
-    --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \
-    --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \
-    --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \
-    --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \
-    --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \
-    --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \
-    --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \
-    --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \
-    --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \
-    --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \
-    --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \
-    --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \
-    --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \
-    --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \
-    --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \
-    --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \
-    --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \
-    --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \
-    --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \
-    --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \
-    --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \
-    --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \
-    --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11
+markupsafe==2.1.5 \
+    --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
+    --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
+    --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
+    --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \
+    --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \
+    --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \
+    --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \
+    --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \
+    --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \
+    --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \
+    --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \
+    --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
+    --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \
+    --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \
+    --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \
+    --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \
+    --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \
+    --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \
+    --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
+    --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \
+    --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \
+    --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \
+    --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \
+    --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \
+    --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \
+    --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \
+    --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \
+    --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \
+    --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \
+    --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \
+    --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \
+    --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
+    --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \
+    --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
+    --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \
+    --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \
+    --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \
+    --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
+    --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \
+    --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \
+    --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \
+    --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
+    --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \
+    --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \
+    --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \
+    --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \
+    --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \
+    --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
+    --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
+    --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \
+    --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \
+    --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \
+    --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \
+    --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
+    --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \
+    --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \
+    --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \
+    --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
+    --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
+    --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
     # via jinja2
-mdit-py-plugins==0.4.0 \
-    --hash=sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9 \
-    --hash=sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b
+mdit-py-plugins==0.4.1 \
+    --hash=sha256:1020dfe4e6bfc2c79fb49ae4e3f5b297f5ccd20f010187acc52af2921e27dc6a \
+    --hash=sha256:834b8ac23d1cd60cec703646ffd22ae97b7955a6d596eb1d304be1e251ae499c
     # via myst-parser
 mdurl==0.1.2 \
     --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
     --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
     # via markdown-it-py
-myst-parser==2.0.0 \
-    --hash=sha256:7c36344ae39c8e740dad7fdabf5aa6fc4897a813083c6cc9990044eb93656b14 \
-    --hash=sha256:ea929a67a6a0b1683cdbe19b8d2e724cd7643f8aa3e7bb18dd65beac3483bead
+myst-parser==3.0.1 \
+    --hash=sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1 \
+    --hash=sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87
     # via rules-python-docs (docs/sphinx/pyproject.toml)
-packaging==23.2 \
-    --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \
-    --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7
+packaging==24.1 \
+    --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
+    --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
     # via
     #   readthedocs-sphinx-ext
     #   sphinx
-pygments==2.17.2 \
-    --hash=sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c \
-    --hash=sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367
+pygments==2.18.0 \
+    --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \
+    --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a
     # via sphinx
 pyyaml==6.0.1 \
     --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \
@@ -254,6 +256,7 @@
     --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \
     --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \
     --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \
+    --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \
     --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \
     --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \
     --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \
@@ -276,13 +279,13 @@
     --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \
     --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f
     # via myst-parser
-readthedocs-sphinx-ext==2.2.3 \
-    --hash=sha256:6583c26791a5853ee9e57ce9db864e2fb06808ba470f805d74d53fc50811e012 \
-    --hash=sha256:e9d911792789b88ae12e2be94d88c619f89a4fa1fe9e42c1505c9930a07163d8
+readthedocs-sphinx-ext==2.2.5 \
+    --hash=sha256:ee5fd5b99db9f0c180b2396cbce528aa36671951b9526bb0272dbfce5517bd27 \
+    --hash=sha256:f8c56184ea011c972dd45a90122568587cc85b0127bc9cf064d17c68bc809daa
     # via rules-python-docs (docs/sphinx/pyproject.toml)
-requests==2.31.0 \
-    --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
-    --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
+requests==2.32.3 \
+    --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
+    --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
     # via
     #   readthedocs-sphinx-ext
     #   sphinx
@@ -290,34 +293,29 @@
     --hash=sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1 \
     --hash=sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a
     # via sphinx
-sphinx==7.2.6 \
-    --hash=sha256:1e09160a40b956dc623c910118fa636da93bd3ca0b9876a7b3df90f07d691560 \
-    --hash=sha256:9a5160e1ea90688d5963ba09a2dcd8bdd526620edbb65c328728f1b2228d5ab5
+sphinx==7.4.7 \
+    --hash=sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe \
+    --hash=sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239
     # via
-    #   myst-parser
     #   rules-python-docs (docs/sphinx/pyproject.toml)
+    #   myst-parser
     #   sphinx-rtd-theme
-    #   sphinxcontrib-applehelp
-    #   sphinxcontrib-devhelp
-    #   sphinxcontrib-htmlhelp
     #   sphinxcontrib-jquery
-    #   sphinxcontrib-qthelp
-    #   sphinxcontrib-serializinghtml
 sphinx-rtd-theme==2.0.0 \
     --hash=sha256:bd5d7b80622406762073a04ef8fadc5f9151261563d47027de09910ce03afe6b \
     --hash=sha256:ec93d0856dc280cf3aee9a4c9807c60e027c7f7b461b77aeffed682e68f0e586
     # via rules-python-docs (docs/sphinx/pyproject.toml)
-sphinxcontrib-applehelp==1.0.7 \
-    --hash=sha256:094c4d56209d1734e7d252f6e0b3ccc090bd52ee56807a5d9315b19c122ab15d \
-    --hash=sha256:39fdc8d762d33b01a7d8f026a3b7d71563ea3b72787d5f00ad8465bd9d6dfbfa
+sphinxcontrib-applehelp==2.0.0 \
+    --hash=sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1 \
+    --hash=sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5
     # via sphinx
-sphinxcontrib-devhelp==1.0.5 \
-    --hash=sha256:63b41e0d38207ca40ebbeabcf4d8e51f76c03e78cd61abe118cf4435c73d4212 \
-    --hash=sha256:fe8009aed765188f08fcaadbb3ea0d90ce8ae2d76710b7e29ea7d047177dae2f
+sphinxcontrib-devhelp==2.0.0 \
+    --hash=sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad \
+    --hash=sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2
     # via sphinx
-sphinxcontrib-htmlhelp==2.0.4 \
-    --hash=sha256:6c26a118a05b76000738429b724a0568dbde5b72391a688577da08f11891092a \
-    --hash=sha256:8001661c077a73c29beaf4a79968d0726103c5605e27db92b9ebed8bab1359e9
+sphinxcontrib-htmlhelp==2.1.0 \
+    --hash=sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8 \
+    --hash=sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9
     # via sphinx
 sphinxcontrib-jquery==4.1 \
     --hash=sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a \
@@ -327,15 +325,19 @@
     --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \
     --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8
     # via sphinx
-sphinxcontrib-qthelp==1.0.6 \
-    --hash=sha256:62b9d1a186ab7f5ee3356d906f648cacb7a6bdb94d201ee7adf26db55092982d \
-    --hash=sha256:bf76886ee7470b934e363da7a954ea2825650013d367728588732c7350f49ea4
+sphinxcontrib-qthelp==2.0.0 \
+    --hash=sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab \
+    --hash=sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb
     # via sphinx
-sphinxcontrib-serializinghtml==1.1.9 \
-    --hash=sha256:0c64ff898339e1fac29abd2bf5f11078f3ec413cfe9c046d3120d7ca65530b54 \
-    --hash=sha256:9b36e503703ff04f20e9675771df105e58aa029cfcbc23b8ed716019b7416ae1
+sphinxcontrib-serializinghtml==2.0.0 \
+    --hash=sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331 \
+    --hash=sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d
     # via sphinx
-urllib3==2.1.0 \
-    --hash=sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3 \
-    --hash=sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54
+typing-extensions==4.12.2 \
+    --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
+    --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
+    # via rules-python-docs (docs/sphinx/pyproject.toml)
+urllib3==2.2.2 \
+    --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \
+    --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168
     # via requests
diff --git a/docs/sphinx/support.md b/docs/sphinx/support.md
index a2b8e3a..ea09965 100644
--- a/docs/sphinx/support.md
+++ b/docs/sphinx/support.md
@@ -46,7 +46,8 @@
 
 Breaking changes are allowed, but follow a process to introduce them over
 a series of releases to so users can still incrementally upgrade. See the
-[Breaking Changes](contributing#breaking-changes) doc for the process.
+[Breaking Changes](#breaking-changes) doc for the process.
+
 
 ## Experimental Features
 
diff --git a/docs/sphinx/toolchains.md b/docs/sphinx/toolchains.md
new file mode 100644
index 0000000..fac1bfc
--- /dev/null
+++ b/docs/sphinx/toolchains.md
@@ -0,0 +1,244 @@
+:::{default-domain} bzl
+:::
+
+# Configuring Python toolchains and runtimes
+
+This documents how to configure the Python toolchain and runtimes for different
+use cases.
+
+## Bzlmod MODULE configuration
+
+How to configure `rules_python` in your MODULE.bazel file depends on how and why
+you're using Python. There are 4 basic use cases:
+
+1. A root module that always uses Python. For example, you're building a
+   Python application.
+2. A library module with dev-only uses of Python. For example, a Java project
+   that only uses Python as part of testing itself.
+3. A library module without version constraints. For example, a rule set with
+   Python build tools, but defers to the user as to what Python version is used
+   for the tools.
+4. A library module with version constraints. For example, a rule set with
+   Python build tools, and the module requires a specific version of Python
+   be used with its tools.
+
+### Root modules
+
+Root modules are always the top-most module. These are special in two ways:
+
+1. Some `rules_python` bzlmod APIs are only respected by the root module.
+2. The root module can force module overrides and specific module dependency
+   ordering.
+
+When configuring `rules_python` for a root module, you typically want to
+explicitly specify the Python version you want to use. This ensures that
+dependencies don't change the Python version out from under you. Remember that
+`rules_python` will set a version by default, but it will change regularly as
+it tracks a recent Python version.
+
+NOTE: If your root module only uses Python for development of the module itself,
+you should read the dev-only library module section.
+
+```
+bazel_dep(name="rules_python", version=...)
+python = use_extension("@rules_python//python/extensions:python.bzl", "python")
+
+python.toolchain(python_version = "3.12", is_default = True)
+```
+
+### Library modules
+
+A library module is a module that can show up in arbitrary locations in the
+bzlmod module graph -- it's unknown where in the breadth-first search order the
+module will be relative to other modules. For example, `rules_python` is a
+library module.
+
+#### Library modules with dev-only Python usage
+
+A library module with dev-only Python usage is usually one where Python is only
+used as part of its tests. For example, a module for Java rules might run some
+Python program to generate test data, but real usage of the rules doesn't need
+Python to work. To configure this, follow the root-module setup, but remember to
+specify `dev_dependency = True` to the bzlmod APIs:
+
+```
+# MODULE.bazel
+bazel_dep(name = "rules_python", version=..., dev_dependency = True)
+
+python = use_extension(
+    "@rules_python//python/extensions:python.bzl",
+    "python",
+    dev_dependency = True
+)
+
+python.toolchain(python_version = "3.12", is_default=True)
+```
+
+#### Library modules without version constraints
+
+A library module without version constraints is one where the version of Python
+used for the Python programs it runs isn't chosen by the module itself. Instead,
+it's up to the root module to pick an appropriate version of Python.
+
+For this case, configuration is simple: just depend on `rules_python` and use
+the normal `//python:py_binary.bzl` et al rules. There is no need to call
+`python.toolchain` -- rules_python ensures _some_ Python version is available,
+but more often the root module will specify some version.
+
+```
+# MODULE.bazel
+bazel_dep(name = "rules_python", version=...)
+```
+
+#### Library modules with version constraints
+
+A library module with version constraints is one where the module requires a
+specific Python version be used with its tools. This has some pros/cons:
+
+* It allows the library's tools to use a different version of Python than
+  the rest of the build. For example, a user's program could use Python 3.12,
+  while the library module's tools use Python 3.10.
+* It reduces the support burden for the library module because the library only needs
+  to test for the particular Python version they intend to run as.
+* It raises the support burden for the library module because the version of
+  Python being used needs to be regularly incremented.
+* It has higher build overhead because additional runtimes and libraries need
+  to be downloaded, and Bazel has to keep additional configuration state.
+
+To configure this, request the Python versions needed in MODULE.bazel and use
+the version-aware rules for `py_binary`.
+
+```
+# MODULE.bazel
+bazel_dep(name = "rules_python", version=...)
+
+python = use_extension("@rules_python//python/extensions:python.bzl", "python")
+python.toolchain(python_version = "3.12")
+
+# BUILD.bazel
+load("@python_versions//3.12:defs.bzl", "py_binary")
+
+py_binary(...)
+```
+
+### Pinning to a Python version
+
+Pinning to a version allows targets to force that a specific Python version is
+used, even if the root module configures a different version as a default. This
+is most useful for two cases:
+
+1. For submodules to ensure they run with the appropriate Python version
+2. To allow incremental, per-target, upgrading to newer Python versions,
+   typically in a mono-repo situation.
+
+To configure a submodule with the version-aware rules, request the particular
+version you need, then use the `@python_versions` repo to use the rules that
+force specific versions:
+
+```starlark
+python = use_extension("@rules_python//python/extensions:python.bzl", "python")
+
+python.toolchain(
+    python_version = "3.11",
+)
+use_repo(python, "python_versions")
+```
+
+Then use e.g. `load("@python_versions//3.11:defs.bzl", "py_binary")` to use
+the rules that force that particular version. Multiple versions can be specified
+and used within a single build.
+
+For more documentation, see the bzlmod examples under the {gh-path}`examples`
+folder.  Look for the examples that contain a `MODULE.bazel` file.
+
+### Other toolchain details
+
+The `python.toolchain()` call makes its contents available under a repo named
+`python_X_Y`, where X and Y are the major and minor versions. For example,
+`python.toolchain(python_version="3.11")` creates the repo `@python_3_11`.
+Remember to call `use_repo()` to make repos visible to your module:
+`use_repo(python, "python_3_11")`
+
+#### Toolchain usage in other rules
+
+Python toolchains can be utilized in other bazel rules, such as `genrule()`, by adding the `toolchains=["@rules_python//python:current_py_toolchain"]` attribute. You can obtain the path to the Python interpreter using the `$(PYTHON2)` and `$(PYTHON3)` ["Make" Variables](https://bazel.build/reference/be/make-variables). See the
+{gh-path}`test_current_py_toolchain <tests/load_from_macro/BUILD.bazel>` target for an example.
+
+
+## Workspace configuration
+
+To import rules_python in your project, you first need to add it to your
+`WORKSPACE` file, using the snippet provided in the
+[release you choose](https://github.com/bazelbuild/rules_python/releases)
+
+To depend on a particular unreleased version, you can do the following:
+
+```starlark
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+
+
+# Update the SHA and VERSION to the latest version available here:
+# https://github.com/bazelbuild/rules_python/releases.
+
+SHA="84aec9e21cc56fbc7f1335035a71c850d1b9b5cc6ff497306f84cced9a769841"
+
+VERSION="0.23.1"
+
+http_archive(
+    name = "rules_python",
+    sha256 = SHA,
+    strip_prefix = "rules_python-{}".format(VERSION),
+    url = "https://github.com/bazelbuild/rules_python/releases/download/{}/rules_python-{}.tar.gz".format(VERSION,VERSION),
+)
+
+load("@rules_python//python:repositories.bzl", "py_repositories")
+
+py_repositories()
+```
+
+### Workspace toolchain registration
+
+To register a hermetic Python toolchain rather than rely on a system-installed interpreter for runtime execution, you can add to the `WORKSPACE` file:
+
+```starlark
+load("@rules_python//python:repositories.bzl", "python_register_toolchains")
+
+python_register_toolchains(
+    name = "python_3_11",
+    # Available versions are listed in @rules_python//python:versions.bzl.
+    # We recommend using the same version your team is already standardized on.
+    python_version = "3.11",
+)
+
+load("@python_3_11//:defs.bzl", "interpreter")
+
+load("@rules_python//python:pip.bzl", "pip_parse")
+
+pip_parse(
+    ...
+    python_interpreter_target = interpreter,
+    ...
+)
+```
+
+After registration, your Python targets will use the toolchain's interpreter during execution, but a system-installed interpreter
+is still used to 'bootstrap' Python targets (see https://github.com/bazelbuild/rules_python/issues/691).
+You may also find some quirks while using this toolchain. Please refer to [python-build-standalone documentation's _Quirks_ section](https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html).
+
+## Autodetecting toolchain
+
+The autodetecting toolchain is a deprecated toolchain that is built into Bazel.
+Its name is a bit misleading: it doesn't autodetect anything. All it does is
+use `python3` from the environment a binary runs within. This provides extremely
+limited functionality to the rules (at build time, nothing is knowable about
+the Python runtime).
+
+Bazel itself automatically registers `@bazel_tools//tools/python:autodetecting_toolchain`
+as the lowest priority toolchain. For WORKSPACE builds, if no other toolchain
+is registered, that toolchain will be used. For bzlmod builds, rules_python
+automatically registers a higher-priority toolchain; it won't be used unless
+there is a toolchain misconfiguration somewhere.
+
+To aid migration off the Bazel-builtin toolchain, rules_python provides
+{obj}`@rules_python//python/runtime_env_toolchains:all`. This is an equivalent
+toolchain, but is implemented using rules_python's objects.
diff --git a/examples/build_file_generation/.bazelrc b/examples/build_file_generation/.bazelrc
index 7e6911f..e0b1984 100644
--- a/examples/build_file_generation/.bazelrc
+++ b/examples/build_file_generation/.bazelrc
@@ -2,7 +2,6 @@
 
 # Windows requires these for multi-python support:
 build --enable_runfiles
-startup --windows_enable_symlinks
 
 # The bzlmod version of this example is in examples/bzlmod_build_file_generation
 # Once WORKSPACE support is dropped, this example can be entirely deleted.
diff --git a/examples/build_file_generation/WORKSPACE b/examples/build_file_generation/WORKSPACE
index e283260..3f1fad8 100644
--- a/examples/build_file_generation/WORKSPACE
+++ b/examples/build_file_generation/WORKSPACE
@@ -84,8 +84,6 @@
     python_version = "3.9",
 )
 
-# Load the interpreter and pip_parse rules.
-load("@python39//:defs.bzl", "interpreter")
 load("@rules_python//python:pip.bzl", "pip_parse")
 
 # This macro wraps the `pip_repository` rule that invokes `pip`, with `incremental` set.
@@ -114,7 +112,7 @@
     # 3. Wrapper script, like in the autodetecting python toolchain.
     #
     # Here, we use the interpreter constant that resolves to the host interpreter from the default Python toolchain.
-    python_interpreter_target = interpreter,
+    python_interpreter_target = "@python39_host//:python",
     # Set the location of the lock file.
     requirements_lock = "//:requirements_lock.txt",
     requirements_windows = "//:requirements_windows.txt",
diff --git a/examples/build_file_generation/__init__.py b/examples/build_file_generation/__init__.py
index 37dea1b..22e4221 100644
--- a/examples/build_file_generation/__init__.py
+++ b/examples/build_file_generation/__init__.py
@@ -12,16 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import sphinx  # noqa
 from flask import Flask, jsonify
 from random_number_generator import generate_random_number
-import sphinx  # noqa
 
 app = Flask(__name__)
 
[email protected]('/random-number', methods=['GET'])
-def get_random_number():
-    return jsonify({'number': generate_random_number.generate_random_number()})
 
-"""Start the python web server"""
[email protected]("/random-number", methods=["GET"])
+def get_random_number():
+    return jsonify({"number": generate_random_number.generate_random_number()})
+
+
 def main():
+    """Start the python web server"""
     app.run()
diff --git a/examples/build_file_generation/__main__.py b/examples/build_file_generation/__main__.py
index 8f8efba..a77055f 100644
--- a/examples/build_file_generation/__main__.py
+++ b/examples/build_file_generation/__main__.py
@@ -14,5 +14,5 @@
 
 from __init__ import main
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()
diff --git a/examples/build_file_generation/__test__.py b/examples/build_file_generation/__test__.py
index c4fa5ef..45e127b 100644
--- a/examples/build_file_generation/__test__.py
+++ b/examples/build_file_generation/__test__.py
@@ -13,16 +13,19 @@
 # limitations under the License.
 
 import unittest
+
 from __init__ import app
 
+
 class TestServer(unittest.TestCase):
     def setUp(self):
         self.app = app.test_client()
-        
+
     def test_get_random_number(self):
-        response = self.app.get('/random-number')
+        response = self.app.get("/random-number")
         self.assertEqual(response.status_code, 200)
-        self.assertIn('number', response.json)
-        
-if __name__ == '__main__':
+        self.assertIn("number", response.json)
+
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/examples/build_file_generation/gazelle_python.yaml b/examples/build_file_generation/gazelle_python.yaml
index 6761b8d..cd5904d 100644
--- a/examples/build_file_generation/gazelle_python.yaml
+++ b/examples/build_file_generation/gazelle_python.yaml
@@ -6,822 +6,32 @@
 manifest:
   modules_mapping:
     alabaster: alabaster
-    alabaster.support: alabaster
     babel: Babel
-    babel.core: Babel
-    babel.dates: Babel
-    babel.languages: Babel
-    babel.lists: Babel
-    babel.localedata: Babel
-    babel.localtime: Babel
-    babel.messages: Babel
-    babel.messages.catalog: Babel
-    babel.messages.checkers: Babel
-    babel.messages.extract: Babel
-    babel.messages.frontend: Babel
-    babel.messages.jslexer: Babel
-    babel.messages.mofile: Babel
-    babel.messages.plurals: Babel
-    babel.messages.pofile: Babel
-    babel.numbers: Babel
-    babel.plural: Babel
-    babel.support: Babel
-    babel.units: Babel
-    babel.util: Babel
     certifi: certifi
-    certifi.core: certifi
     charset_normalizer: charset_normalizer
-    charset_normalizer.api: charset_normalizer
-    charset_normalizer.cd: charset_normalizer
-    charset_normalizer.cli: charset_normalizer
-    charset_normalizer.constant: charset_normalizer
-    charset_normalizer.legacy: charset_normalizer
-    charset_normalizer.md: charset_normalizer
-    charset_normalizer.md__mypyc: charset_normalizer
-    charset_normalizer.models: charset_normalizer
-    charset_normalizer.utils: charset_normalizer
-    charset_normalizer.version: charset_normalizer
     click: click
-    click.core: click
-    click.decorators: click
-    click.exceptions: click
-    click.formatting: click
-    click.globals: click
-    click.parser: click
-    click.shell_completion: click
-    click.termui: click
-    click.testing: click
-    click.types: click
-    click.utils: click
     docutils: docutils
-    docutils.core: docutils
-    docutils.examples: docutils
-    docutils.frontend: docutils
-    docutils.io: docutils
-    docutils.languages: docutils
-    docutils.languages.af: docutils
-    docutils.languages.ar: docutils
-    docutils.languages.ca: docutils
-    docutils.languages.cs: docutils
-    docutils.languages.da: docutils
-    docutils.languages.de: docutils
-    docutils.languages.en: docutils
-    docutils.languages.eo: docutils
-    docutils.languages.es: docutils
-    docutils.languages.fa: docutils
-    docutils.languages.fi: docutils
-    docutils.languages.fr: docutils
-    docutils.languages.gl: docutils
-    docutils.languages.he: docutils
-    docutils.languages.it: docutils
-    docutils.languages.ja: docutils
-    docutils.languages.ko: docutils
-    docutils.languages.lt: docutils
-    docutils.languages.lv: docutils
-    docutils.languages.nl: docutils
-    docutils.languages.pl: docutils
-    docutils.languages.pt_br: docutils
-    docutils.languages.ru: docutils
-    docutils.languages.sk: docutils
-    docutils.languages.sv: docutils
-    docutils.languages.uk: docutils
-    docutils.languages.zh_cn: docutils
-    docutils.languages.zh_tw: docutils
-    docutils.nodes: docutils
-    docutils.parsers: docutils
-    docutils.parsers.commonmark_wrapper: docutils
-    docutils.parsers.null: docutils
-    docutils.parsers.recommonmark_wrapper: docutils
-    docutils.parsers.rst: docutils
-    docutils.parsers.rst.directives: docutils
-    docutils.parsers.rst.directives.admonitions: docutils
-    docutils.parsers.rst.directives.body: docutils
-    docutils.parsers.rst.directives.html: docutils
-    docutils.parsers.rst.directives.images: docutils
-    docutils.parsers.rst.directives.misc: docutils
-    docutils.parsers.rst.directives.parts: docutils
-    docutils.parsers.rst.directives.references: docutils
-    docutils.parsers.rst.directives.tables: docutils
-    docutils.parsers.rst.languages: docutils
-    docutils.parsers.rst.languages.af: docutils
-    docutils.parsers.rst.languages.ar: docutils
-    docutils.parsers.rst.languages.ca: docutils
-    docutils.parsers.rst.languages.cs: docutils
-    docutils.parsers.rst.languages.da: docutils
-    docutils.parsers.rst.languages.de: docutils
-    docutils.parsers.rst.languages.en: docutils
-    docutils.parsers.rst.languages.eo: docutils
-    docutils.parsers.rst.languages.es: docutils
-    docutils.parsers.rst.languages.fa: docutils
-    docutils.parsers.rst.languages.fi: docutils
-    docutils.parsers.rst.languages.fr: docutils
-    docutils.parsers.rst.languages.gl: docutils
-    docutils.parsers.rst.languages.he: docutils
-    docutils.parsers.rst.languages.it: docutils
-    docutils.parsers.rst.languages.ja: docutils
-    docutils.parsers.rst.languages.ko: docutils
-    docutils.parsers.rst.languages.lt: docutils
-    docutils.parsers.rst.languages.lv: docutils
-    docutils.parsers.rst.languages.nl: docutils
-    docutils.parsers.rst.languages.pl: docutils
-    docutils.parsers.rst.languages.pt_br: docutils
-    docutils.parsers.rst.languages.ru: docutils
-    docutils.parsers.rst.languages.sk: docutils
-    docutils.parsers.rst.languages.sv: docutils
-    docutils.parsers.rst.languages.uk: docutils
-    docutils.parsers.rst.languages.zh_cn: docutils
-    docutils.parsers.rst.languages.zh_tw: docutils
-    docutils.parsers.rst.roles: docutils
-    docutils.parsers.rst.states: docutils
-    docutils.parsers.rst.tableparser: docutils
-    docutils.readers: docutils
-    docutils.readers.doctree: docutils
-    docutils.readers.pep: docutils
-    docutils.readers.standalone: docutils
-    docutils.statemachine: docutils
-    docutils.transforms: docutils
-    docutils.transforms.components: docutils
-    docutils.transforms.frontmatter: docutils
-    docutils.transforms.misc: docutils
-    docutils.transforms.parts: docutils
-    docutils.transforms.peps: docutils
-    docutils.transforms.references: docutils
-    docutils.transforms.universal: docutils
-    docutils.transforms.writer_aux: docutils
-    docutils.utils: docutils
-    docutils.utils.code_analyzer: docutils
-    docutils.utils.error_reporting: docutils
-    docutils.utils.math: docutils
-    docutils.utils.math.latex2mathml: docutils
-    docutils.utils.math.math2html: docutils
-    docutils.utils.math.tex2mathml_extern: docutils
-    docutils.utils.math.tex2unichar: docutils
-    docutils.utils.math.unichar2tex: docutils
-    docutils.utils.punctuation_chars: docutils
-    docutils.utils.roman: docutils
-    docutils.utils.smartquotes: docutils
-    docutils.utils.urischemes: docutils
-    docutils.writers: docutils
-    docutils.writers.docutils_xml: docutils
-    docutils.writers.html4css1: docutils
-    docutils.writers.html5_polyglot: docutils
-    docutils.writers.latex2e: docutils
-    docutils.writers.manpage: docutils
-    docutils.writers.null: docutils
-    docutils.writers.odf_odt: docutils
-    docutils.writers.odf_odt.prepstyles: docutils
-    docutils.writers.odf_odt.pygmentsformatter: docutils
-    docutils.writers.pep_html: docutils
-    docutils.writers.pseudoxml: docutils
-    docutils.writers.s5_html: docutils
-    docutils.writers.xetex: docutils
     flask: Flask
-    flask.app: Flask
-    flask.blueprints: Flask
-    flask.cli: Flask
-    flask.config: Flask
-    flask.ctx: Flask
-    flask.debughelpers: Flask
-    flask.globals: Flask
-    flask.helpers: Flask
-    flask.json: Flask
-    flask.json.provider: Flask
-    flask.json.tag: Flask
-    flask.logging: Flask
-    flask.scaffold: Flask
-    flask.sessions: Flask
-    flask.signals: Flask
-    flask.templating: Flask
-    flask.testing: Flask
-    flask.typing: Flask
-    flask.views: Flask
-    flask.wrappers: Flask
     idna: idna
-    idna.codec: idna
-    idna.compat: idna
-    idna.core: idna
-    idna.idnadata: idna
-    idna.intranges: idna
-    idna.package_data: idna
-    idna.uts46data: idna
     imagesize: imagesize
-    imagesize.imagesize: imagesize
     importlib_metadata: importlib_metadata
     itsdangerous: itsdangerous
-    itsdangerous.encoding: itsdangerous
-    itsdangerous.exc: itsdangerous
-    itsdangerous.serializer: itsdangerous
-    itsdangerous.signer: itsdangerous
-    itsdangerous.timed: itsdangerous
-    itsdangerous.url_safe: itsdangerous
     jinja2: Jinja2
-    jinja2.async_utils: Jinja2
-    jinja2.bccache: Jinja2
-    jinja2.compiler: Jinja2
-    jinja2.constants: Jinja2
-    jinja2.debug: Jinja2
-    jinja2.defaults: Jinja2
-    jinja2.environment: Jinja2
-    jinja2.exceptions: Jinja2
-    jinja2.ext: Jinja2
-    jinja2.filters: Jinja2
-    jinja2.idtracking: Jinja2
-    jinja2.lexer: Jinja2
-    jinja2.loaders: Jinja2
-    jinja2.meta: Jinja2
-    jinja2.nativetypes: Jinja2
-    jinja2.nodes: Jinja2
-    jinja2.optimizer: Jinja2
-    jinja2.parser: Jinja2
-    jinja2.runtime: Jinja2
-    jinja2.sandbox: Jinja2
-    jinja2.utils: Jinja2
-    jinja2.visitor: Jinja2
     markupsafe: MarkupSafe
     packaging: packaging
-    packaging.markers: packaging
-    packaging.metadata: packaging
-    packaging.requirements: packaging
-    packaging.specifiers: packaging
-    packaging.tags: packaging
-    packaging.utils: packaging
-    packaging.version: packaging
     pygments: Pygments
-    pygments.cmdline: Pygments
-    pygments.console: Pygments
-    pygments.filter: Pygments
-    pygments.filters: Pygments
-    pygments.formatter: Pygments
-    pygments.formatters: Pygments
-    pygments.formatters.bbcode: Pygments
-    pygments.formatters.groff: Pygments
-    pygments.formatters.html: Pygments
-    pygments.formatters.img: Pygments
-    pygments.formatters.irc: Pygments
-    pygments.formatters.latex: Pygments
-    pygments.formatters.other: Pygments
-    pygments.formatters.pangomarkup: Pygments
-    pygments.formatters.rtf: Pygments
-    pygments.formatters.svg: Pygments
-    pygments.formatters.terminal: Pygments
-    pygments.formatters.terminal256: Pygments
-    pygments.lexer: Pygments
-    pygments.lexers: Pygments
-    pygments.lexers.actionscript: Pygments
-    pygments.lexers.ada: Pygments
-    pygments.lexers.agile: Pygments
-    pygments.lexers.algebra: Pygments
-    pygments.lexers.ambient: Pygments
-    pygments.lexers.amdgpu: Pygments
-    pygments.lexers.ampl: Pygments
-    pygments.lexers.apdlexer: Pygments
-    pygments.lexers.apl: Pygments
-    pygments.lexers.archetype: Pygments
-    pygments.lexers.arrow: Pygments
-    pygments.lexers.arturo: Pygments
-    pygments.lexers.asc: Pygments
-    pygments.lexers.asm: Pygments
-    pygments.lexers.asn1: Pygments
-    pygments.lexers.automation: Pygments
-    pygments.lexers.bare: Pygments
-    pygments.lexers.basic: Pygments
-    pygments.lexers.bdd: Pygments
-    pygments.lexers.berry: Pygments
-    pygments.lexers.bibtex: Pygments
-    pygments.lexers.blueprint: Pygments
-    pygments.lexers.boa: Pygments
-    pygments.lexers.bqn: Pygments
-    pygments.lexers.business: Pygments
-    pygments.lexers.c_cpp: Pygments
-    pygments.lexers.c_like: Pygments
-    pygments.lexers.capnproto: Pygments
-    pygments.lexers.carbon: Pygments
-    pygments.lexers.cddl: Pygments
-    pygments.lexers.chapel: Pygments
-    pygments.lexers.clean: Pygments
-    pygments.lexers.comal: Pygments
-    pygments.lexers.compiled: Pygments
-    pygments.lexers.configs: Pygments
-    pygments.lexers.console: Pygments
-    pygments.lexers.cplint: Pygments
-    pygments.lexers.crystal: Pygments
-    pygments.lexers.csound: Pygments
-    pygments.lexers.css: Pygments
-    pygments.lexers.d: Pygments
-    pygments.lexers.dalvik: Pygments
-    pygments.lexers.data: Pygments
-    pygments.lexers.dax: Pygments
-    pygments.lexers.devicetree: Pygments
-    pygments.lexers.diff: Pygments
-    pygments.lexers.dns: Pygments
-    pygments.lexers.dotnet: Pygments
-    pygments.lexers.dsls: Pygments
-    pygments.lexers.dylan: Pygments
-    pygments.lexers.ecl: Pygments
-    pygments.lexers.eiffel: Pygments
-    pygments.lexers.elm: Pygments
-    pygments.lexers.elpi: Pygments
-    pygments.lexers.email: Pygments
-    pygments.lexers.erlang: Pygments
-    pygments.lexers.esoteric: Pygments
-    pygments.lexers.ezhil: Pygments
-    pygments.lexers.factor: Pygments
-    pygments.lexers.fantom: Pygments
-    pygments.lexers.felix: Pygments
-    pygments.lexers.fift: Pygments
-    pygments.lexers.floscript: Pygments
-    pygments.lexers.forth: Pygments
-    pygments.lexers.fortran: Pygments
-    pygments.lexers.foxpro: Pygments
-    pygments.lexers.freefem: Pygments
-    pygments.lexers.func: Pygments
-    pygments.lexers.functional: Pygments
-    pygments.lexers.futhark: Pygments
-    pygments.lexers.gcodelexer: Pygments
-    pygments.lexers.gdscript: Pygments
-    pygments.lexers.go: Pygments
-    pygments.lexers.grammar_notation: Pygments
-    pygments.lexers.graph: Pygments
-    pygments.lexers.graphics: Pygments
-    pygments.lexers.graphql: Pygments
-    pygments.lexers.graphviz: Pygments
-    pygments.lexers.gsql: Pygments
-    pygments.lexers.haskell: Pygments
-    pygments.lexers.haxe: Pygments
-    pygments.lexers.hdl: Pygments
-    pygments.lexers.hexdump: Pygments
-    pygments.lexers.html: Pygments
-    pygments.lexers.idl: Pygments
-    pygments.lexers.igor: Pygments
-    pygments.lexers.inferno: Pygments
-    pygments.lexers.installers: Pygments
-    pygments.lexers.int_fiction: Pygments
-    pygments.lexers.iolang: Pygments
-    pygments.lexers.j: Pygments
-    pygments.lexers.javascript: Pygments
-    pygments.lexers.jmespath: Pygments
-    pygments.lexers.jslt: Pygments
-    pygments.lexers.jsonnet: Pygments
-    pygments.lexers.julia: Pygments
-    pygments.lexers.jvm: Pygments
-    pygments.lexers.kuin: Pygments
-    pygments.lexers.lilypond: Pygments
-    pygments.lexers.lisp: Pygments
-    pygments.lexers.macaulay2: Pygments
-    pygments.lexers.make: Pygments
-    pygments.lexers.markup: Pygments
-    pygments.lexers.math: Pygments
-    pygments.lexers.matlab: Pygments
-    pygments.lexers.maxima: Pygments
-    pygments.lexers.meson: Pygments
-    pygments.lexers.mime: Pygments
-    pygments.lexers.minecraft: Pygments
-    pygments.lexers.mips: Pygments
-    pygments.lexers.ml: Pygments
-    pygments.lexers.modeling: Pygments
-    pygments.lexers.modula2: Pygments
-    pygments.lexers.monte: Pygments
-    pygments.lexers.mosel: Pygments
-    pygments.lexers.ncl: Pygments
-    pygments.lexers.nimrod: Pygments
-    pygments.lexers.nit: Pygments
-    pygments.lexers.nix: Pygments
-    pygments.lexers.oberon: Pygments
-    pygments.lexers.objective: Pygments
-    pygments.lexers.ooc: Pygments
-    pygments.lexers.openscad: Pygments
-    pygments.lexers.other: Pygments
-    pygments.lexers.parasail: Pygments
-    pygments.lexers.parsers: Pygments
-    pygments.lexers.pascal: Pygments
-    pygments.lexers.pawn: Pygments
-    pygments.lexers.perl: Pygments
-    pygments.lexers.phix: Pygments
-    pygments.lexers.php: Pygments
-    pygments.lexers.pointless: Pygments
-    pygments.lexers.pony: Pygments
-    pygments.lexers.praat: Pygments
-    pygments.lexers.procfile: Pygments
-    pygments.lexers.prolog: Pygments
-    pygments.lexers.promql: Pygments
-    pygments.lexers.ptx: Pygments
-    pygments.lexers.python: Pygments
-    pygments.lexers.q: Pygments
-    pygments.lexers.qlik: Pygments
-    pygments.lexers.qvt: Pygments
-    pygments.lexers.r: Pygments
-    pygments.lexers.rdf: Pygments
-    pygments.lexers.rebol: Pygments
-    pygments.lexers.resource: Pygments
-    pygments.lexers.ride: Pygments
-    pygments.lexers.rita: Pygments
-    pygments.lexers.rnc: Pygments
-    pygments.lexers.roboconf: Pygments
-    pygments.lexers.robotframework: Pygments
-    pygments.lexers.ruby: Pygments
-    pygments.lexers.rust: Pygments
-    pygments.lexers.sas: Pygments
-    pygments.lexers.savi: Pygments
-    pygments.lexers.scdoc: Pygments
-    pygments.lexers.scripting: Pygments
-    pygments.lexers.sgf: Pygments
-    pygments.lexers.shell: Pygments
-    pygments.lexers.sieve: Pygments
-    pygments.lexers.slash: Pygments
-    pygments.lexers.smalltalk: Pygments
-    pygments.lexers.smithy: Pygments
-    pygments.lexers.smv: Pygments
-    pygments.lexers.snobol: Pygments
-    pygments.lexers.solidity: Pygments
-    pygments.lexers.sophia: Pygments
-    pygments.lexers.special: Pygments
-    pygments.lexers.spice: Pygments
-    pygments.lexers.sql: Pygments
-    pygments.lexers.srcinfo: Pygments
-    pygments.lexers.stata: Pygments
-    pygments.lexers.supercollider: Pygments
-    pygments.lexers.tal: Pygments
-    pygments.lexers.tcl: Pygments
-    pygments.lexers.teal: Pygments
-    pygments.lexers.templates: Pygments
-    pygments.lexers.teraterm: Pygments
-    pygments.lexers.testing: Pygments
-    pygments.lexers.text: Pygments
-    pygments.lexers.textedit: Pygments
-    pygments.lexers.textfmts: Pygments
-    pygments.lexers.theorem: Pygments
-    pygments.lexers.thingsdb: Pygments
-    pygments.lexers.tlb: Pygments
-    pygments.lexers.tls: Pygments
-    pygments.lexers.tnt: Pygments
-    pygments.lexers.trafficscript: Pygments
-    pygments.lexers.typoscript: Pygments
-    pygments.lexers.ul4: Pygments
-    pygments.lexers.unicon: Pygments
-    pygments.lexers.urbi: Pygments
-    pygments.lexers.usd: Pygments
-    pygments.lexers.varnish: Pygments
-    pygments.lexers.verification: Pygments
-    pygments.lexers.verifpal: Pygments
-    pygments.lexers.web: Pygments
-    pygments.lexers.webassembly: Pygments
-    pygments.lexers.webidl: Pygments
-    pygments.lexers.webmisc: Pygments
-    pygments.lexers.wgsl: Pygments
-    pygments.lexers.whiley: Pygments
-    pygments.lexers.wowtoc: Pygments
-    pygments.lexers.wren: Pygments
-    pygments.lexers.x10: Pygments
-    pygments.lexers.xorg: Pygments
-    pygments.lexers.yang: Pygments
-    pygments.lexers.yara: Pygments
-    pygments.lexers.zig: Pygments
-    pygments.modeline: Pygments
-    pygments.plugin: Pygments
-    pygments.regexopt: Pygments
-    pygments.scanner: Pygments
-    pygments.sphinxext: Pygments
-    pygments.style: Pygments
-    pygments.styles: Pygments
-    pygments.styles.abap: Pygments
-    pygments.styles.algol: Pygments
-    pygments.styles.algol_nu: Pygments
-    pygments.styles.arduino: Pygments
-    pygments.styles.autumn: Pygments
-    pygments.styles.borland: Pygments
-    pygments.styles.bw: Pygments
-    pygments.styles.colorful: Pygments
-    pygments.styles.default: Pygments
-    pygments.styles.dracula: Pygments
-    pygments.styles.emacs: Pygments
-    pygments.styles.friendly: Pygments
-    pygments.styles.friendly_grayscale: Pygments
-    pygments.styles.fruity: Pygments
-    pygments.styles.gh_dark: Pygments
-    pygments.styles.gruvbox: Pygments
-    pygments.styles.igor: Pygments
-    pygments.styles.inkpot: Pygments
-    pygments.styles.lightbulb: Pygments
-    pygments.styles.lilypond: Pygments
-    pygments.styles.lovelace: Pygments
-    pygments.styles.manni: Pygments
-    pygments.styles.material: Pygments
-    pygments.styles.monokai: Pygments
-    pygments.styles.murphy: Pygments
-    pygments.styles.native: Pygments
-    pygments.styles.nord: Pygments
-    pygments.styles.onedark: Pygments
-    pygments.styles.paraiso_dark: Pygments
-    pygments.styles.paraiso_light: Pygments
-    pygments.styles.pastie: Pygments
-    pygments.styles.perldoc: Pygments
-    pygments.styles.rainbow_dash: Pygments
-    pygments.styles.rrt: Pygments
-    pygments.styles.sas: Pygments
-    pygments.styles.solarized: Pygments
-    pygments.styles.staroffice: Pygments
-    pygments.styles.stata_dark: Pygments
-    pygments.styles.stata_light: Pygments
-    pygments.styles.tango: Pygments
-    pygments.styles.trac: Pygments
-    pygments.styles.vim: Pygments
-    pygments.styles.vs: Pygments
-    pygments.styles.xcode: Pygments
-    pygments.styles.zenburn: Pygments
-    pygments.token: Pygments
-    pygments.unistring: Pygments
-    pygments.util: Pygments
     requests: requests
-    requests.adapters: requests
-    requests.api: requests
-    requests.auth: requests
-    requests.certs: requests
-    requests.compat: requests
-    requests.cookies: requests
-    requests.exceptions: requests
-    requests.help: requests
-    requests.hooks: requests
-    requests.models: requests
-    requests.packages: requests
-    requests.sessions: requests
-    requests.status_codes: requests
-    requests.structures: requests
-    requests.utils: requests
     snowballstemmer: snowballstemmer
-    snowballstemmer.among: snowballstemmer
-    snowballstemmer.arabic_stemmer: snowballstemmer
-    snowballstemmer.armenian_stemmer: snowballstemmer
-    snowballstemmer.basestemmer: snowballstemmer
-    snowballstemmer.basque_stemmer: snowballstemmer
-    snowballstemmer.catalan_stemmer: snowballstemmer
-    snowballstemmer.danish_stemmer: snowballstemmer
-    snowballstemmer.dutch_stemmer: snowballstemmer
-    snowballstemmer.english_stemmer: snowballstemmer
-    snowballstemmer.finnish_stemmer: snowballstemmer
-    snowballstemmer.french_stemmer: snowballstemmer
-    snowballstemmer.german_stemmer: snowballstemmer
-    snowballstemmer.greek_stemmer: snowballstemmer
-    snowballstemmer.hindi_stemmer: snowballstemmer
-    snowballstemmer.hungarian_stemmer: snowballstemmer
-    snowballstemmer.indonesian_stemmer: snowballstemmer
-    snowballstemmer.irish_stemmer: snowballstemmer
-    snowballstemmer.italian_stemmer: snowballstemmer
-    snowballstemmer.lithuanian_stemmer: snowballstemmer
-    snowballstemmer.nepali_stemmer: snowballstemmer
-    snowballstemmer.norwegian_stemmer: snowballstemmer
-    snowballstemmer.porter_stemmer: snowballstemmer
-    snowballstemmer.portuguese_stemmer: snowballstemmer
-    snowballstemmer.romanian_stemmer: snowballstemmer
-    snowballstemmer.russian_stemmer: snowballstemmer
-    snowballstemmer.serbian_stemmer: snowballstemmer
-    snowballstemmer.spanish_stemmer: snowballstemmer
-    snowballstemmer.swedish_stemmer: snowballstemmer
-    snowballstemmer.tamil_stemmer: snowballstemmer
-    snowballstemmer.turkish_stemmer: snowballstemmer
-    snowballstemmer.yiddish_stemmer: snowballstemmer
     sphinx: sphinx
-    sphinx.addnodes: sphinx
-    sphinx.application: sphinx
-    sphinx.builders: sphinx
-    sphinx.builders.changes: sphinx
-    sphinx.builders.dirhtml: sphinx
-    sphinx.builders.dummy: sphinx
-    sphinx.builders.epub3: sphinx
-    sphinx.builders.gettext: sphinx
-    sphinx.builders.html: sphinx
-    sphinx.builders.html.transforms: sphinx
-    sphinx.builders.latex: sphinx
-    sphinx.builders.latex.constants: sphinx
-    sphinx.builders.latex.nodes: sphinx
-    sphinx.builders.latex.theming: sphinx
-    sphinx.builders.latex.transforms: sphinx
-    sphinx.builders.latex.util: sphinx
-    sphinx.builders.linkcheck: sphinx
-    sphinx.builders.manpage: sphinx
-    sphinx.builders.singlehtml: sphinx
-    sphinx.builders.texinfo: sphinx
-    sphinx.builders.text: sphinx
-    sphinx.builders.xml: sphinx
-    sphinx.cmd: sphinx
-    sphinx.cmd.build: sphinx
-    sphinx.cmd.make_mode: sphinx
-    sphinx.cmd.quickstart: sphinx
-    sphinx.config: sphinx
-    sphinx.deprecation: sphinx
-    sphinx.directives: sphinx
-    sphinx.directives.code: sphinx
-    sphinx.directives.other: sphinx
-    sphinx.directives.patches: sphinx
-    sphinx.domains: sphinx
-    sphinx.domains.c: sphinx
-    sphinx.domains.changeset: sphinx
-    sphinx.domains.citation: sphinx
-    sphinx.domains.cpp: sphinx
-    sphinx.domains.index: sphinx
-    sphinx.domains.javascript: sphinx
-    sphinx.domains.math: sphinx
-    sphinx.domains.python: sphinx
-    sphinx.domains.rst: sphinx
-    sphinx.domains.std: sphinx
-    sphinx.environment: sphinx
-    sphinx.environment.adapters: sphinx
-    sphinx.environment.adapters.asset: sphinx
-    sphinx.environment.adapters.indexentries: sphinx
-    sphinx.environment.adapters.toctree: sphinx
-    sphinx.environment.collectors: sphinx
-    sphinx.environment.collectors.asset: sphinx
-    sphinx.environment.collectors.dependencies: sphinx
-    sphinx.environment.collectors.metadata: sphinx
-    sphinx.environment.collectors.title: sphinx
-    sphinx.environment.collectors.toctree: sphinx
-    sphinx.errors: sphinx
-    sphinx.events: sphinx
-    sphinx.ext: sphinx
-    sphinx.ext.apidoc: sphinx
-    sphinx.ext.autodoc: sphinx
-    sphinx.ext.autodoc.directive: sphinx
-    sphinx.ext.autodoc.importer: sphinx
-    sphinx.ext.autodoc.mock: sphinx
-    sphinx.ext.autodoc.preserve_defaults: sphinx
-    sphinx.ext.autodoc.type_comment: sphinx
-    sphinx.ext.autodoc.typehints: sphinx
-    sphinx.ext.autosectionlabel: sphinx
-    sphinx.ext.autosummary: sphinx
-    sphinx.ext.autosummary.generate: sphinx
-    sphinx.ext.coverage: sphinx
-    sphinx.ext.doctest: sphinx
-    sphinx.ext.duration: sphinx
-    sphinx.ext.extlinks: sphinx
-    sphinx.ext.githubpages: sphinx
-    sphinx.ext.graphviz: sphinx
-    sphinx.ext.ifconfig: sphinx
-    sphinx.ext.imgconverter: sphinx
-    sphinx.ext.imgmath: sphinx
-    sphinx.ext.inheritance_diagram: sphinx
-    sphinx.ext.intersphinx: sphinx
-    sphinx.ext.linkcode: sphinx
-    sphinx.ext.mathjax: sphinx
-    sphinx.ext.napoleon: sphinx
-    sphinx.ext.napoleon.docstring: sphinx
-    sphinx.ext.todo: sphinx
-    sphinx.ext.viewcode: sphinx
-    sphinx.extension: sphinx
-    sphinx.highlighting: sphinx
-    sphinx.io: sphinx
-    sphinx.jinja2glue: sphinx
-    sphinx.locale: sphinx
-    sphinx.parsers: sphinx
-    sphinx.project: sphinx
-    sphinx.pycode: sphinx
-    sphinx.pycode.ast: sphinx
-    sphinx.pycode.parser: sphinx
-    sphinx.pygments_styles: sphinx
-    sphinx.registry: sphinx
-    sphinx.roles: sphinx
-    sphinx.search: sphinx
-    sphinx.search.da: sphinx
-    sphinx.search.de: sphinx
-    sphinx.search.en: sphinx
-    sphinx.search.es: sphinx
-    sphinx.search.fi: sphinx
-    sphinx.search.fr: sphinx
-    sphinx.search.hu: sphinx
-    sphinx.search.it: sphinx
-    sphinx.search.ja: sphinx
-    sphinx.search.nl: sphinx
-    sphinx.search.no: sphinx
-    sphinx.search.pt: sphinx
-    sphinx.search.ro: sphinx
-    sphinx.search.ru: sphinx
-    sphinx.search.sv: sphinx
-    sphinx.search.tr: sphinx
-    sphinx.search.zh: sphinx
-    sphinx.testing: sphinx
-    sphinx.testing.fixtures: sphinx
-    sphinx.testing.path: sphinx
-    sphinx.testing.restructuredtext: sphinx
-    sphinx.testing.util: sphinx
-    sphinx.theming: sphinx
-    sphinx.transforms: sphinx
-    sphinx.transforms.compact_bullet_list: sphinx
-    sphinx.transforms.i18n: sphinx
-    sphinx.transforms.post_transforms: sphinx
-    sphinx.transforms.post_transforms.code: sphinx
-    sphinx.transforms.post_transforms.images: sphinx
-    sphinx.transforms.references: sphinx
-    sphinx.util: sphinx
-    sphinx.util.build_phase: sphinx
-    sphinx.util.cfamily: sphinx
-    sphinx.util.console: sphinx
-    sphinx.util.display: sphinx
-    sphinx.util.docfields: sphinx
-    sphinx.util.docstrings: sphinx
-    sphinx.util.docutils: sphinx
-    sphinx.util.exceptions: sphinx
-    sphinx.util.fileutil: sphinx
-    sphinx.util.http_date: sphinx
-    sphinx.util.i18n: sphinx
-    sphinx.util.images: sphinx
-    sphinx.util.index_entries: sphinx
-    sphinx.util.inspect: sphinx
-    sphinx.util.inventory: sphinx
-    sphinx.util.logging: sphinx
-    sphinx.util.matching: sphinx
-    sphinx.util.math: sphinx
-    sphinx.util.nodes: sphinx
-    sphinx.util.osutil: sphinx
-    sphinx.util.parallel: sphinx
-    sphinx.util.png: sphinx
-    sphinx.util.requests: sphinx
-    sphinx.util.rst: sphinx
-    sphinx.util.tags: sphinx
-    sphinx.util.template: sphinx
-    sphinx.util.texescape: sphinx
-    sphinx.util.typing: sphinx
-    sphinx.versioning: sphinx
-    sphinx.writers: sphinx
-    sphinx.writers.html: sphinx
-    sphinx.writers.html5: sphinx
-    sphinx.writers.latex: sphinx
-    sphinx.writers.manpage: sphinx
-    sphinx.writers.texinfo: sphinx
-    sphinx.writers.text: sphinx
-    sphinx.writers.xml: sphinx
     sphinxcontrib.applehelp: sphinxcontrib_applehelp
     sphinxcontrib.devhelp: sphinxcontrib_devhelp
     sphinxcontrib.htmlhelp: sphinxcontrib_htmlhelp
     sphinxcontrib.jsmath: sphinxcontrib_jsmath
-    sphinxcontrib.jsmath.version: sphinxcontrib_jsmath
     sphinxcontrib.qthelp: sphinxcontrib_qthelp
     sphinxcontrib.serializinghtml: sphinxcontrib_serializinghtml
-    sphinxcontrib.serializinghtml.jsonimpl: sphinxcontrib_serializinghtml
     urllib3: urllib3
-    urllib3.connection: urllib3
-    urllib3.connectionpool: urllib3
-    urllib3.contrib: urllib3
-    urllib3.contrib.pyopenssl: urllib3
-    urllib3.contrib.securetransport: urllib3
-    urllib3.contrib.socks: urllib3
-    urllib3.exceptions: urllib3
-    urllib3.fields: urllib3
-    urllib3.filepost: urllib3
-    urllib3.poolmanager: urllib3
-    urllib3.response: urllib3
-    urllib3.util: urllib3
-    urllib3.util.connection: urllib3
-    urllib3.util.proxy: urllib3
-    urllib3.util.request: urllib3
-    urllib3.util.response: urllib3
-    urllib3.util.retry: urllib3
-    urllib3.util.ssl_: urllib3
-    urllib3.util.ssl_match_hostname: urllib3
-    urllib3.util.ssltransport: urllib3
-    urllib3.util.timeout: urllib3
-    urllib3.util.url: urllib3
-    urllib3.util.util: urllib3
-    urllib3.util.wait: urllib3
     werkzeug: Werkzeug
-    werkzeug.datastructures: Werkzeug
-    werkzeug.debug: Werkzeug
-    werkzeug.debug.console: Werkzeug
-    werkzeug.debug.repr: Werkzeug
-    werkzeug.debug.tbtools: Werkzeug
-    werkzeug.exceptions: Werkzeug
-    werkzeug.formparser: Werkzeug
-    werkzeug.http: Werkzeug
-    werkzeug.local: Werkzeug
-    werkzeug.middleware: Werkzeug
-    werkzeug.middleware.dispatcher: Werkzeug
-    werkzeug.middleware.http_proxy: Werkzeug
-    werkzeug.middleware.lint: Werkzeug
-    werkzeug.middleware.profiler: Werkzeug
-    werkzeug.middleware.proxy_fix: Werkzeug
-    werkzeug.middleware.shared_data: Werkzeug
-    werkzeug.routing: Werkzeug
-    werkzeug.routing.converters: Werkzeug
-    werkzeug.routing.exceptions: Werkzeug
-    werkzeug.routing.map: Werkzeug
-    werkzeug.routing.matcher: Werkzeug
-    werkzeug.routing.rules: Werkzeug
-    werkzeug.sansio: Werkzeug
-    werkzeug.sansio.http: Werkzeug
-    werkzeug.sansio.multipart: Werkzeug
-    werkzeug.sansio.request: Werkzeug
-    werkzeug.sansio.response: Werkzeug
-    werkzeug.sansio.utils: Werkzeug
-    werkzeug.security: Werkzeug
-    werkzeug.serving: Werkzeug
-    werkzeug.test: Werkzeug
-    werkzeug.testapp: Werkzeug
-    werkzeug.urls: Werkzeug
-    werkzeug.user_agent: Werkzeug
-    werkzeug.utils: Werkzeug
-    werkzeug.wrappers: Werkzeug
-    werkzeug.wrappers.request: Werkzeug
-    werkzeug.wrappers.response: Werkzeug
-    werkzeug.wsgi: Werkzeug
     zipp: zipp
-    zipp.py310compat: zipp
   pip_repository:
     name: pip
-integrity: 4658c69530ba1ee117da0c963c9c671041e1c470d938c31cdbbfccc21dd259cb
+integrity: 19c0e03a9cf1d6bbb2dfe301325fefc59a30c3f967f84c2f1baaf915c2805da7
diff --git a/examples/build_file_generation/random_number_generator/__init__.py b/examples/build_file_generation/random_number_generator/__init__.py
index bbdfb4c..4101095 100644
--- a/examples/build_file_generation/random_number_generator/__init__.py
+++ b/examples/build_file_generation/random_number_generator/__init__.py
@@ -11,4 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
diff --git a/examples/build_file_generation/random_number_generator/__test__.py b/examples/build_file_generation/random_number_generator/__test__.py
index 8cfb235..5facfee 100644
--- a/examples/build_file_generation/random_number_generator/__test__.py
+++ b/examples/build_file_generation/random_number_generator/__test__.py
@@ -13,13 +13,16 @@
 # limitations under the License.
 
 import unittest
+
 import random_number_generator.generate_random_number as generate_random_number
 
+
 class TestRandomNumberGenerator(unittest.TestCase):
     def test_generate_random_number(self):
         number = generate_random_number.generate_random_number()
         self.assertGreaterEqual(number, 1)
         self.assertLessEqual(number, 10)
-        
-if __name__ == '__main__':
+
+
+if __name__ == "__main__":
     unittest.main()
diff --git a/examples/build_file_generation/random_number_generator/generate_random_number.py b/examples/build_file_generation/random_number_generator/generate_random_number.py
index e198b5b..d551e33 100644
--- a/examples/build_file_generation/random_number_generator/generate_random_number.py
+++ b/examples/build_file_generation/random_number_generator/generate_random_number.py
@@ -14,6 +14,7 @@
 
 import random
 
-"""Generate a random number"""
+
 def generate_random_number():
+    """Generate a random number"""
     return random.randint(1, 10)
diff --git a/examples/bzlmod/.bazelrc b/examples/bzlmod/.bazelrc
index 6f557e6..578342d 100644
--- a/examples/bzlmod/.bazelrc
+++ b/examples/bzlmod/.bazelrc
@@ -1,4 +1,4 @@
-common --experimental_enable_bzlmod
+common --enable_bzlmod
 
 coverage --java_runtime_version=remotejdk_11
 
@@ -6,5 +6,3 @@
 
 # Windows requires these for multi-python support:
 build --enable_runfiles
-
-startup --windows_enable_symlinks
diff --git a/examples/bzlmod/.bazelversion b/examples/bzlmod/.bazelversion
deleted file mode 100644
index 09b254e..0000000
--- a/examples/bzlmod/.bazelversion
+++ /dev/null
@@ -1 +0,0 @@
-6.0.0
diff --git a/examples/bzlmod/MODULE.bazel b/examples/bzlmod/MODULE.bazel
index e49b586..3da17a6 100644
--- a/examples/bzlmod/MODULE.bazel
+++ b/examples/bzlmod/MODULE.bazel
@@ -15,7 +15,7 @@
 bazel_dep(name = "rules_proto", version = "5.3.0-21.7")
 
 # (py_proto_library specific) Add the protobuf library for well-known types (e.g. `Any`, `Timestamp`, etc)
-bazel_dep(name = "protobuf", version = "21.7", repo_name = "com_google_protobuf")
+bazel_dep(name = "protobuf", version = "24.4", repo_name = "com_google_protobuf")
 
 # We next initialize the python toolchain using the extension.
 # You can set different Python versions in this block.
@@ -43,6 +43,13 @@
 # rules based on the `python_version` arg values.
 use_repo(python, "python_3_10", "python_3_9", "python_versions")
 
+# EXPERIMENTAL: This is experimental and may be removed without notice
+uv = use_extension("@rules_python//python/uv:extensions.bzl", "uv")
+uv.toolchain(uv_version = "0.2.23")
+use_repo(uv, "uv_toolchains")
+
+register_toolchains("@uv_toolchains//:all")
+
 # This extension allows a user to create modifications to how rules_python
 # creates different wheel repositories.  Different attributes allow the user
 # to modify the BUILD file, and copy files.
@@ -94,6 +101,20 @@
 # Alternatively, `python_interpreter_target` can be used to directly specify
 # the Python interpreter to run to resolve dependencies.
 pip.parse(
+    # We can use `envsubst in the above
+    envsubst = ["PIP_INDEX_URL"],
+    # Use the bazel downloader to query the simple API for downloading the sources
+    # Note, that we can use envsubst for this value.
+    experimental_index_url = "${PIP_INDEX_URL:-https://pypi.org/simple}",
+    # One can also select a particular index for a particular package.
+    # This ensures that the setup is resistant against confusion attacks.
+    # experimental_index_url_overrides = {
+    #    "my_package": "https://different-index-url.com",
+    # },
+    # Or you can specify extra indexes like with `pip`:
+    # experimental_extra_index_urls = [
+    #    "https://different-index-url.com",
+    # ],
     experimental_requirement_cycles = {
         "sphinx": [
             "sphinx",
@@ -107,14 +128,24 @@
     # You can use one of the values below to specify the target platform
     # to generate the dependency graph for.
     experimental_target_platforms = [
-        "all",
-        "linux_*",
-        "host",
+        # Specifying the target platforms explicitly
+        "cp39_linux_x86_64",
+        "cp39_linux_*",
+        "cp39_*",
     ],
     hub_name = "pip",
     python_version = "3.9",
-    requirements_lock = "//:requirements_lock_3_9.txt",
-    requirements_windows = "//:requirements_windows_3_9.txt",
+    # The requirements files for each platform that we want to support.
+    requirements_by_platform = {
+        # Default requirements file for needs to explicitly provide the platforms
+        "//:requirements_lock_3_9.txt": "linux_*,osx_*",
+        # This API allows one to specify additional platforms that the users
+        # configure the toolchains for themselves. In this example we add
+        # `windows_aarch64` to illustrate that `rules_python` won't fail to
+        # process the value, but it does not mean that this example will work
+        # on Windows ARM.
+        "//:requirements_windows_3_9.txt": "windows_x86_64,windows_aarch64",
+    },
     # These modifications were created above and we
     # are providing pip.parse with the label of the mod
     # and the name of the wheel.
@@ -137,8 +168,13 @@
     # You can use one of the values below to specify the target platform
     # to generate the dependency graph for.
     experimental_target_platforms = [
-        "all",
+        # Using host python version
         "linux_*",
+        "osx_*",
+        "windows_*",
+        # Or specifying an exact platform
+        "linux_x86_64",
+        # Or the following to get the `host` platform only
         "host",
     ],
     hub_name = "pip",
@@ -173,14 +209,3 @@
     module_name = "other_module",
     path = "other_module",
 )
-
-# =====
-# Config for testing duplicate packages in requirements
-# =====
-#
-pip.parse(
-    hub_name = "dupe_requirements",
-    python_version = "3.9",  # Must match whatever is marked is_default=True
-    requirements_lock = "//tests/dupe_requirements:requirements.txt",
-)
-use_repo(pip, "dupe_requirements")
diff --git a/examples/bzlmod/MODULE.bazel.lock b/examples/bzlmod/MODULE.bazel.lock
deleted file mode 100644
index dbdc3b1..0000000
--- a/examples/bzlmod/MODULE.bazel.lock
+++ /dev/null
@@ -1,4157 +0,0 @@
-{
-  "lockFileVersion": 3,
-  "moduleFileHash": "e369c446c373fa9ee4c990e4e4b92242b7c3c555931d533880da24aa248d8997",
-  "flags": {
-    "cmdRegistries": [
-      "https://bcr.bazel.build/"
-    ],
-    "cmdModuleOverrides": {},
-    "allowedYankedVersions": [],
-    "envVarAllowedYankedVersions": "",
-    "ignoreDevDependency": false,
-    "directDependenciesMode": "WARNING",
-    "compatibilityMode": "ERROR"
-  },
-  "localOverrideHashes": {
-    "other_module": "a923862b93886a355d86edd0b07294b418337deafb325ca55b2b20ace6ab48d3",
-    "rules_python": "9c72d3982a66312a41d6e5e0480248e28ad81ffd240675f06b38775f2759d027",
-    "bazel_tools": "922ea6752dc9105de5af957f7a99a6933c0a6a712d23df6aad16a9c399f7e787"
-  },
-  "moduleDepGraph": {
-    "<root>": {
-      "name": "example_bzlmod",
-      "version": "0.0.0",
-      "key": "<root>",
-      "repoName": "example_bzlmod",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@rules_python//python/extensions:python.bzl",
-          "extensionName": "python",
-          "usingModule": "<root>",
-          "location": {
-            "file": "@@//:MODULE.bazel",
-            "line": 16,
-            "column": 23
-          },
-          "imports": {
-            "python_3_10": "python_3_10",
-            "python_3_9": "python_3_9",
-            "python_versions": "python_versions"
-          },
-          "devImports": [],
-          "tags": [
-            {
-              "tagName": "toolchain",
-              "attributeValues": {
-                "configure_coverage_tool": true,
-                "is_default": true,
-                "python_version": "3.9"
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@//:MODULE.bazel",
-                "line": 17,
-                "column": 17
-              }
-            },
-            {
-              "tagName": "toolchain",
-              "attributeValues": {
-                "configure_coverage_tool": true,
-                "python_version": "3.10"
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@//:MODULE.bazel",
-                "line": 29,
-                "column": 17
-              }
-            }
-          ],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        },
-        {
-          "extensionBzlFile": "@rules_python//python/extensions:pip.bzl",
-          "extensionName": "pip",
-          "usingModule": "<root>",
-          "location": {
-            "file": "@@//:MODULE.bazel",
-            "line": 47,
-            "column": 20
-          },
-          "imports": {
-            "whl_mods_hub": "whl_mods_hub",
-            "pip": "pip"
-          },
-          "devImports": [],
-          "tags": [
-            {
-              "tagName": "whl_mods",
-              "attributeValues": {
-                "additive_build_content_file": "//whl_mods:appended_build_content.BUILD",
-                "data": [
-                  ":generated_file"
-                ],
-                "hub_name": "whl_mods_hub",
-                "whl_name": "requests"
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@//:MODULE.bazel",
-                "line": 50,
-                "column": 13
-              }
-            },
-            {
-              "tagName": "whl_mods",
-              "attributeValues": {
-                "additive_build_content": "load(\"@bazel_skylib//rules:write_file.bzl\", \"write_file\")\nwrite_file(\n    name = \"generated_file\",\n    out = \"generated_file.txt\",\n    content = [\"Hello world from build content file\"],\n)\n",
-                "copy_executables": {
-                  "'@@//whl_mods:data/copy_executable.py'": "copied_content/executable.py"
-                },
-                "copy_files": {
-                  "'@@//whl_mods:data/copy_file.txt'": "copied_content/file.txt"
-                },
-                "data": [
-                  ":generated_file"
-                ],
-                "data_exclude_glob": [
-                  "site-packages/*.dist-info/WHEEL"
-                ],
-                "hub_name": "whl_mods_hub",
-                "whl_name": "wheel"
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@//:MODULE.bazel",
-                "line": 69,
-                "column": 13
-              }
-            },
-            {
-              "tagName": "parse",
-              "attributeValues": {
-                "hub_name": "pip",
-                "python_version": "3.9",
-                "requirements_lock": "//:requirements_lock_3_9.txt",
-                "requirements_windows": "//:requirements_windows_3_9.txt",
-                "whl_modifications": {
-                  "@whl_mods_hub//:requests.json": "requests",
-                  "@whl_mods_hub//:wheel.json": "wheel"
-                }
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@//:MODULE.bazel",
-                "line": 90,
-                "column": 10
-              }
-            },
-            {
-              "tagName": "parse",
-              "attributeValues": {
-                "hub_name": "pip",
-                "python_version": "3.10",
-                "requirements_lock": "//:requirements_lock_3_10.txt",
-                "requirements_windows": "//:requirements_windows_3_10.txt",
-                "whl_modifications": {
-                  "@whl_mods_hub//:requests.json": "requests",
-                  "@whl_mods_hub//:wheel.json": "wheel"
-                }
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@//:MODULE.bazel",
-                "line": 103,
-                "column": 10
-              }
-            }
-          ],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "bazel_skylib": "[email protected]",
-        "rules_python": "rules_python@_",
-        "our_other_module": "other_module@_",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      }
-    },
-    "[email protected]": {
-      "name": "bazel_skylib",
-      "version": "1.4.1",
-      "key": "[email protected]",
-      "repoName": "bazel_skylib",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [
-        "//toolchains/unittest:cmd_toolchain",
-        "//toolchains/unittest:bash_toolchain"
-      ],
-      "extensionUsages": [],
-      "deps": {
-        "platforms": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "bazel_skylib~1.4.1",
-          "urls": [
-            "https://github.com/bazelbuild/bazel-skylib/releases/download/1.4.1/bazel-skylib-1.4.1.tar.gz"
-          ],
-          "integrity": "sha256-uKFSeQF3QYCvx5iusoxGNL3M8ZxNmOe90c550f6aqtc=",
-          "strip_prefix": "",
-          "remote_patches": {},
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "rules_python@_": {
-      "name": "rules_python",
-      "version": "0.0.0",
-      "key": "rules_python@_",
-      "repoName": "rules_python",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [
-        "@pythons_hub//:all"
-      ],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@rules_python//python/extensions/private:internal_deps.bzl",
-          "extensionName": "internal_deps",
-          "usingModule": "rules_python@_",
-          "location": {
-            "file": "@@rules_python~override//:MODULE.bazel",
-            "line": 15,
-            "column": 30
-          },
-          "imports": {
-            "rules_python_internal": "rules_python_internal",
-            "pypi__build": "pypi__build",
-            "pypi__click": "pypi__click",
-            "pypi__colorama": "pypi__colorama",
-            "pypi__importlib_metadata": "pypi__importlib_metadata",
-            "pypi__installer": "pypi__installer",
-            "pypi__more_itertools": "pypi__more_itertools",
-            "pypi__packaging": "pypi__packaging",
-            "pypi__pep517": "pypi__pep517",
-            "pypi__pip": "pypi__pip",
-            "pypi__pip_tools": "pypi__pip_tools",
-            "pypi__pyproject_hooks": "pypi__pyproject_hooks",
-            "pypi__setuptools": "pypi__setuptools",
-            "pypi__tomli": "pypi__tomli",
-            "pypi__wheel": "pypi__wheel",
-            "pypi__zipp": "pypi__zipp"
-          },
-          "devImports": [],
-          "tags": [
-            {
-              "tagName": "install",
-              "attributeValues": {},
-              "devDependency": false,
-              "location": {
-                "file": "@@rules_python~override//:MODULE.bazel",
-                "line": 16,
-                "column": 22
-              }
-            }
-          ],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        },
-        {
-          "extensionBzlFile": "@rules_python//python/extensions:python.bzl",
-          "extensionName": "python",
-          "usingModule": "rules_python@_",
-          "location": {
-            "file": "@@rules_python~override//:MODULE.bazel",
-            "line": 41,
-            "column": 23
-          },
-          "imports": {
-            "pythons_hub": "pythons_hub"
-          },
-          "devImports": [],
-          "tags": [
-            {
-              "tagName": "toolchain",
-              "attributeValues": {
-                "is_default": true,
-                "python_version": "3.11"
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@rules_python~override//:MODULE.bazel",
-                "line": 47,
-                "column": 17
-              }
-            }
-          ],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "bazel_features": "[email protected]",
-        "bazel_skylib": "[email protected]",
-        "platforms": "[email protected]",
-        "rules_proto": "[email protected]",
-        "com_google_protobuf": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      }
-    },
-    "other_module@_": {
-      "name": "other_module",
-      "version": "",
-      "key": "other_module@_",
-      "repoName": "other_module",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@rules_python//python/extensions:python.bzl",
-          "extensionName": "python",
-          "usingModule": "other_module@_",
-          "location": {
-            "file": "@@other_module~override//:MODULE.bazel",
-            "line": 27,
-            "column": 23
-          },
-          "imports": {
-            "python_versions": "python_versions",
-            "python_3_9": "python_3_9",
-            "python_3_11": "python_3_11"
-          },
-          "devImports": [],
-          "tags": [
-            {
-              "tagName": "toolchain",
-              "attributeValues": {
-                "configure_coverage_tool": true,
-                "python_version": "3.9"
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@other_module~override//:MODULE.bazel",
-                "line": 28,
-                "column": 17
-              }
-            },
-            {
-              "tagName": "toolchain",
-              "attributeValues": {
-                "configure_coverage_tool": true,
-                "is_default": true,
-                "python_version": "3.11"
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@other_module~override//:MODULE.bazel",
-                "line": 32,
-                "column": 17
-              }
-            }
-          ],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        },
-        {
-          "extensionBzlFile": "@rules_python//python/extensions:pip.bzl",
-          "extensionName": "pip",
-          "usingModule": "other_module@_",
-          "location": {
-            "file": "@@other_module~override//:MODULE.bazel",
-            "line": 47,
-            "column": 20
-          },
-          "imports": {
-            "other_module_pip": "other_module_pip"
-          },
-          "devImports": [],
-          "tags": [
-            {
-              "tagName": "parse",
-              "attributeValues": {
-                "hub_name": "other_module_pip",
-                "python_version": "3.11",
-                "requirements_lock": ":requirements_lock_3_11.txt"
-              },
-              "devDependency": false,
-              "location": {
-                "file": "@@other_module~override//:MODULE.bazel",
-                "line": 48,
-                "column": 10
-              }
-            }
-          ],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "rules_python": "rules_python@_",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      }
-    },
-    "bazel_tools@_": {
-      "name": "bazel_tools",
-      "version": "",
-      "key": "bazel_tools@_",
-      "repoName": "bazel_tools",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [
-        "@local_config_cc_toolchains//:all",
-        "@local_config_sh//:local_sh_toolchain"
-      ],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@bazel_tools//tools/cpp:cc_configure.bzl",
-          "extensionName": "cc_configure_extension",
-          "usingModule": "bazel_tools@_",
-          "location": {
-            "file": "@@bazel_tools//:MODULE.bazel",
-            "line": 17,
-            "column": 29
-          },
-          "imports": {
-            "local_config_cc": "local_config_cc",
-            "local_config_cc_toolchains": "local_config_cc_toolchains"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        },
-        {
-          "extensionBzlFile": "@bazel_tools//tools/osx:xcode_configure.bzl",
-          "extensionName": "xcode_configure_extension",
-          "usingModule": "bazel_tools@_",
-          "location": {
-            "file": "@@bazel_tools//:MODULE.bazel",
-            "line": 21,
-            "column": 32
-          },
-          "imports": {
-            "local_config_xcode": "local_config_xcode"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        },
-        {
-          "extensionBzlFile": "@rules_java//java:extensions.bzl",
-          "extensionName": "toolchains",
-          "usingModule": "bazel_tools@_",
-          "location": {
-            "file": "@@bazel_tools//:MODULE.bazel",
-            "line": 24,
-            "column": 32
-          },
-          "imports": {
-            "local_jdk": "local_jdk",
-            "remote_java_tools": "remote_java_tools",
-            "remote_java_tools_linux": "remote_java_tools_linux",
-            "remote_java_tools_windows": "remote_java_tools_windows",
-            "remote_java_tools_darwin_x86_64": "remote_java_tools_darwin_x86_64",
-            "remote_java_tools_darwin_arm64": "remote_java_tools_darwin_arm64"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        },
-        {
-          "extensionBzlFile": "@bazel_tools//tools/sh:sh_configure.bzl",
-          "extensionName": "sh_configure_extension",
-          "usingModule": "bazel_tools@_",
-          "location": {
-            "file": "@@bazel_tools//:MODULE.bazel",
-            "line": 35,
-            "column": 39
-          },
-          "imports": {
-            "local_config_sh": "local_config_sh"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        },
-        {
-          "extensionBzlFile": "@bazel_tools//tools/test:extensions.bzl",
-          "extensionName": "remote_coverage_tools_extension",
-          "usingModule": "bazel_tools@_",
-          "location": {
-            "file": "@@bazel_tools//:MODULE.bazel",
-            "line": 39,
-            "column": 48
-          },
-          "imports": {
-            "remote_coverage_tools": "remote_coverage_tools"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        },
-        {
-          "extensionBzlFile": "@bazel_tools//tools/android:android_extensions.bzl",
-          "extensionName": "remote_android_tools_extensions",
-          "usingModule": "bazel_tools@_",
-          "location": {
-            "file": "@@bazel_tools//:MODULE.bazel",
-            "line": 42,
-            "column": 42
-          },
-          "imports": {
-            "android_gmaven_r8": "android_gmaven_r8",
-            "android_tools": "android_tools"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "rules_cc": "[email protected]",
-        "rules_java": "[email protected]",
-        "rules_license": "[email protected]",
-        "rules_proto": "[email protected]",
-        "rules_python": "rules_python@_",
-        "platforms": "[email protected]",
-        "com_google_protobuf": "[email protected]",
-        "zlib": "[email protected]",
-        "build_bazel_apple_support": "[email protected]",
-        "local_config_platform": "local_config_platform@_"
-      }
-    },
-    "local_config_platform@_": {
-      "name": "local_config_platform",
-      "version": "",
-      "key": "local_config_platform@_",
-      "repoName": "local_config_platform",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "platforms": "[email protected]",
-        "bazel_tools": "bazel_tools@_"
-      }
-    },
-    "[email protected]": {
-      "name": "platforms",
-      "version": "0.0.7",
-      "key": "[email protected]",
-      "repoName": "platforms",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "rules_license": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "platforms",
-          "urls": [
-            "https://github.com/bazelbuild/platforms/releases/download/0.0.7/platforms-0.0.7.tar.gz"
-          ],
-          "integrity": "sha256-OlYcmee9vpFzqmU/1Xn+hJ8djWc5V4CrR3Cx84FDHVE=",
-          "strip_prefix": "",
-          "remote_patches": {},
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "bazel_features",
-      "version": "1.1.0",
-      "key": "[email protected]",
-      "repoName": "bazel_features",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@bazel_features//private:extensions.bzl",
-          "extensionName": "version_extension",
-          "usingModule": "[email protected]",
-          "location": {
-            "file": "https://bcr.bazel.build/modules/bazel_features/1.1.0/MODULE.bazel",
-            "line": 6,
-            "column": 24
-          },
-          "imports": {
-            "bazel_features_globals": "bazel_features_globals",
-            "bazel_features_version": "bazel_features_version"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "bazel_features~1.1.0",
-          "urls": [
-            "https://github.com/bazel-contrib/bazel_features/releases/download/v1.1.0/bazel_features-v1.1.0.tar.gz"
-          ],
-          "integrity": "sha256-4hD6q1dkP7Z1Lwt/DRIJdqKZ1dqe0g4gEp7hE0o8/Hw=",
-          "strip_prefix": "bazel_features-1.1.0",
-          "remote_patches": {
-            "https://bcr.bazel.build/modules/bazel_features/1.1.0/patches/module_dot_bazel_version.patch": "sha256-o16WYfVZruIX5FGE8sATXKb9PLRpH26dbAVdbKPKVRk="
-          },
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "rules_proto",
-      "version": "5.3.0-21.7",
-      "key": "[email protected]",
-      "repoName": "rules_proto",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "bazel_skylib": "[email protected]",
-        "com_google_protobuf": "[email protected]",
-        "rules_cc": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "rules_proto~5.3.0-21.7",
-          "urls": [
-            "https://github.com/bazelbuild/rules_proto/archive/refs/tags/5.3.0-21.7.tar.gz"
-          ],
-          "integrity": "sha256-3D+yBqLLNEG0heseQjFlsjEjWh6psDG0Qzz3vB+kYN0=",
-          "strip_prefix": "rules_proto-5.3.0-21.7",
-          "remote_patches": {},
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "protobuf",
-      "version": "21.7",
-      "key": "[email protected]",
-      "repoName": "protobuf",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@rules_jvm_external//:extensions.bzl",
-          "extensionName": "maven",
-          "usingModule": "[email protected]",
-          "location": {
-            "file": "https://bcr.bazel.build/modules/protobuf/21.7/MODULE.bazel",
-            "line": 22,
-            "column": 22
-          },
-          "imports": {
-            "maven": "maven"
-          },
-          "devImports": [],
-          "tags": [
-            {
-              "tagName": "install",
-              "attributeValues": {
-                "name": "maven",
-                "artifacts": [
-                  "com.google.code.findbugs:jsr305:3.0.2",
-                  "com.google.code.gson:gson:2.8.9",
-                  "com.google.errorprone:error_prone_annotations:2.3.2",
-                  "com.google.j2objc:j2objc-annotations:1.3",
-                  "com.google.guava:guava:31.1-jre",
-                  "com.google.guava:guava-testlib:31.1-jre",
-                  "com.google.truth:truth:1.1.2",
-                  "junit:junit:4.13.2",
-                  "org.mockito:mockito-core:4.3.1"
-                ]
-              },
-              "devDependency": false,
-              "location": {
-                "file": "https://bcr.bazel.build/modules/protobuf/21.7/MODULE.bazel",
-                "line": 24,
-                "column": 14
-              }
-            }
-          ],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "bazel_skylib": "[email protected]",
-        "rules_python": "rules_python@_",
-        "rules_cc": "[email protected]",
-        "rules_proto": "[email protected]",
-        "rules_java": "[email protected]",
-        "rules_pkg": "[email protected]",
-        "com_google_abseil": "[email protected]",
-        "zlib": "[email protected]",
-        "upb": "[email protected]",
-        "rules_jvm_external": "[email protected]",
-        "com_google_googletest": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "protobuf~21.7",
-          "urls": [
-            "https://github.com/protocolbuffers/protobuf/releases/download/v21.7/protobuf-all-21.7.zip"
-          ],
-          "integrity": "sha256-VJOiH17T/FAuZv7GuUScBqVRztYwAvpIkDxA36jeeko=",
-          "strip_prefix": "protobuf-21.7",
-          "remote_patches": {
-            "https://bcr.bazel.build/modules/protobuf/21.7/patches/add_module_dot_bazel.patch": "sha256-q3V2+eq0v2XF0z8z+V+QF4cynD6JvHI1y3kI/+rzl5s=",
-            "https://bcr.bazel.build/modules/protobuf/21.7/patches/add_module_dot_bazel_for_examples.patch": "sha256-O7YP6s3lo/1opUiO0jqXYORNHdZ/2q3hjz1QGy8QdIU=",
-            "https://bcr.bazel.build/modules/protobuf/21.7/patches/relative_repo_names.patch": "sha256-RK9RjW8T5UJNG7flIrnFiNE9vKwWB+8uWWtJqXYT0w4=",
-            "https://bcr.bazel.build/modules/protobuf/21.7/patches/add_missing_files.patch": "sha256-Hyne4DG2u5bXcWHNxNMirA2QFAe/2Cl8oMm1XJdkQIY="
-          },
-          "remote_patch_strip": 1
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "rules_cc",
-      "version": "0.0.9",
-      "key": "[email protected]",
-      "repoName": "rules_cc",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [
-        "@local_config_cc_toolchains//:all"
-      ],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@bazel_tools//tools/cpp:cc_configure.bzl",
-          "extensionName": "cc_configure_extension",
-          "usingModule": "[email protected]",
-          "location": {
-            "file": "https://bcr.bazel.build/modules/rules_cc/0.0.9/MODULE.bazel",
-            "line": 9,
-            "column": 29
-          },
-          "imports": {
-            "local_config_cc_toolchains": "local_config_cc_toolchains"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "platforms": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "rules_cc~0.0.9",
-          "urls": [
-            "https://github.com/bazelbuild/rules_cc/releases/download/0.0.9/rules_cc-0.0.9.tar.gz"
-          ],
-          "integrity": "sha256-IDeHW5pEVtzkp50RKorohbvEqtlo5lh9ym5k86CQDN8=",
-          "strip_prefix": "rules_cc-0.0.9",
-          "remote_patches": {
-            "https://bcr.bazel.build/modules/rules_cc/0.0.9/patches/module_dot_bazel_version.patch": "sha256-mM+qzOI0SgAdaJBlWOSMwMPKpaA9b7R37Hj/tp5bb4g="
-          },
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "rules_java",
-      "version": "7.1.0",
-      "key": "[email protected]",
-      "repoName": "rules_java",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [
-        "//toolchains:all",
-        "@local_jdk//:runtime_toolchain_definition",
-        "@local_jdk//:bootstrap_runtime_toolchain_definition",
-        "@remotejdk11_linux_toolchain_config_repo//:all",
-        "@remotejdk11_linux_aarch64_toolchain_config_repo//:all",
-        "@remotejdk11_linux_ppc64le_toolchain_config_repo//:all",
-        "@remotejdk11_linux_s390x_toolchain_config_repo//:all",
-        "@remotejdk11_macos_toolchain_config_repo//:all",
-        "@remotejdk11_macos_aarch64_toolchain_config_repo//:all",
-        "@remotejdk11_win_toolchain_config_repo//:all",
-        "@remotejdk11_win_arm64_toolchain_config_repo//:all",
-        "@remotejdk17_linux_toolchain_config_repo//:all",
-        "@remotejdk17_linux_aarch64_toolchain_config_repo//:all",
-        "@remotejdk17_linux_ppc64le_toolchain_config_repo//:all",
-        "@remotejdk17_linux_s390x_toolchain_config_repo//:all",
-        "@remotejdk17_macos_toolchain_config_repo//:all",
-        "@remotejdk17_macos_aarch64_toolchain_config_repo//:all",
-        "@remotejdk17_win_toolchain_config_repo//:all",
-        "@remotejdk17_win_arm64_toolchain_config_repo//:all",
-        "@remotejdk21_linux_toolchain_config_repo//:all",
-        "@remotejdk21_linux_aarch64_toolchain_config_repo//:all",
-        "@remotejdk21_macos_toolchain_config_repo//:all",
-        "@remotejdk21_macos_aarch64_toolchain_config_repo//:all",
-        "@remotejdk21_win_toolchain_config_repo//:all"
-      ],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@rules_java//java:extensions.bzl",
-          "extensionName": "toolchains",
-          "usingModule": "[email protected]",
-          "location": {
-            "file": "https://bcr.bazel.build/modules/rules_java/7.1.0/MODULE.bazel",
-            "line": 19,
-            "column": 27
-          },
-          "imports": {
-            "remote_java_tools": "remote_java_tools",
-            "remote_java_tools_linux": "remote_java_tools_linux",
-            "remote_java_tools_windows": "remote_java_tools_windows",
-            "remote_java_tools_darwin_x86_64": "remote_java_tools_darwin_x86_64",
-            "remote_java_tools_darwin_arm64": "remote_java_tools_darwin_arm64",
-            "local_jdk": "local_jdk",
-            "remotejdk11_linux_toolchain_config_repo": "remotejdk11_linux_toolchain_config_repo",
-            "remotejdk11_linux_aarch64_toolchain_config_repo": "remotejdk11_linux_aarch64_toolchain_config_repo",
-            "remotejdk11_linux_ppc64le_toolchain_config_repo": "remotejdk11_linux_ppc64le_toolchain_config_repo",
-            "remotejdk11_linux_s390x_toolchain_config_repo": "remotejdk11_linux_s390x_toolchain_config_repo",
-            "remotejdk11_macos_toolchain_config_repo": "remotejdk11_macos_toolchain_config_repo",
-            "remotejdk11_macos_aarch64_toolchain_config_repo": "remotejdk11_macos_aarch64_toolchain_config_repo",
-            "remotejdk11_win_toolchain_config_repo": "remotejdk11_win_toolchain_config_repo",
-            "remotejdk11_win_arm64_toolchain_config_repo": "remotejdk11_win_arm64_toolchain_config_repo",
-            "remotejdk17_linux_toolchain_config_repo": "remotejdk17_linux_toolchain_config_repo",
-            "remotejdk17_linux_aarch64_toolchain_config_repo": "remotejdk17_linux_aarch64_toolchain_config_repo",
-            "remotejdk17_linux_ppc64le_toolchain_config_repo": "remotejdk17_linux_ppc64le_toolchain_config_repo",
-            "remotejdk17_linux_s390x_toolchain_config_repo": "remotejdk17_linux_s390x_toolchain_config_repo",
-            "remotejdk17_macos_toolchain_config_repo": "remotejdk17_macos_toolchain_config_repo",
-            "remotejdk17_macos_aarch64_toolchain_config_repo": "remotejdk17_macos_aarch64_toolchain_config_repo",
-            "remotejdk17_win_toolchain_config_repo": "remotejdk17_win_toolchain_config_repo",
-            "remotejdk17_win_arm64_toolchain_config_repo": "remotejdk17_win_arm64_toolchain_config_repo",
-            "remotejdk21_linux_toolchain_config_repo": "remotejdk21_linux_toolchain_config_repo",
-            "remotejdk21_linux_aarch64_toolchain_config_repo": "remotejdk21_linux_aarch64_toolchain_config_repo",
-            "remotejdk21_macos_toolchain_config_repo": "remotejdk21_macos_toolchain_config_repo",
-            "remotejdk21_macos_aarch64_toolchain_config_repo": "remotejdk21_macos_aarch64_toolchain_config_repo",
-            "remotejdk21_win_toolchain_config_repo": "remotejdk21_win_toolchain_config_repo"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "platforms": "[email protected]",
-        "rules_cc": "[email protected]",
-        "bazel_skylib": "[email protected]",
-        "rules_proto": "[email protected]",
-        "rules_license": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "rules_java~7.1.0",
-          "urls": [
-            "https://github.com/bazelbuild/rules_java/releases/download/7.1.0/rules_java-7.1.0.tar.gz"
-          ],
-          "integrity": "sha256-o3pOX2OrgnFuXdau75iO2EYcegC46TYnImKJn1h81OE=",
-          "strip_prefix": "",
-          "remote_patches": {},
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "rules_license",
-      "version": "0.0.7",
-      "key": "[email protected]",
-      "repoName": "rules_license",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "rules_license~0.0.7",
-          "urls": [
-            "https://github.com/bazelbuild/rules_license/releases/download/0.0.7/rules_license-0.0.7.tar.gz"
-          ],
-          "integrity": "sha256-RTHezLkTY5ww5cdRKgVNXYdWmNrrddjPkPKEN1/nw2A=",
-          "strip_prefix": "",
-          "remote_patches": {},
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "zlib",
-      "version": "1.3",
-      "key": "[email protected]",
-      "repoName": "zlib",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "platforms": "[email protected]",
-        "rules_cc": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "zlib~1.3",
-          "urls": [
-            "https://github.com/madler/zlib/releases/download/v1.3/zlib-1.3.tar.gz"
-          ],
-          "integrity": "sha256-/wukwpIBPbwnUws6geH5qBPNOd4Byl4Pi/NVcC76WT4=",
-          "strip_prefix": "zlib-1.3",
-          "remote_patches": {
-            "https://bcr.bazel.build/modules/zlib/1.3/patches/add_build_file.patch": "sha256-Ei+FYaaOo7A3jTKunMEodTI0Uw5NXQyZEcboMC8JskY=",
-            "https://bcr.bazel.build/modules/zlib/1.3/patches/module_dot_bazel.patch": "sha256-fPWLM+2xaF/kuy+kZc1YTfW6hNjrkG400Ho7gckuyJk="
-          },
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "apple_support",
-      "version": "1.5.0",
-      "key": "[email protected]",
-      "repoName": "build_bazel_apple_support",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [
-        "@local_config_apple_cc_toolchains//:all"
-      ],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@build_bazel_apple_support//crosstool:setup.bzl",
-          "extensionName": "apple_cc_configure_extension",
-          "usingModule": "[email protected]",
-          "location": {
-            "file": "https://bcr.bazel.build/modules/apple_support/1.5.0/MODULE.bazel",
-            "line": 17,
-            "column": 35
-          },
-          "imports": {
-            "local_config_apple_cc": "local_config_apple_cc",
-            "local_config_apple_cc_toolchains": "local_config_apple_cc_toolchains"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "bazel_skylib": "[email protected]",
-        "platforms": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "apple_support~1.5.0",
-          "urls": [
-            "https://github.com/bazelbuild/apple_support/releases/download/1.5.0/apple_support.1.5.0.tar.gz"
-          ],
-          "integrity": "sha256-miM41vja0yRPgj8txghKA+TQ+7J8qJLclw5okNW0gYQ=",
-          "strip_prefix": "",
-          "remote_patches": {},
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "rules_pkg",
-      "version": "0.7.0",
-      "key": "[email protected]",
-      "repoName": "rules_pkg",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "rules_python": "rules_python@_",
-        "bazel_skylib": "[email protected]",
-        "rules_license": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "rules_pkg~0.7.0",
-          "urls": [
-            "https://github.com/bazelbuild/rules_pkg/releases/download/0.7.0/rules_pkg-0.7.0.tar.gz"
-          ],
-          "integrity": "sha256-iimOgydi7aGDBZfWT+fbWBeKqEzVkm121bdE1lWJQcI=",
-          "strip_prefix": "",
-          "remote_patches": {
-            "https://bcr.bazel.build/modules/rules_pkg/0.7.0/patches/module_dot_bazel.patch": "sha256-4OaEPZwYF6iC71ZTDg6MJ7LLqX7ZA0/kK4mT+4xKqiE="
-          },
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "abseil-cpp",
-      "version": "20211102.0",
-      "key": "[email protected]",
-      "repoName": "abseil-cpp",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "rules_cc": "[email protected]",
-        "platforms": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "abseil-cpp~20211102.0",
-          "urls": [
-            "https://github.com/abseil/abseil-cpp/archive/refs/tags/20211102.0.tar.gz"
-          ],
-          "integrity": "sha256-3PcbnLqNwMqZQMSzFqDHlr6Pq0KwcLtrfKtitI8OZsQ=",
-          "strip_prefix": "abseil-cpp-20211102.0",
-          "remote_patches": {
-            "https://bcr.bazel.build/modules/abseil-cpp/20211102.0/patches/module_dot_bazel.patch": "sha256-4izqopgGCey4jVZzl/w3M2GVPNohjh2B5TmbThZNvPY="
-          },
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "upb",
-      "version": "0.0.0-20220923-a547704",
-      "key": "[email protected]",
-      "repoName": "upb",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "bazel_skylib": "[email protected]",
-        "rules_proto": "[email protected]",
-        "com_google_protobuf": "[email protected]",
-        "com_google_absl": "[email protected]",
-        "platforms": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "upb~0.0.0-20220923-a547704",
-          "urls": [
-            "https://github.com/protocolbuffers/upb/archive/a5477045acaa34586420942098f5fecd3570f577.tar.gz"
-          ],
-          "integrity": "sha256-z39x6v+QskwaKLSWRan/A6mmwecTQpHOcJActj5zZLU=",
-          "strip_prefix": "upb-a5477045acaa34586420942098f5fecd3570f577",
-          "remote_patches": {
-            "https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/patches/module_dot_bazel.patch": "sha256-wH4mNS6ZYy+8uC0HoAft/c7SDsq2Kxf+J8dUakXhaB0="
-          },
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "rules_jvm_external",
-      "version": "4.4.2",
-      "key": "[email protected]",
-      "repoName": "rules_jvm_external",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [
-        {
-          "extensionBzlFile": "@rules_jvm_external//:non-module-deps.bzl",
-          "extensionName": "non_module_deps",
-          "usingModule": "[email protected]",
-          "location": {
-            "file": "https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/MODULE.bazel",
-            "line": 9,
-            "column": 32
-          },
-          "imports": {
-            "io_bazel_rules_kotlin": "io_bazel_rules_kotlin"
-          },
-          "devImports": [],
-          "tags": [],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        },
-        {
-          "extensionBzlFile": ":extensions.bzl",
-          "extensionName": "maven",
-          "usingModule": "[email protected]",
-          "location": {
-            "file": "https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/MODULE.bazel",
-            "line": 16,
-            "column": 22
-          },
-          "imports": {
-            "rules_jvm_external_deps": "rules_jvm_external_deps"
-          },
-          "devImports": [],
-          "tags": [
-            {
-              "tagName": "install",
-              "attributeValues": {
-                "name": "rules_jvm_external_deps",
-                "artifacts": [
-                  "com.google.cloud:google-cloud-core:1.93.10",
-                  "com.google.cloud:google-cloud-storage:1.113.4",
-                  "com.google.code.gson:gson:2.9.0",
-                  "org.apache.maven:maven-artifact:3.8.6",
-                  "software.amazon.awssdk:s3:2.17.183"
-                ],
-                "lock_file": "@rules_jvm_external//:rules_jvm_external_deps_install.json"
-              },
-              "devDependency": false,
-              "location": {
-                "file": "https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/MODULE.bazel",
-                "line": 18,
-                "column": 14
-              }
-            }
-          ],
-          "hasDevUseExtension": false,
-          "hasNonDevUseExtension": true
-        }
-      ],
-      "deps": {
-        "bazel_skylib": "[email protected]",
-        "io_bazel_stardoc": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "rules_jvm_external~4.4.2",
-          "urls": [
-            "https://github.com/bazelbuild/rules_jvm_external/archive/refs/tags/4.4.2.zip"
-          ],
-          "integrity": "sha256-c1YC9QgT6y6pPKP15DsZWb2AshO4NqB6YqKddXZwt3s=",
-          "strip_prefix": "rules_jvm_external-4.4.2",
-          "remote_patches": {},
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "googletest",
-      "version": "1.11.0",
-      "key": "[email protected]",
-      "repoName": "googletest",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "com_google_absl": "[email protected]",
-        "platforms": "[email protected]",
-        "rules_cc": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "googletest~1.11.0",
-          "urls": [
-            "https://github.com/google/googletest/archive/refs/tags/release-1.11.0.tar.gz"
-          ],
-          "integrity": "sha256-tIcL8SH/d5W6INILzdhie44Ijy0dqymaAxwQNO3ck9U=",
-          "strip_prefix": "googletest-release-1.11.0",
-          "remote_patches": {
-            "https://bcr.bazel.build/modules/googletest/1.11.0/patches/module_dot_bazel.patch": "sha256-HuahEdI/n8KCI071sN3CEziX+7qP/Ec77IWayYunLP0="
-          },
-          "remote_patch_strip": 0
-        }
-      }
-    },
-    "[email protected]": {
-      "name": "stardoc",
-      "version": "0.5.1",
-      "key": "[email protected]",
-      "repoName": "stardoc",
-      "executionPlatformsToRegister": [],
-      "toolchainsToRegister": [],
-      "extensionUsages": [],
-      "deps": {
-        "bazel_skylib": "[email protected]",
-        "rules_java": "[email protected]",
-        "bazel_tools": "bazel_tools@_",
-        "local_config_platform": "local_config_platform@_"
-      },
-      "repoSpec": {
-        "bzlFile": "@bazel_tools//tools/build_defs/repo:http.bzl",
-        "ruleClassName": "http_archive",
-        "attributes": {
-          "name": "stardoc~0.5.1",
-          "urls": [
-            "https://github.com/bazelbuild/stardoc/releases/download/0.5.1/stardoc-0.5.1.tar.gz"
-          ],
-          "integrity": "sha256-qoFNrgrEALurLoiB+ZFcb0fElmS/CHxAmhX5BDjSwj4=",
-          "strip_prefix": "",
-          "remote_patches": {
-            "https://bcr.bazel.build/modules/stardoc/0.5.1/patches/module_dot_bazel.patch": "sha256-UAULCuTpJE7SG0YrR9XLjMfxMRmbP+za3uW9ONZ5rjI="
-          },
-          "remote_patch_strip": 0
-        }
-      }
-    }
-  },
-  "moduleExtensions": {
-    "@@apple_support~1.5.0//crosstool:setup.bzl%apple_cc_configure_extension": {
-      "general": {
-        "bzlTransitiveDigest": "pMLFCYaRPkgXPQ8vtuNkMfiHfPmRBy6QJfnid4sWfv0=",
-        "accumulatedFileDigests": {},
-        "envVariables": {},
-        "generatedRepoSpecs": {
-          "local_config_apple_cc": {
-            "bzlFile": "@@apple_support~1.5.0//crosstool:setup.bzl",
-            "ruleClassName": "_apple_cc_autoconf",
-            "attributes": {
-              "name": "apple_support~1.5.0~apple_cc_configure_extension~local_config_apple_cc"
-            }
-          },
-          "local_config_apple_cc_toolchains": {
-            "bzlFile": "@@apple_support~1.5.0//crosstool:setup.bzl",
-            "ruleClassName": "_apple_cc_autoconf_toolchains",
-            "attributes": {
-              "name": "apple_support~1.5.0~apple_cc_configure_extension~local_config_apple_cc_toolchains"
-            }
-          }
-        }
-      }
-    },
-    "@@bazel_features~1.1.0//private:extensions.bzl%version_extension": {
-      "general": {
-        "bzlTransitiveDigest": "LKmXjK1avT44pRhO3x6Hplu1mU9qrNOaHP+/tJ0VFfE=",
-        "accumulatedFileDigests": {},
-        "envVariables": {},
-        "generatedRepoSpecs": {
-          "bazel_features_version": {
-            "bzlFile": "@@bazel_features~1.1.0//private:version_repo.bzl",
-            "ruleClassName": "version_repo",
-            "attributes": {
-              "name": "bazel_features~1.1.0~version_extension~bazel_features_version"
-            }
-          },
-          "bazel_features_globals": {
-            "bzlFile": "@@bazel_features~1.1.0//private:globals_repo.bzl",
-            "ruleClassName": "globals_repo",
-            "attributes": {
-              "name": "bazel_features~1.1.0~version_extension~bazel_features_globals",
-              "globals": {
-                "RunEnvironmentInfo": "5.3.0",
-                "DefaultInfo": "0.0.1",
-                "__TestingOnly_NeverAvailable": "1000000000.0.0"
-              }
-            }
-          }
-        }
-      }
-    },
-    "@@bazel_tools//tools/cpp:cc_configure.bzl%cc_configure_extension": {
-      "general": {
-        "bzlTransitiveDigest": "O9sf6ilKWU9Veed02jG9o2HM/xgV/UAyciuFBuxrFRY=",
-        "accumulatedFileDigests": {},
-        "envVariables": {},
-        "generatedRepoSpecs": {
-          "local_config_cc": {
-            "bzlFile": "@@bazel_tools//tools/cpp:cc_configure.bzl",
-            "ruleClassName": "cc_autoconf",
-            "attributes": {
-              "name": "bazel_tools~cc_configure_extension~local_config_cc"
-            }
-          },
-          "local_config_cc_toolchains": {
-            "bzlFile": "@@bazel_tools//tools/cpp:cc_configure.bzl",
-            "ruleClassName": "cc_autoconf_toolchains",
-            "attributes": {
-              "name": "bazel_tools~cc_configure_extension~local_config_cc_toolchains"
-            }
-          }
-        }
-      }
-    },
-    "@@bazel_tools//tools/osx:xcode_configure.bzl%xcode_configure_extension": {
-      "general": {
-        "bzlTransitiveDigest": "Qh2bWTU6QW6wkrd87qrU4YeY+SG37Nvw3A0PR4Y0L2Y=",
-        "accumulatedFileDigests": {},
-        "envVariables": {},
-        "generatedRepoSpecs": {
-          "local_config_xcode": {
-            "bzlFile": "@@bazel_tools//tools/osx:xcode_configure.bzl",
-            "ruleClassName": "xcode_autoconf",
-            "attributes": {
-              "name": "bazel_tools~xcode_configure_extension~local_config_xcode",
-              "xcode_locator": "@bazel_tools//tools/osx:xcode_locator.m",
-              "remote_xcode": ""
-            }
-          }
-        }
-      }
-    },
-    "@@bazel_tools//tools/sh:sh_configure.bzl%sh_configure_extension": {
-      "general": {
-        "bzlTransitiveDigest": "hp4NgmNjEg5+xgvzfh6L83bt9/aiiWETuNpwNuF1MSU=",
-        "accumulatedFileDigests": {},
-        "envVariables": {},
-        "generatedRepoSpecs": {
-          "local_config_sh": {
-            "bzlFile": "@@bazel_tools//tools/sh:sh_configure.bzl",
-            "ruleClassName": "sh_config",
-            "attributes": {
-              "name": "bazel_tools~sh_configure_extension~local_config_sh"
-            }
-          }
-        }
-      }
-    },
-    "@@rules_java~7.1.0//java:extensions.bzl%toolchains": {
-      "general": {
-        "bzlTransitiveDigest": "iUIRqCK7tkhvcDJCAfPPqSd06IHG0a8HQD0xeQyVAqw=",
-        "accumulatedFileDigests": {},
-        "envVariables": {},
-        "generatedRepoSpecs": {
-          "remotejdk21_linux_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_linux_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_21\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"21\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_linux//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_linux//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk17_linux_s390x_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_linux_s390x_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_17\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"17\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:s390x\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_linux_s390x//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:s390x\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_linux_s390x//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk17_macos_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_macos_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_17\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"17\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_macos//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_macos//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk21_macos_aarch64_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_macos_aarch64_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_21\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"21\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_macos_aarch64//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_macos_aarch64//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk17_linux_aarch64_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_linux_aarch64_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_17\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"17\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_linux_aarch64//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_linux_aarch64//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk21_macos_aarch64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_macos_aarch64",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 21,\n)\n",
-              "sha256": "2a7a99a3ea263dbd8d32a67d1e6e363ba8b25c645c826f5e167a02bbafaff1fa",
-              "strip_prefix": "zulu21.28.85-ca-jdk21.0.0-macosx_aarch64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-macosx_aarch64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-macosx_aarch64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk17_linux_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_linux_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_17\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"17\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_linux//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_linux//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk17_macos_aarch64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_macos_aarch64",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 17,\n)\n",
-              "sha256": "314b04568ec0ae9b36ba03c9cbd42adc9e1265f74678923b19297d66eb84dcca",
-              "strip_prefix": "zulu17.44.53-ca-jdk17.0.8.1-macosx_aarch64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-macosx_aarch64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-macosx_aarch64.tar.gz"
-              ]
-            }
-          },
-          "remote_java_tools_windows": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remote_java_tools_windows",
-              "sha256": "c5c70c214a350f12cbf52da8270fa43ba629b795f3dd328028a38f8f0d39c2a1",
-              "urls": [
-                "https://mirror.bazel.build/bazel_java_tools/releases/java/v13.1/java_tools_windows-v13.1.zip",
-                "https://github.com/bazelbuild/java_tools/releases/download/java_v13.1/java_tools_windows-v13.1.zip"
-              ]
-            }
-          },
-          "remotejdk11_win": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_win",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 11,\n)\n",
-              "sha256": "43408193ce2fa0862819495b5ae8541085b95660153f2adcf91a52d3a1710e83",
-              "strip_prefix": "zulu11.66.15-ca-jdk11.0.20-win_x64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-win_x64.zip",
-                "https://cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-win_x64.zip"
-              ]
-            }
-          },
-          "remotejdk11_win_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_win_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_11\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"11\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_win//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_win//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk11_linux_aarch64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_linux_aarch64",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 11,\n)\n",
-              "sha256": "54174439f2b3fddd11f1048c397fe7bb45d4c9d66d452d6889b013d04d21c4de",
-              "strip_prefix": "zulu11.66.15-ca-jdk11.0.20-linux_aarch64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-linux_aarch64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-linux_aarch64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk17_linux": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_linux",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 17,\n)\n",
-              "sha256": "b9482f2304a1a68a614dfacddcf29569a72f0fac32e6c74f83dc1b9a157b8340",
-              "strip_prefix": "zulu17.44.53-ca-jdk17.0.8.1-linux_x64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-linux_x64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-linux_x64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk11_linux_s390x_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_linux_s390x_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_11\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"11\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:s390x\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_linux_s390x//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:s390x\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_linux_s390x//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk11_linux_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_linux_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_11\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"11\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_linux//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_linux//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk11_macos": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_macos",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 11,\n)\n",
-              "sha256": "bcaab11cfe586fae7583c6d9d311c64384354fb2638eb9a012eca4c3f1a1d9fd",
-              "strip_prefix": "zulu11.66.15-ca-jdk11.0.20-macosx_x64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-macosx_x64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-macosx_x64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk11_win_arm64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_win_arm64",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 11,\n)\n",
-              "sha256": "b8a28e6e767d90acf793ea6f5bed0bb595ba0ba5ebdf8b99f395266161e53ec2",
-              "strip_prefix": "jdk-11.0.13+8",
-              "urls": [
-                "https://mirror.bazel.build/aka.ms/download-jdk/microsoft-jdk-11.0.13.8.1-windows-aarch64.zip"
-              ]
-            }
-          },
-          "remotejdk17_macos": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_macos",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 17,\n)\n",
-              "sha256": "640453e8afe8ffe0fb4dceb4535fb50db9c283c64665eebb0ba68b19e65f4b1f",
-              "strip_prefix": "zulu17.44.53-ca-jdk17.0.8.1-macosx_x64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-macosx_x64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-macosx_x64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk21_macos": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_macos",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 21,\n)\n",
-              "sha256": "9639b87db586d0c89f7a9892ae47f421e442c64b97baebdff31788fbe23265bd",
-              "strip_prefix": "zulu21.28.85-ca-jdk21.0.0-macosx_x64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-macosx_x64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-macosx_x64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk21_macos_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_macos_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_21\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"21\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_macos//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_macos//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk17_macos_aarch64_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_macos_aarch64_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_17\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"17\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_macos_aarch64//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_macos_aarch64//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk17_win": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_win",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 17,\n)\n",
-              "sha256": "192f2afca57701de6ec496234f7e45d971bf623ff66b8ee4a5c81582054e5637",
-              "strip_prefix": "zulu17.44.53-ca-jdk17.0.8.1-win_x64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-win_x64.zip",
-                "https://cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-win_x64.zip"
-              ]
-            }
-          },
-          "remotejdk11_macos_aarch64_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_macos_aarch64_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_11\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"11\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_macos_aarch64//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_macos_aarch64//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk11_linux_ppc64le_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_linux_ppc64le_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_11\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"11\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:ppc\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_linux_ppc64le//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:ppc\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_linux_ppc64le//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk21_linux": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_linux",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 21,\n)\n",
-              "sha256": "0c0eadfbdc47a7ca64aeab51b9c061f71b6e4d25d2d87674512e9b6387e9e3a6",
-              "strip_prefix": "zulu21.28.85-ca-jdk21.0.0-linux_x64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-linux_x64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-linux_x64.tar.gz"
-              ]
-            }
-          },
-          "remote_java_tools_linux": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remote_java_tools_linux",
-              "sha256": "d134da9b04c9023fb6e56a5d4bffccee73f7bc9572ddc4e747778dacccd7a5a7",
-              "urls": [
-                "https://mirror.bazel.build/bazel_java_tools/releases/java/v13.1/java_tools_linux-v13.1.zip",
-                "https://github.com/bazelbuild/java_tools/releases/download/java_v13.1/java_tools_linux-v13.1.zip"
-              ]
-            }
-          },
-          "remotejdk21_win": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_win",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 21,\n)\n",
-              "sha256": "e9959d500a0d9a7694ac243baf657761479da132f0f94720cbffd092150bd802",
-              "strip_prefix": "zulu21.28.85-ca-jdk21.0.0-win_x64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-win_x64.zip",
-                "https://cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-win_x64.zip"
-              ]
-            }
-          },
-          "remotejdk21_linux_aarch64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_linux_aarch64",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 21,\n)\n",
-              "sha256": "1fb64b8036c5d463d8ab59af06bf5b6b006811e6012e3b0eb6bccf57f1c55835",
-              "strip_prefix": "zulu21.28.85-ca-jdk21.0.0-linux_aarch64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-linux_aarch64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu21.28.85-ca-jdk21.0.0-linux_aarch64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk11_linux_aarch64_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_linux_aarch64_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_11\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"11\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_linux_aarch64//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_linux_aarch64//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk11_linux_s390x": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_linux_s390x",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 11,\n)\n",
-              "sha256": "a58fc0361966af0a5d5a31a2d8a208e3c9bb0f54f345596fd80b99ea9a39788b",
-              "strip_prefix": "jdk-11.0.15+10",
-              "urls": [
-                "https://mirror.bazel.build/github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.15+10/OpenJDK11U-jdk_s390x_linux_hotspot_11.0.15_10.tar.gz",
-                "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.15+10/OpenJDK11U-jdk_s390x_linux_hotspot_11.0.15_10.tar.gz"
-              ]
-            }
-          },
-          "remotejdk17_linux_aarch64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_linux_aarch64",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 17,\n)\n",
-              "sha256": "6531cef61e416d5a7b691555c8cf2bdff689201b8a001ff45ab6740062b44313",
-              "strip_prefix": "zulu17.44.53-ca-jdk17.0.8.1-linux_aarch64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-linux_aarch64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-linux_aarch64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk17_win_arm64_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_win_arm64_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_17\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"17\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:arm64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_win_arm64//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:arm64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_win_arm64//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk11_linux": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_linux",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 11,\n)\n",
-              "sha256": "a34b404f87a08a61148b38e1416d837189e1df7a040d949e743633daf4695a3c",
-              "strip_prefix": "zulu11.66.15-ca-jdk11.0.20-linux_x64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-linux_x64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-linux_x64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk11_macos_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_macos_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_11\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"11\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_macos//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:macos\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_macos//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk17_linux_ppc64le_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_linux_ppc64le_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_17\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"17\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:ppc\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_linux_ppc64le//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:ppc\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_linux_ppc64le//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk17_win_arm64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_win_arm64",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 17,\n)\n",
-              "sha256": "6802c99eae0d788e21f52d03cab2e2b3bf42bc334ca03cbf19f71eb70ee19f85",
-              "strip_prefix": "zulu17.44.53-ca-jdk17.0.8.1-win_aarch64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-win_aarch64.zip",
-                "https://cdn.azul.com/zulu/bin/zulu17.44.53-ca-jdk17.0.8.1-win_aarch64.zip"
-              ]
-            }
-          },
-          "remote_java_tools_darwin_arm64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remote_java_tools_darwin_arm64",
-              "sha256": "dab5bb87ec43e980faea6e1cec14bafb217b8e2f5346f53aa784fd715929a930",
-              "urls": [
-                "https://mirror.bazel.build/bazel_java_tools/releases/java/v13.1/java_tools_darwin_arm64-v13.1.zip",
-                "https://github.com/bazelbuild/java_tools/releases/download/java_v13.1/java_tools_darwin_arm64-v13.1.zip"
-              ]
-            }
-          },
-          "remotejdk17_linux_ppc64le": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_linux_ppc64le",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 17,\n)\n",
-              "sha256": "00a4c07603d0218cd678461b5b3b7e25b3253102da4022d31fc35907f21a2efd",
-              "strip_prefix": "jdk-17.0.8.1+1",
-              "urls": [
-                "https://mirror.bazel.build/github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.8.1%2B1/OpenJDK17U-jdk_ppc64le_linux_hotspot_17.0.8.1_1.tar.gz",
-                "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.8.1%2B1/OpenJDK17U-jdk_ppc64le_linux_hotspot_17.0.8.1_1.tar.gz"
-              ]
-            }
-          },
-          "remotejdk21_linux_aarch64_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_linux_aarch64_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_21\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"21\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_linux_aarch64//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:linux\", \"@platforms//cpu:aarch64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_linux_aarch64//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk11_win_arm64_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_win_arm64_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_11\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"11\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:arm64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_win_arm64//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:arm64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk11_win_arm64//:jdk\",\n)\n"
-            }
-          },
-          "local_jdk": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:local_java_repository.bzl",
-            "ruleClassName": "_local_java_repository_rule",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~local_jdk",
-              "java_home": "",
-              "version": "",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = {RUNTIME_VERSION},\n)\n"
-            }
-          },
-          "remote_java_tools_darwin_x86_64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remote_java_tools_darwin_x86_64",
-              "sha256": "0db40d8505a2b65ef0ed46e4256757807db8162f7acff16225be57c1d5726dbc",
-              "urls": [
-                "https://mirror.bazel.build/bazel_java_tools/releases/java/v13.1/java_tools_darwin_x86_64-v13.1.zip",
-                "https://github.com/bazelbuild/java_tools/releases/download/java_v13.1/java_tools_darwin_x86_64-v13.1.zip"
-              ]
-            }
-          },
-          "remote_java_tools": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remote_java_tools",
-              "sha256": "286bdbbd66e616fc4ed3f90101418729a73baa7e8c23a98ffbef558f74c0ad14",
-              "urls": [
-                "https://mirror.bazel.build/bazel_java_tools/releases/java/v13.1/java_tools-v13.1.zip",
-                "https://github.com/bazelbuild/java_tools/releases/download/java_v13.1/java_tools-v13.1.zip"
-              ]
-            }
-          },
-          "remotejdk17_linux_s390x": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_linux_s390x",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 17,\n)\n",
-              "sha256": "ffacba69c6843d7ca70d572489d6cc7ab7ae52c60f0852cedf4cf0d248b6fc37",
-              "strip_prefix": "jdk-17.0.8.1+1",
-              "urls": [
-                "https://mirror.bazel.build/github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.8.1%2B1/OpenJDK17U-jdk_s390x_linux_hotspot_17.0.8.1_1.tar.gz",
-                "https://github.com/adoptium/temurin17-binaries/releases/download/jdk-17.0.8.1%2B1/OpenJDK17U-jdk_s390x_linux_hotspot_17.0.8.1_1.tar.gz"
-              ]
-            }
-          },
-          "remotejdk17_win_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk17_win_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_17\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"17\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_win//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk17_win//:jdk\",\n)\n"
-            }
-          },
-          "remotejdk11_linux_ppc64le": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_linux_ppc64le",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 11,\n)\n",
-              "sha256": "a8fba686f6eb8ae1d1a9566821dbd5a85a1108b96ad857fdbac5c1e4649fc56f",
-              "strip_prefix": "jdk-11.0.15+10",
-              "urls": [
-                "https://mirror.bazel.build/github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.15+10/OpenJDK11U-jdk_ppc64le_linux_hotspot_11.0.15_10.tar.gz",
-                "https://github.com/adoptium/temurin11-binaries/releases/download/jdk-11.0.15+10/OpenJDK11U-jdk_ppc64le_linux_hotspot_11.0.15_10.tar.gz"
-              ]
-            }
-          },
-          "remotejdk11_macos_aarch64": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk11_macos_aarch64",
-              "build_file_content": "load(\"@rules_java//java:defs.bzl\", \"java_runtime\")\n\npackage(default_visibility = [\"//visibility:public\"])\n\nexports_files([\"WORKSPACE\", \"BUILD.bazel\"])\n\nfilegroup(\n    name = \"jre\",\n    srcs = glob(\n        [\n            \"jre/bin/**\",\n            \"jre/lib/**\",\n        ],\n        allow_empty = True,\n        # In some configurations, Java browser plugin is considered harmful and\n        # common antivirus software blocks access to npjp2.dll interfering with Bazel,\n        # so do not include it in JRE on Windows.\n        exclude = [\"jre/bin/plugin2/**\"],\n    ),\n)\n\nfilegroup(\n    name = \"jdk-bin\",\n    srcs = glob(\n        [\"bin/**\"],\n        # The JDK on Windows sometimes contains a directory called\n        # \"%systemroot%\", which is not a valid label.\n        exclude = [\"**/*%*/**\"],\n    ),\n)\n\n# This folder holds security policies.\nfilegroup(\n    name = \"jdk-conf\",\n    srcs = glob(\n        [\"conf/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-include\",\n    srcs = glob(\n        [\"include/**\"],\n        allow_empty = True,\n    ),\n)\n\nfilegroup(\n    name = \"jdk-lib\",\n    srcs = glob(\n        [\"lib/**\", \"release\"],\n        allow_empty = True,\n        exclude = [\n            \"lib/missioncontrol/**\",\n            \"lib/visualvm/**\",\n        ],\n    ),\n)\n\njava_runtime(\n    name = \"jdk\",\n    srcs = [\n        \":jdk-bin\",\n        \":jdk-conf\",\n        \":jdk-include\",\n        \":jdk-lib\",\n        \":jre\",\n    ],\n    # Provide the 'java` binary explicitly so that the correct path is used by\n    # Bazel even when the host platform differs from the execution platform.\n    # Exactly one of the two globs will be empty depending on the host platform.\n    # When --incompatible_disallow_empty_glob is enabled, each individual empty\n    # glob will fail without allow_empty = True, even if the overall result 
is\n    # non-empty.\n    java = glob([\"bin/java.exe\", \"bin/java\"], allow_empty = True)[0],\n    version = 11,\n)\n",
-              "sha256": "7632bc29f8a4b7d492b93f3bc75a7b61630894db85d136456035ab2a24d38885",
-              "strip_prefix": "zulu11.66.15-ca-jdk11.0.20-macosx_aarch64",
-              "urls": [
-                "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-macosx_aarch64.tar.gz",
-                "https://cdn.azul.com/zulu/bin/zulu11.66.15-ca-jdk11.0.20-macosx_aarch64.tar.gz"
-              ]
-            }
-          },
-          "remotejdk21_win_toolchain_config_repo": {
-            "bzlFile": "@@rules_java~7.1.0//toolchains:remote_java_repository.bzl",
-            "ruleClassName": "_toolchain_config",
-            "attributes": {
-              "name": "rules_java~7.1.0~toolchains~remotejdk21_win_toolchain_config_repo",
-              "build_file": "\nconfig_setting(\n    name = \"prefix_version_setting\",\n    values = {\"java_runtime_version\": \"remotejdk_21\"},\n    visibility = [\"//visibility:private\"],\n)\nconfig_setting(\n    name = \"version_setting\",\n    values = {\"java_runtime_version\": \"21\"},\n    visibility = [\"//visibility:private\"],\n)\nalias(\n    name = \"version_or_prefix_version_setting\",\n    actual = select({\n        \":version_setting\": \":version_setting\",\n        \"//conditions:default\": \":prefix_version_setting\",\n    }),\n    visibility = [\"//visibility:private\"],\n)\ntoolchain(\n    name = \"toolchain\",\n    target_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_win//:jdk\",\n)\ntoolchain(\n    name = \"bootstrap_runtime_toolchain\",\n    # These constraints are not required for correctness, but prevent fetches of remote JDK for\n    # different architectures. As every Java compilation toolchain depends on a bootstrap runtime in\n    # the same configuration, this constraint will not result in toolchain resolution failures.\n    exec_compatible_with = [\"@platforms//os:windows\", \"@platforms//cpu:x86_64\"],\n    target_settings = [\":version_or_prefix_version_setting\"],\n    toolchain_type = \"@bazel_tools//tools/jdk:bootstrap_runtime_toolchain_type\",\n    toolchain = \"@remotejdk21_win//:jdk\",\n)\n"
-            }
-          }
-        }
-      }
-    },
-    "@@rules_python~override//python/extensions:pip.bzl%pip": {
-      "os:linux,arch:amd64": {
-        "bzlTransitiveDigest": "1T9ZEQ+rBH1TjZEfm65J0aKyfLCqdjxklv9pEmkBMRM=",
-        "accumulatedFileDigests": {
-          "@@//:requirements_lock_3_10.txt": "8a691c254cde6884cef3fd9f75479cc10118b6886f6ba44721501f37e1266c1c",
-          "@@//whl_mods:appended_build_content.BUILD": "275102ec5574229531650038302ac51025d72e7f7a5c65c01cf33a96d1e646dd",
-          "@@other_module~override//:requirements_lock_3_11.txt": "a7d0061366569043d5efcf80e34a32c732679367cb3c831c4cdc606adc36d314",
-          "@@//:requirements_lock_3_9.txt": "a47c1883329eb4505e1a986252ee5d3be7da0c315462442f42fec381b5dda2be"
-        },
-        "envVariables": {},
-        "generatedRepoSpecs": {
-          "pip_39_tomli": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_tomli",
-              "requirement": "tomli==2.0.1     --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc     --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_lazy_object_proxy": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_lazy_object_proxy",
-              "requirement": "lazy-object-proxy==1.8.0     --hash=sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada     --hash=sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d     --hash=sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7     --hash=sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe     --hash=sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd     --hash=sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c     --hash=sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858     --hash=sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288     --hash=sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec     --hash=sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f     --hash=sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891     --hash=sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c     --hash=sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25     --hash=sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156     --hash=sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8     --hash=sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f     --hash=sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e     --hash=sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0     --hash=sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_typing_extensions": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_typing_extensions",
-              "requirement": "typing-extensions==4.6.3     --hash=sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26     --hash=sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_idna": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_idna",
-              "requirement": "idna==2.10     --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6     --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_isort": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_isort",
-              "requirement": "isort==5.12.0     --hash=sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504     --hash=sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_astroid": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_astroid",
-              "requirement": "astroid==2.13.5     --hash=sha256:6891f444625b6edb2ac798829b689e95297e100ddf89dbed5a8c610e34901501     --hash=sha256:df164d5ac811b9f44105a72b8f9d5edfb7b5b2d7e979b04ea377a77b3229114a",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_tabulate": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_tabulate",
-              "requirement": "tabulate==0.9.0     --hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c     --hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_wrapt": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_wrapt",
-              "requirement": "wrapt==1.15.0     --hash=sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0     --hash=sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420     --hash=sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a     --hash=sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c     --hash=sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079     --hash=sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923     --hash=sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f     --hash=sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1     --hash=sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8     --hash=sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86     --hash=sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0     --hash=sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364     --hash=sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e     --hash=sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c     --hash=sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e     --hash=sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c     --hash=sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727     --hash=sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff     --hash=sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e     --hash=sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29     --hash=sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7     --hash=sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72     --hash=sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475     
--hash=sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a     --hash=sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317     --hash=sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2     --hash=sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd     --hash=sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640     --hash=sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98     --hash=sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248     --hash=sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e     --hash=sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d     --hash=sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec     --hash=sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1     --hash=sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e     --hash=sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9     --hash=sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92     --hash=sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb     --hash=sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094     --hash=sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46     --hash=sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29     --hash=sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd     --hash=sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705     --hash=sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8     --hash=sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975     --hash=sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb     --hash=sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e     
--hash=sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b     --hash=sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418     --hash=sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019     --hash=sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1     --hash=sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba     --hash=sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6     --hash=sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2     --hash=sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3     --hash=sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7     --hash=sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752     --hash=sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416     --hash=sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f     --hash=sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1     --hash=sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc     --hash=sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145     --hash=sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee     --hash=sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a     --hash=sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7     --hash=sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b     --hash=sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653     --hash=sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0     --hash=sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90     --hash=sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29     --hash=sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6     
--hash=sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034     --hash=sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09     --hash=sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559     --hash=sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "pip_hub_repository_bzlmod",
-            "attributes": {
-              "name": "rules_python~override~pip~pip",
-              "repo_name": "pip",
-              "whl_map": {
-                "astroid": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "certifi": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "chardet": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "dill": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "idna": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "isort": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "lazy_object_proxy": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "mccabe": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "pathspec": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "platformdirs": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "pylint": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "pylint_print": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "python_dateutil": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "python_magic": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "pyyaml": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "requests": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "s3cmd": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "six": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "tabulate": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "tomli": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "tomlkit": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "typing_extensions": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "urllib3": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "websockets": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "wheel": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "wrapt": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "yamllint": [
-                  "3.9.18",
-                  "3.10.13"
-                ],
-                "setuptools": [
-                  "3.9.18"
-                ]
-              },
-              "default_version": "3.9.18"
-            }
-          },
-          "pip_39_websockets": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_websockets",
-              "requirement": "websockets==11.0.3     --hash=sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd     --hash=sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f     --hash=sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998     --hash=sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82     --hash=sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788     --hash=sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa     --hash=sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f     --hash=sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4     --hash=sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7     --hash=sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f     --hash=sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd     --hash=sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69     --hash=sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb     --hash=sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b     --hash=sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016     --hash=sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac     --hash=sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4     --hash=sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb     --hash=sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99     --hash=sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e     --hash=sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54     --hash=sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf     --hash=sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007     
--hash=sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3     --hash=sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6     --hash=sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86     --hash=sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1     --hash=sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61     --hash=sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11     --hash=sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8     --hash=sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f     --hash=sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931     --hash=sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526     --hash=sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016     --hash=sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae     --hash=sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd     --hash=sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b     --hash=sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311     --hash=sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af     --hash=sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152     --hash=sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288     --hash=sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de     --hash=sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97     --hash=sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d     --hash=sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d     --hash=sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca     --hash=sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0     
--hash=sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9     --hash=sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b     --hash=sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e     --hash=sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128     --hash=sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d     --hash=sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c     --hash=sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5     --hash=sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6     --hash=sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b     --hash=sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b     --hash=sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280     --hash=sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c     --hash=sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c     --hash=sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f     --hash=sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20     --hash=sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8     --hash=sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb     --hash=sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602     --hash=sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf     --hash=sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0     --hash=sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74     --hash=sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0     --hash=sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_chardet": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_chardet",
-              "requirement": "chardet==4.0.0     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa     --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_s3cmd": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_s3cmd",
-              "requirement": "s3cmd==2.1.0     --hash=sha256:49cd23d516b17974b22b611a95ce4d93fe326feaa07320bd1d234fed68cbccfa     --hash=sha256:966b0a494a916fc3b4324de38f089c86c70ee90e8e1cae6d59102103a4c0cc03",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_python_dateutil": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_python_dateutil",
-              "requirement": "python-dateutil==2.8.2     --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86     --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_requests": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_requests",
-              "requirement": "requests==2.25.1     --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804     --hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "annotation": "@@rules_python~override~pip~whl_mods_hub//:requests.json",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_tomli": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_tomli",
-              "requirement": "tomli==2.0.1     --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc     --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_setuptools": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_setuptools",
-              "requirement": "setuptools==65.6.3     --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54     --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_idna": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_idna",
-              "requirement": "idna==2.10     --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6     --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_python_dateutil": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_python_dateutil",
-              "requirement": "python-dateutil==2.8.2     --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86     --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_typing_extensions": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_typing_extensions",
-              "requirement": "typing-extensions==4.4.0     --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa     --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_python_magic": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_python_magic",
-              "requirement": "python-magic==0.4.27     --hash=sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b     --hash=sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_tomlkit": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_tomlkit",
-              "requirement": "tomlkit==0.11.6     --hash=sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b     --hash=sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_platformdirs": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_platformdirs",
-              "requirement": "platformdirs==3.5.1     --hash=sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f     --hash=sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_pylint_print": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_pylint_print",
-              "requirement": "pylint-print==1.0.1     --hash=sha256:30aa207e9718ebf4ceb47fb87012092e6d8743aab932aa07aa14a73e750ad3d0     --hash=sha256:a2b2599e7887b93e551db2624c523c1e6e9e58c3be8416cd98d41e4427e2669b",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_astroid": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_astroid",
-              "requirement": "astroid==2.12.13     --hash=sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907     --hash=sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_pyyaml": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_pyyaml",
-              "requirement": "pyyaml==6.0     --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf     --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293     --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b     --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57     --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b     --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4     --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07     --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba     --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9     --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287     --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513     --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0     --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782     --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0     --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92     --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f     --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2     --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc     --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1     --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c     --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86     --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4     --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c     
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34     --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b     --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d     --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c     --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb     --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7     --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737     --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3     --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d     --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358     --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53     --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78     --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803     --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a     --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f     --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174     --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_tabulate": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_tabulate",
-              "requirement": "tabulate==0.9.0     --hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c     --hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_dill": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_dill",
-              "requirement": "dill==0.3.6     --hash=sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0     --hash=sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_lazy_object_proxy": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_lazy_object_proxy",
-              "requirement": "lazy-object-proxy==1.9.0     --hash=sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382     --hash=sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82     --hash=sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9     --hash=sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494     --hash=sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46     --hash=sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30     --hash=sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63     --hash=sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4     --hash=sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae     --hash=sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be     --hash=sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701     --hash=sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd     --hash=sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006     --hash=sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a     --hash=sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586     --hash=sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8     --hash=sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821     --hash=sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07     --hash=sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b     --hash=sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171     --hash=sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b     --hash=sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2     --hash=sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7     
--hash=sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4     --hash=sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8     --hash=sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e     --hash=sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f     --hash=sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda     --hash=sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4     --hash=sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e     --hash=sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671     --hash=sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11     --hash=sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455     --hash=sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734     --hash=sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb     --hash=sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_pylint": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_pylint",
-              "requirement": "pylint==2.15.10     --hash=sha256:9df0d07e8948a1c3ffa3b6e2d7e6e63d9fb457c5da5b961ed63106594780cc7e     --hash=sha256:b3dc5ef7d33858f297ac0d06cc73862f01e4f2e74025ec3eff347ce0bc60baf5",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_urllib3": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_urllib3",
-              "requirement": "urllib3==1.26.13     --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc     --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_certifi": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_certifi",
-              "requirement": "certifi==2022.12.7     --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3     --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_tomlkit": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_tomlkit",
-              "requirement": "tomlkit==0.11.8     --hash=sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171     --hash=sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "other_module_pip_311_absl_py": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~other_module_pip_311_absl_py",
-              "requirement": "absl-py==1.4.0     --hash=sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47     --hash=sha256:d2c244d01048ba476e7c080bd2c6df5e141d211de80223460d5b3b8a2a58433d",
-              "repo": "other_module_pip_311",
-              "repo_prefix": "other_module_pip_311_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_11_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_yamllint": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_yamllint",
-              "requirement": "yamllint==1.32.0     --hash=sha256:d01dde008c65de5b235188ab3110bebc59d18e5c65fc8a58267cd211cd9df34a     --hash=sha256:d97a66e48da820829d96077d76b8dfbe6c6140f106e558dae87e81ac4e6b30b7",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_requests": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_requests",
-              "requirement": "requests==2.25.1     --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804     --hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "annotation": "@@rules_python~override~pip~whl_mods_hub//:requests.json",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "other_module_pip": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "pip_hub_repository_bzlmod",
-            "attributes": {
-              "name": "rules_python~override~pip~other_module_pip",
-              "repo_name": "other_module_pip",
-              "whl_map": {
-                "absl_py": [
-                  "3.11.6"
-                ]
-              },
-              "default_version": "3.9.18"
-            }
-          },
-          "pip_39_six": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_six",
-              "requirement": "six==1.16.0     --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926     --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_mccabe": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_mccabe",
-              "requirement": "mccabe==0.7.0     --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325     --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_pylint_print": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_pylint_print",
-              "requirement": "pylint-print==1.0.1     --hash=sha256:30aa207e9718ebf4ceb47fb87012092e6d8743aab932aa07aa14a73e750ad3d0     --hash=sha256:a2b2599e7887b93e551db2624c523c1e6e9e58c3be8416cd98d41e4427e2669b",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_python_magic": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_python_magic",
-              "requirement": "python-magic==0.4.27     --hash=sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b     --hash=sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_isort": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_isort",
-              "requirement": "isort==5.11.4     --hash=sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6     --hash=sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_pathspec": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_pathspec",
-              "requirement": "pathspec==0.10.3     --hash=sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6     --hash=sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "whl_mods_hub": {
-            "bzlFile": "@@rules_python~override//python/extensions:pip.bzl",
-            "ruleClassName": "_whl_mods_repo",
-            "attributes": {
-              "name": "rules_python~override~pip~whl_mods_hub",
-              "whl_mods": {
-                "requests": "{\"additive_build_content\":\"load(\\\"@bazel_skylib//rules:write_file.bzl\\\", \\\"write_file\\\")\\n\\nwrite_file(\\n    name = \\\"generated_file\\\",\\n    out = \\\"generated_file.txt\\\",\\n    content = [\\\"Hello world from requests\\\"],\\n)\\n\",\"copy_executables\":{},\"copy_files\":{},\"data\":[\":generated_file\"],\"data_exclude_glob\":[],\"srcs_exclude_glob\":[]}",
-                "wheel": "{\"additive_build_content\":\"load(\\\"@bazel_skylib//rules:write_file.bzl\\\", \\\"write_file\\\")\\nwrite_file(\\n    name = \\\"generated_file\\\",\\n    out = \\\"generated_file.txt\\\",\\n    content = [\\\"Hello world from build content file\\\"],\\n)\\n\",\"copy_executables\":{\"@@//whl_mods:data/copy_executable.py\":\"copied_content/executable.py\"},\"copy_files\":{\"@@//whl_mods:data/copy_file.txt\":\"copied_content/file.txt\"},\"data\":[\":generated_file\"],\"data_exclude_glob\":[\"site-packages/*.dist-info/WHEEL\"],\"srcs_exclude_glob\":[]}"
-              }
-            }
-          },
-          "pip_39_mccabe": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_mccabe",
-              "requirement": "mccabe==0.7.0     --hash=sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325     --hash=sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_platformdirs": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_platformdirs",
-              "requirement": "platformdirs==2.6.0     --hash=sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca     --hash=sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_certifi": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_certifi",
-              "requirement": "certifi==2023.5.7     --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7     --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_pyyaml": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_pyyaml",
-              "requirement": "pyyaml==6.0     --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf     --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293     --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b     --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57     --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b     --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4     --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07     --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba     --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9     --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287     --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513     --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0     --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782     --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0     --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92     --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f     --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2     --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc     --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1     --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c     --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86     --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4     --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c     
--hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34     --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b     --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d     --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c     --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb     --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7     --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737     --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3     --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d     --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358     --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53     --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78     --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803     --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a     --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f     --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174     --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_yamllint": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_yamllint",
-              "requirement": "yamllint==1.28.0     --hash=sha256:89bb5b5ac33b1ade059743cf227de73daa34d5e5a474b06a5e17fc16583b0cf2     --hash=sha256:9e3d8ddd16d0583214c5fdffe806c9344086721f107435f68bad990e5a88826b",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_s3cmd": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_s3cmd",
-              "requirement": "s3cmd==2.1.0     --hash=sha256:49cd23d516b17974b22b611a95ce4d93fe326feaa07320bd1d234fed68cbccfa     --hash=sha256:966b0a494a916fc3b4324de38f089c86c70ee90e8e1cae6d59102103a4c0cc03",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_chardet": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_chardet",
-              "requirement": "chardet==4.0.0     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa     --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_dill": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_dill",
-              "requirement": "dill==0.3.6     --hash=sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0     --hash=sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_wheel": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_wheel",
-              "requirement": "wheel==0.40.0     --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873     --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "annotation": "@@rules_python~override~pip~whl_mods_hub//:wheel.json",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_pylint": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_pylint",
-              "requirement": "pylint==2.15.9     --hash=sha256:18783cca3cfee5b83c6c5d10b3cdb66c6594520ffae61890858fe8d932e1c6b4     --hash=sha256:349c8cd36aede4d50a0754a8c0218b43323d13d5d88f4b2952ddfe3e169681eb",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_39_wrapt": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_39_wrapt",
-              "requirement": "wrapt==1.14.1     --hash=sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3     --hash=sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b     --hash=sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4     --hash=sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2     --hash=sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656     --hash=sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3     --hash=sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff     --hash=sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310     --hash=sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a     --hash=sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57     --hash=sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069     --hash=sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383     --hash=sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe     --hash=sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87     --hash=sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d     --hash=sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b     --hash=sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907     --hash=sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f     --hash=sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0     --hash=sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28     --hash=sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1     --hash=sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853     --hash=sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc     
--hash=sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3     --hash=sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3     --hash=sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164     --hash=sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1     --hash=sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c     --hash=sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1     --hash=sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7     --hash=sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1     --hash=sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320     --hash=sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed     --hash=sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1     --hash=sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248     --hash=sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c     --hash=sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456     --hash=sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77     --hash=sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef     --hash=sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1     --hash=sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7     --hash=sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86     --hash=sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4     --hash=sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d     --hash=sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d     --hash=sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8     --hash=sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5     
--hash=sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471     --hash=sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00     --hash=sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68     --hash=sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3     --hash=sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d     --hash=sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735     --hash=sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d     --hash=sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569     --hash=sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7     --hash=sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59     --hash=sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5     --hash=sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb     --hash=sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b     --hash=sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f     --hash=sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462     --hash=sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015     --hash=sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af",
-              "repo": "pip_39",
-              "repo_prefix": "pip_39_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_urllib3": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_urllib3",
-              "requirement": "urllib3==1.26.16     --hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f     --hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_six": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_six",
-              "requirement": "six==1.16.0     --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926     --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_pathspec": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_pathspec",
-              "requirement": "pathspec==0.11.1     --hash=sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687     --hash=sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_wheel": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_wheel",
-              "requirement": "wheel==0.40.0     --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873     --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "annotation": "@@rules_python~override~pip~whl_mods_hub//:wheel.json",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          },
-          "pip_310_websockets": {
-            "bzlFile": "@@rules_python~override//python/pip_install:pip_repository.bzl",
-            "ruleClassName": "whl_library",
-            "attributes": {
-              "name": "rules_python~override~pip~pip_310_websockets",
-              "requirement": "websockets==11.0.3     --hash=sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd     --hash=sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f     --hash=sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998     --hash=sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82     --hash=sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788     --hash=sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa     --hash=sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f     --hash=sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4     --hash=sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7     --hash=sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f     --hash=sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd     --hash=sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69     --hash=sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb     --hash=sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b     --hash=sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016     --hash=sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac     --hash=sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4     --hash=sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb     --hash=sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99     --hash=sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e     --hash=sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54     --hash=sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf     --hash=sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007     
--hash=sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3     --hash=sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6     --hash=sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86     --hash=sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1     --hash=sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61     --hash=sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11     --hash=sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8     --hash=sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f     --hash=sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931     --hash=sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526     --hash=sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016     --hash=sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae     --hash=sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd     --hash=sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b     --hash=sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311     --hash=sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af     --hash=sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152     --hash=sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288     --hash=sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de     --hash=sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97     --hash=sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d     --hash=sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d     --hash=sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca     --hash=sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0     
--hash=sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9     --hash=sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b     --hash=sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e     --hash=sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128     --hash=sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d     --hash=sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c     --hash=sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5     --hash=sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6     --hash=sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b     --hash=sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b     --hash=sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280     --hash=sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c     --hash=sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c     --hash=sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f     --hash=sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20     --hash=sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8     --hash=sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb     --hash=sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602     --hash=sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf     --hash=sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0     --hash=sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74     --hash=sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0     --hash=sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564",
-              "repo": "pip_310",
-              "repo_prefix": "pip_310_",
-              "python_interpreter": "",
-              "python_interpreter_target": "@@rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu//:bin/python3",
-              "quiet": true,
-              "timeout": 600,
-              "isolated": true,
-              "extra_pip_args": [
-                "--extra-index-url",
-                "https://pypi.python.org/simple/"
-              ],
-              "download_only": false,
-              "pip_data_exclude": [],
-              "enable_implicit_namespace_pkgs": false,
-              "environment": {}
-            }
-          }
-        }
-      }
-    },
-    "@@rules_python~override//python/extensions:python.bzl%python": {
-      "general": {
-        "bzlTransitiveDigest": "um1PsPhf8NOD6Qkf5ix7Gf2asyhE3Fl6J5gEfPfNz/s=",
-        "accumulatedFileDigests": {},
-        "envVariables": {},
-        "generatedRepoSpecs": {
-          "python_3_11_s390x-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_11_s390x-unknown-linux-gnu",
-              "sha256": "f9f19823dba3209cedc4647b00f46ed0177242917db20fb7fb539970e384531c",
-              "patches": [],
-              "platform": "s390x-unknown-linux-gnu",
-              "python_version": "3.11.6",
-              "release_filename": "20231002/cpython-3.11.6+20231002-s390x-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.11.6+20231002-s390x-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_10_aarch64-apple-darwin": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_aarch64-apple-darwin",
-              "sha256": "fd027b1dedf1ea034cdaa272e91771bdf75ddef4c8653b05d224a0645aa2ca3c",
-              "patches": [],
-              "platform": "aarch64-apple-darwin",
-              "python_version": "3.10.13",
-              "release_filename": "20231002/cpython-3.10.13+20231002-aarch64-apple-darwin-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.10.13+20231002-aarch64-apple-darwin-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "@python_3_10_aarch64-apple-darwin_coverage//:coverage",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_10_x86_64-apple-darwin": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_x86_64-apple-darwin",
-              "sha256": "be0b19b6af1f7d8c667e5abef5505ad06cf72e5a11bb5844970c395a7e5b1275",
-              "patches": [],
-              "platform": "x86_64-apple-darwin",
-              "python_version": "3.10.13",
-              "release_filename": "20231002/cpython-3.10.13+20231002-x86_64-apple-darwin-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.10.13+20231002-x86_64-apple-darwin-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "@python_3_10_x86_64-apple-darwin_coverage//:coverage",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_11_aarch64-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_11_aarch64-unknown-linux-gnu",
-              "sha256": "3e26a672df17708c4dc928475a5974c3fb3a34a9b45c65fb4bd1e50504cc84ec",
-              "patches": [],
-              "platform": "aarch64-unknown-linux-gnu",
-              "python_version": "3.11.6",
-              "release_filename": "20231002/cpython-3.11.6+20231002-aarch64-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.11.6+20231002-aarch64-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_10_ppc64le-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_ppc64le-unknown-linux-gnu",
-              "sha256": "f3f9c43eec1a0c3f72845d0b705da17a336d3906b7df212d2640b8f47e8ff375",
-              "patches": [],
-              "platform": "ppc64le-unknown-linux-gnu",
-              "python_version": "3.10.13",
-              "release_filename": "20231002/cpython-3.10.13+20231002-ppc64le-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.10.13+20231002-ppc64le-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_10_x86_64-pc-windows-msvc": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_x86_64-pc-windows-msvc",
-              "sha256": "b8d930ce0d04bda83037ad3653d7450f8907c88e24bb8255a29b8dab8930d6f1",
-              "patches": [],
-              "platform": "x86_64-pc-windows-msvc",
-              "python_version": "3.10.13",
-              "release_filename": "20231002/cpython-3.10.13+20231002-x86_64-pc-windows-msvc-shared-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.10.13+20231002-x86_64-pc-windows-msvc-shared-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "pythons_hub": {
-            "bzlFile": "@@rules_python~override//python/extensions/private:pythons_hub.bzl",
-            "ruleClassName": "hub_repo",
-            "attributes": {
-              "name": "rules_python~override~python~pythons_hub",
-              "default_python_version": "3.9",
-              "toolchain_prefixes": [
-                "_0000_python_3_10_",
-                "_0001_python_3_11_",
-                "_0002_python_3_9_"
-              ],
-              "toolchain_python_versions": [
-                "3.10",
-                "3.11",
-                "3.9"
-              ],
-              "toolchain_set_python_version_constraints": [
-                "True",
-                "True",
-                "False"
-              ],
-              "toolchain_user_repository_names": [
-                "python_3_10",
-                "python_3_11",
-                "python_3_9"
-              ]
-            }
-          },
-          "python_3_10_x86_64-apple-darwin_coverage": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_x86_64-apple-darwin_coverage",
-              "build_file_content": "\nfilegroup(\n    name = \"coverage\",\n    srcs = [\"coverage/__main__.py\"],\n    data = glob([\"coverage/*.py\", \"coverage/**/*.py\", \"coverage/*.so\"]),\n    visibility = [\"@python_3_10_x86_64-apple-darwin//:__subpackages__\"],\n)\n    ",
-              "patch_args": [
-                "-p1"
-              ],
-              "patches": [
-                "@@rules_python~override//python/private:coverage.patch"
-              ],
-              "sha256": "d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8",
-              "type": "zip",
-              "urls": [
-                "https://files.pythonhosted.org/packages/01/24/be01e62a7bce89bcffe04729c540382caa5a06bee45ae42136c93e2499f5/coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl"
-              ]
-            }
-          },
-          "python_3_9_ppc64le-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_ppc64le-unknown-linux-gnu",
-              "sha256": "101c38b22fb2f5a0945156da4259c8e9efa0c08de9d7f59afa51e7ce6e22a1cc",
-              "patches": [],
-              "platform": "ppc64le-unknown-linux-gnu",
-              "python_version": "3.9.18",
-              "release_filename": "20231002/cpython-3.9.18+20231002-ppc64le-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.9.18+20231002-ppc64le-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_11_ppc64le-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_11_ppc64le-unknown-linux-gnu",
-              "sha256": "7937035f690a624dba4d014ffd20c342e843dd46f89b0b0a1e5726b85deb8eaf",
-              "patches": [],
-              "platform": "ppc64le-unknown-linux-gnu",
-              "python_version": "3.11.6",
-              "release_filename": "20231002/cpython-3.11.6+20231002-ppc64le-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.11.6+20231002-ppc64le-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_9_x86_64-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu",
-              "sha256": "f3ff38b1ccae7dcebd8bbf2e533c9a984fac881de0ffd1636fbb61842bd924de",
-              "patches": [],
-              "platform": "x86_64-unknown-linux-gnu",
-              "python_version": "3.9.18",
-              "release_filename": "20231002/cpython-3.9.18+20231002-x86_64-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.9.18+20231002-x86_64-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "@python_3_9_x86_64-unknown-linux-gnu_coverage//:coverage",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_9_s390x-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_s390x-unknown-linux-gnu",
-              "sha256": "eee31e55ffbc1f460d7b17f05dd89e45a2636f374a6f8dc29ea13d0497f7f586",
-              "patches": [],
-              "platform": "s390x-unknown-linux-gnu",
-              "python_version": "3.9.18",
-              "release_filename": "20231002/cpython-3.9.18+20231002-s390x-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.9.18+20231002-s390x-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_9_x86_64-unknown-linux-gnu_coverage": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_x86_64-unknown-linux-gnu_coverage",
-              "build_file_content": "\nfilegroup(\n    name = \"coverage\",\n    srcs = [\"coverage/__main__.py\"],\n    data = glob([\"coverage/*.py\", \"coverage/**/*.py\", \"coverage/*.so\"]),\n    visibility = [\"@python_3_9_x86_64-unknown-linux-gnu//:__subpackages__\"],\n)\n    ",
-              "patch_args": [
-                "-p1"
-              ],
-              "patches": [
-                "@@rules_python~override//python/private:coverage.patch"
-              ],
-              "sha256": "6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1",
-              "type": "zip",
-              "urls": [
-                "https://files.pythonhosted.org/packages/fe/57/e4f8ad64d84ca9e759d783a052795f62a9f9111585e46068845b1cb52c2b/coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
-              ]
-            }
-          },
-          "python_3_9_aarch64-unknown-linux-gnu_coverage": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_aarch64-unknown-linux-gnu_coverage",
-              "build_file_content": "\nfilegroup(\n    name = \"coverage\",\n    srcs = [\"coverage/__main__.py\"],\n    data = glob([\"coverage/*.py\", \"coverage/**/*.py\", \"coverage/*.so\"]),\n    visibility = [\"@python_3_9_aarch64-unknown-linux-gnu//:__subpackages__\"],\n)\n    ",
-              "patch_args": [
-                "-p1"
-              ],
-              "patches": [
-                "@@rules_python~override//python/private:coverage.patch"
-              ],
-              "sha256": "201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7",
-              "type": "zip",
-              "urls": [
-                "https://files.pythonhosted.org/packages/61/af/5964b8d7d9a5c767785644d9a5a63cacba9a9c45cc42ba06d25895ec87be/coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl"
-              ]
-            }
-          },
-          "python_3_9": {
-            "bzlFile": "@@rules_python~override//python/private:toolchains_repo.bzl",
-            "ruleClassName": "toolchain_aliases",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9",
-              "python_version": "3.9.18",
-              "user_repository_name": "python_3_9"
-            }
-          },
-          "python_3_11_aarch64-apple-darwin": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_11_aarch64-apple-darwin",
-              "sha256": "916c35125b5d8323a21526d7a9154ca626453f63d0878e95b9f613a95006c990",
-              "patches": [],
-              "platform": "aarch64-apple-darwin",
-              "python_version": "3.11.6",
-              "release_filename": "20231002/cpython-3.11.6+20231002-aarch64-apple-darwin-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.11.6+20231002-aarch64-apple-darwin-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_11_x86_64-pc-windows-msvc": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_11_x86_64-pc-windows-msvc",
-              "sha256": "3933545e6d41462dd6a47e44133ea40995bc6efeed8c2e4cbdf1a699303e95ea",
-              "patches": [],
-              "platform": "x86_64-pc-windows-msvc",
-              "python_version": "3.11.6",
-              "release_filename": "20231002/cpython-3.11.6+20231002-x86_64-pc-windows-msvc-shared-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.11.6+20231002-x86_64-pc-windows-msvc-shared-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_9_aarch64-apple-darwin_coverage": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_aarch64-apple-darwin_coverage",
-              "build_file_content": "\nfilegroup(\n    name = \"coverage\",\n    srcs = [\"coverage/__main__.py\"],\n    data = glob([\"coverage/*.py\", \"coverage/**/*.py\", \"coverage/*.so\"]),\n    visibility = [\"@python_3_9_aarch64-apple-darwin//:__subpackages__\"],\n)\n    ",
-              "patch_args": [
-                "-p1"
-              ],
-              "patches": [
-                "@@rules_python~override//python/private:coverage.patch"
-              ],
-              "sha256": "06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2",
-              "type": "zip",
-              "urls": [
-                "https://files.pythonhosted.org/packages/ca/0c/3dfeeb1006c44b911ee0ed915350db30325d01808525ae7cc8d57643a2ce/coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl"
-              ]
-            }
-          },
-          "python_3_9_aarch64-apple-darwin": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_aarch64-apple-darwin",
-              "sha256": "fdc4054837e37b69798c2ef796222a480bc1f80e8ad3a01a95d0168d8282a007",
-              "patches": [],
-              "platform": "aarch64-apple-darwin",
-              "python_version": "3.9.18",
-              "release_filename": "20231002/cpython-3.9.18+20231002-aarch64-apple-darwin-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.9.18+20231002-aarch64-apple-darwin-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "@python_3_9_aarch64-apple-darwin_coverage//:coverage",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_10_aarch64-apple-darwin_coverage": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_aarch64-apple-darwin_coverage",
-              "build_file_content": "\nfilegroup(\n    name = \"coverage\",\n    srcs = [\"coverage/__main__.py\"],\n    data = glob([\"coverage/*.py\", \"coverage/**/*.py\", \"coverage/*.so\"]),\n    visibility = [\"@python_3_10_aarch64-apple-darwin//:__subpackages__\"],\n)\n    ",
-              "patch_args": [
-                "-p1"
-              ],
-              "patches": [
-                "@@rules_python~override//python/private:coverage.patch"
-              ],
-              "sha256": "6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb",
-              "type": "zip",
-              "urls": [
-                "https://files.pythonhosted.org/packages/3d/80/7060a445e1d2c9744b683dc935248613355657809d6c6b2716cdf4ca4766/coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl"
-              ]
-            }
-          },
-          "python_3_10_aarch64-unknown-linux-gnu_coverage": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_aarch64-unknown-linux-gnu_coverage",
-              "build_file_content": "\nfilegroup(\n    name = \"coverage\",\n    srcs = [\"coverage/__main__.py\"],\n    data = glob([\"coverage/*.py\", \"coverage/**/*.py\", \"coverage/*.so\"]),\n    visibility = [\"@python_3_10_aarch64-unknown-linux-gnu//:__subpackages__\"],\n)\n    ",
-              "patch_args": [
-                "-p1"
-              ],
-              "patches": [
-                "@@rules_python~override//python/private:coverage.patch"
-              ],
-              "sha256": "ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6",
-              "type": "zip",
-              "urls": [
-                "https://files.pythonhosted.org/packages/b8/9d/926fce7e03dbfc653104c2d981c0fa71f0572a9ebd344d24c573bd6f7c4f/coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl"
-              ]
-            }
-          },
-          "python_3_9_x86_64-apple-darwin_coverage": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_x86_64-apple-darwin_coverage",
-              "build_file_content": "\nfilegroup(\n    name = \"coverage\",\n    srcs = [\"coverage/__main__.py\"],\n    data = glob([\"coverage/*.py\", \"coverage/**/*.py\", \"coverage/*.so\"]),\n    visibility = [\"@python_3_9_x86_64-apple-darwin//:__subpackages__\"],\n)\n    ",
-              "patch_args": [
-                "-p1"
-              ],
-              "patches": [
-                "@@rules_python~override//python/private:coverage.patch"
-              ],
-              "sha256": "537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9",
-              "type": "zip",
-              "urls": [
-                "https://files.pythonhosted.org/packages/88/da/495944ebf0ad246235a6bd523810d9f81981f9b81c6059ba1f56e943abe0/coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl"
-              ]
-            }
-          },
-          "python_3_9_x86_64-pc-windows-msvc": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_x86_64-pc-windows-msvc",
-              "sha256": "02ea7bb64524886bd2b05d6b6be4401035e4ba4319146f274f0bcd992822cd75",
-              "patches": [],
-              "platform": "x86_64-pc-windows-msvc",
-              "python_version": "3.9.18",
-              "release_filename": "20231002/cpython-3.9.18+20231002-x86_64-pc-windows-msvc-shared-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.9.18+20231002-x86_64-pc-windows-msvc-shared-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_9_aarch64-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_aarch64-unknown-linux-gnu",
-              "sha256": "1e0a3e8ce8e58901a259748c0ab640d2b8294713782d14229e882c6898b2fb36",
-              "patches": [],
-              "platform": "aarch64-unknown-linux-gnu",
-              "python_version": "3.9.18",
-              "release_filename": "20231002/cpython-3.9.18+20231002-aarch64-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.9.18+20231002-aarch64-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "@python_3_9_aarch64-unknown-linux-gnu_coverage//:coverage",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_10_aarch64-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_aarch64-unknown-linux-gnu",
-              "sha256": "8675915ff454ed2f1597e27794bc7df44f5933c26b94aa06af510fe91b58bb97",
-              "patches": [],
-              "platform": "aarch64-unknown-linux-gnu",
-              "python_version": "3.10.13",
-              "release_filename": "20231002/cpython-3.10.13+20231002-aarch64-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.10.13+20231002-aarch64-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "@python_3_10_aarch64-unknown-linux-gnu_coverage//:coverage",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_10_x86_64-unknown-linux-gnu_coverage": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu_coverage",
-              "build_file_content": "\nfilegroup(\n    name = \"coverage\",\n    srcs = [\"coverage/__main__.py\"],\n    data = glob([\"coverage/*.py\", \"coverage/**/*.py\", \"coverage/*.so\"]),\n    visibility = [\"@python_3_10_x86_64-unknown-linux-gnu//:__subpackages__\"],\n)\n    ",
-              "patch_args": [
-                "-p1"
-              ],
-              "patches": [
-                "@@rules_python~override//python/private:coverage.patch"
-              ],
-              "sha256": "31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063",
-              "type": "zip",
-              "urls": [
-                "https://files.pythonhosted.org/packages/b4/bd/1b2331e3a04f4cc9b7b332b1dd0f3a1261dfc4114f8479bebfcc2afee9e8/coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
-              ]
-            }
-          },
-          "python_3_11": {
-            "bzlFile": "@@rules_python~override//python/private:toolchains_repo.bzl",
-            "ruleClassName": "toolchain_aliases",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_11",
-              "python_version": "3.11.6",
-              "user_repository_name": "python_3_11"
-            }
-          },
-          "python_3_10_s390x-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_s390x-unknown-linux-gnu",
-              "sha256": "859f6cfe9aedb6e8858892fdc124037e83ab05f28d42a7acd314c6a16d6bd66c",
-              "patches": [],
-              "platform": "s390x-unknown-linux-gnu",
-              "python_version": "3.10.13",
-              "release_filename": "20231002/cpython-3.10.13+20231002-s390x-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.10.13+20231002-s390x-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_10": {
-            "bzlFile": "@@rules_python~override//python/private:toolchains_repo.bzl",
-            "ruleClassName": "toolchain_aliases",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10",
-              "python_version": "3.10.13",
-              "user_repository_name": "python_3_10"
-            }
-          },
-          "python_3_11_x86_64-apple-darwin": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_11_x86_64-apple-darwin",
-              "sha256": "178cb1716c2abc25cb56ae915096c1a083e60abeba57af001996e8bc6ce1a371",
-              "patches": [],
-              "platform": "x86_64-apple-darwin",
-              "python_version": "3.11.6",
-              "release_filename": "20231002/cpython-3.11.6+20231002-x86_64-apple-darwin-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.11.6+20231002-x86_64-apple-darwin-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_versions": {
-            "bzlFile": "@@rules_python~override//python/private:toolchains_repo.bzl",
-            "ruleClassName": "multi_toolchain_aliases",
-            "attributes": {
-              "name": "rules_python~override~python~python_versions",
-              "python_versions": {
-                "3.9": "python_3_9",
-                "3.10": "python_3_10",
-                "3.11": "python_3_11"
-              }
-            }
-          },
-          "python_3_9_x86_64-apple-darwin": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_9_x86_64-apple-darwin",
-              "sha256": "82231cb77d4a5c8081a1a1d5b8ae440abe6993514eb77a926c826e9a69a94fb1",
-              "patches": [],
-              "platform": "x86_64-apple-darwin",
-              "python_version": "3.9.18",
-              "release_filename": "20231002/cpython-3.9.18+20231002-x86_64-apple-darwin-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.9.18+20231002-x86_64-apple-darwin-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "@python_3_9_x86_64-apple-darwin_coverage//:coverage",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_10_x86_64-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_10_x86_64-unknown-linux-gnu",
-              "sha256": "5d0429c67c992da19ba3eb58b3acd0b35ec5e915b8cae9a4aa8ca565c423847a",
-              "patches": [],
-              "platform": "x86_64-unknown-linux-gnu",
-              "python_version": "3.10.13",
-              "release_filename": "20231002/cpython-3.10.13+20231002-x86_64-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.10.13+20231002-x86_64-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "@python_3_10_x86_64-unknown-linux-gnu_coverage//:coverage",
-              "ignore_root_user_error": false
-            }
-          },
-          "python_3_11_x86_64-unknown-linux-gnu": {
-            "bzlFile": "@@rules_python~override//python:repositories.bzl",
-            "ruleClassName": "python_repository",
-            "attributes": {
-              "name": "rules_python~override~python~python_3_11_x86_64-unknown-linux-gnu",
-              "sha256": "ee37a7eae6e80148c7e3abc56e48a397c1664f044920463ad0df0fc706eacea8",
-              "patches": [],
-              "platform": "x86_64-unknown-linux-gnu",
-              "python_version": "3.11.6",
-              "release_filename": "20231002/cpython-3.11.6+20231002-x86_64-unknown-linux-gnu-install_only.tar.gz",
-              "urls": [
-                "https://github.com/indygreg/python-build-standalone/releases/download/20231002/cpython-3.11.6+20231002-x86_64-unknown-linux-gnu-install_only.tar.gz"
-              ],
-              "distutils_content": "",
-              "strip_prefix": "python",
-              "coverage_tool": "",
-              "ignore_root_user_error": false
-            }
-          }
-        }
-      }
-    },
-    "@@rules_python~override//python/extensions/private:internal_deps.bzl%internal_deps": {
-      "general": {
-        "bzlTransitiveDigest": "xWcXgf89ru5MrJTsPvm77jragVvWzXfqANqHTd0qAEw=",
-        "accumulatedFileDigests": {},
-        "envVariables": {},
-        "generatedRepoSpecs": {
-          "pypi__wheel": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__wheel",
-              "url": "https://files.pythonhosted.org/packages/b8/8b/31273bf66016be6ad22bb7345c37ff350276cfd46e389a0c2ac5da9d9073/wheel-0.41.2-py3-none-any.whl",
-              "sha256": "75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__click": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__click",
-              "url": "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl",
-              "sha256": "ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__importlib_metadata": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__importlib_metadata",
-              "url": "https://files.pythonhosted.org/packages/cc/37/db7ba97e676af155f5fcb1a35466f446eadc9104e25b83366e8088c9c926/importlib_metadata-6.8.0-py3-none-any.whl",
-              "sha256": "3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__pyproject_hooks": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__pyproject_hooks",
-              "url": "https://files.pythonhosted.org/packages/d5/ea/9ae603de7fbb3df820b23a70f6aff92bf8c7770043254ad8d2dc9d6bcba4/pyproject_hooks-1.0.0-py3-none-any.whl",
-              "sha256": "283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__pep517": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__pep517",
-              "url": "https://files.pythonhosted.org/packages/ee/2f/ef63e64e9429111e73d3d6cbee80591672d16f2725e648ebc52096f3d323/pep517-0.13.0-py3-none-any.whl",
-              "sha256": "4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__packaging": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__packaging",
-              "url": "https://files.pythonhosted.org/packages/ab/c3/57f0601a2d4fe15de7a553c00adbc901425661bf048f2a22dfc500caf121/packaging-23.1-py3-none-any.whl",
-              "sha256": "994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__pip_tools": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__pip_tools",
-              "url": "https://files.pythonhosted.org/packages/e8/df/47e6267c6b5cdae867adbdd84b437393e6202ce4322de0a5e0b92960e1d6/pip_tools-7.3.0-py3-none-any.whl",
-              "sha256": "8717693288720a8c6ebd07149c93ab0be1fced0b5191df9e9decd3263e20d85e",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__setuptools": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__setuptools",
-              "url": "https://files.pythonhosted.org/packages/4f/ab/0bcfebdfc3bfa8554b2b2c97a555569c4c1ebc74ea288741ea8326c51906/setuptools-68.1.2-py3-none-any.whl",
-              "sha256": "3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__zipp": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__zipp",
-              "url": "https://files.pythonhosted.org/packages/8c/08/d3006317aefe25ea79d3b76c9650afabaf6d63d1c8443b236e7405447503/zipp-3.16.2-py3-none-any.whl",
-              "sha256": "679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__colorama": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__colorama",
-              "url": "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl",
-              "sha256": "4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__build": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__build",
-              "url": "https://files.pythonhosted.org/packages/58/91/17b00d5fac63d3dca605f1b8269ba3c65e98059e1fd99d00283e42a454f0/build-0.10.0-py3-none-any.whl",
-              "sha256": "af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "rules_python_internal": {
-            "bzlFile": "@@rules_python~override//python/private:internal_config_repo.bzl",
-            "ruleClassName": "internal_config_repo",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~rules_python_internal"
-            }
-          },
-          "pypi__pip": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__pip",
-              "url": "https://files.pythonhosted.org/packages/50/c2/e06851e8cc28dcad7c155f4753da8833ac06a5c704c109313b8d5a62968a/pip-23.2.1-py3-none-any.whl",
-              "sha256": "7ccf472345f20d35bdc9d1841ff5f313260c2c33fe417f48c30ac46cccabf5be",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__installer": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__installer",
-              "url": "https://files.pythonhosted.org/packages/e5/ca/1172b6638d52f2d6caa2dd262ec4c811ba59eee96d54a7701930726bce18/installer-0.7.0-py3-none-any.whl",
-              "sha256": "05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__more_itertools": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__more_itertools",
-              "url": "https://files.pythonhosted.org/packages/5a/cb/6dce742ea14e47d6f565589e859ad225f2a5de576d7696e0623b784e226b/more_itertools-10.1.0-py3-none-any.whl",
-              "sha256": "64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          },
-          "pypi__tomli": {
-            "bzlFile": "@@bazel_tools//tools/build_defs/repo:http.bzl",
-            "ruleClassName": "http_archive",
-            "attributes": {
-              "name": "rules_python~override~internal_deps~pypi__tomli",
-              "url": "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl",
-              "sha256": "939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
-              "type": "zip",
-              "build_file_content": "package(default_visibility = [\"//visibility:public\"])\n\nload(\"@rules_python//python:defs.bzl\", \"py_library\")\n\npy_library(\n    name = \"lib\",\n    srcs = glob([\"**/*.py\"]),\n    data = glob([\"**/*\"], exclude=[\n        # These entries include those put into user-installed dependencies by\n        # data_exclude in /python/pip_install/tools/bazel.py\n        # to avoid non-determinism following pip install's behavior.\n        \"**/*.py\",\n        \"**/*.pyc\",\n        \"**/*.pyc.*\",  # During pyc creation, temp files named *.pyc.NNN are created\n        \"**/* *\",\n        \"**/*.dist-info/RECORD\",\n        \"BUILD\",\n        \"WORKSPACE\",\n    ]),\n    # This makes this directory a top-level in the python import\n    # search path for anything that depends on this.\n    imports = [\".\"],\n)\n"
-            }
-          }
-        }
-      }
-    }
-  }
-}
diff --git a/examples/bzlmod/__main__.py b/examples/bzlmod/__main__.py
index daf1749..2dd322a 100644
--- a/examples/bzlmod/__main__.py
+++ b/examples/bzlmod/__main__.py
@@ -12,9 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from lib import main
 import sys
 
+from lib import main
+
 if __name__ == "__main__":
     print(main([["A", 1], ["B", 2]]))
     print(sys.version)
diff --git a/examples/bzlmod/lib.py b/examples/bzlmod/lib.py
index 5f0167f..e76042d 100644
--- a/examples/bzlmod/lib.py
+++ b/examples/bzlmod/lib.py
@@ -12,8 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from tabulate import tabulate
 import sphinx  # noqa
+from tabulate import tabulate
+
 
 def main(table):
     return tabulate(table)
diff --git a/examples/bzlmod/libs/my_lib/__init__.py b/examples/bzlmod/libs/my_lib/__init__.py
index 6db2e85..271e933 100644
--- a/examples/bzlmod/libs/my_lib/__init__.py
+++ b/examples/bzlmod/libs/my_lib/__init__.py
@@ -17,6 +17,12 @@
 
 def websockets_is_for_python_version(sanitized_version_check):
     # We are checking that the name of the repository folders
-    # match the expexted generated names. If we update the folder
-    # structure or naming we will need to modify this test
-    return f"pip_{sanitized_version_check}_websockets" in websockets.__file__
+    # match the expected generated names. If we update the folder
+    # structure or naming we will need to modify this test.
+    want = f"_{sanitized_version_check}_websockets"
+    got_full = websockets.__file__
+    if want not in got_full:
+        print(f"Failed, expected '{want}' to be a substring of '{got_full}'.")
+        return False
+
+    return True
diff --git a/examples/bzlmod/requirements_lock_3_10.txt b/examples/bzlmod/requirements_lock_3_10.txt
index 525fa3e..ace879f 100644
--- a/examples/bzlmod/requirements_lock_3_10.txt
+++ b/examples/bzlmod/requirements_lock_3_10.txt
@@ -18,9 +18,9 @@
     --hash=sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900 \
     --hash=sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed
     # via sphinx
-certifi==2023.5.7 \
-    --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \
-    --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716
+certifi==2023.7.22 \
+    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
+    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
     # via requests
 chardet==4.0.0 \
     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \
@@ -50,9 +50,9 @@
     --hash=sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504 \
     --hash=sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6
     # via pylint
-jinja2==3.1.2 \
-    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
-    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
+jinja2==3.1.4 \
+    --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
+    --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
     # via sphinx
 lazy-object-proxy==1.9.0 \
     --hash=sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382 \
diff --git a/examples/bzlmod/requirements_lock_3_9.txt b/examples/bzlmod/requirements_lock_3_9.txt
index e78562f..e6aaa99 100644
--- a/examples/bzlmod/requirements_lock_3_9.txt
+++ b/examples/bzlmod/requirements_lock_3_9.txt
@@ -18,9 +18,9 @@
     --hash=sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900 \
     --hash=sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed
     # via sphinx
-certifi==2022.12.7 \
-    --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
-    --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+certifi==2023.7.22 \
+    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
+    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
     # via requests
 chardet==4.0.0 \
     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \
@@ -54,30 +54,48 @@
     --hash=sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6 \
     --hash=sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b
     # via pylint
-jinja2==3.1.2 \
-    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
-    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
+jinja2==3.1.4 \
+    --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
+    --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
     # via sphinx
-lazy-object-proxy==1.8.0 \
-    --hash=sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada \
-    --hash=sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d \
-    --hash=sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7 \
-    --hash=sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe \
-    --hash=sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd \
-    --hash=sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c \
-    --hash=sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858 \
-    --hash=sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288 \
-    --hash=sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec \
-    --hash=sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f \
-    --hash=sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891 \
-    --hash=sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c \
-    --hash=sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25 \
-    --hash=sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156 \
-    --hash=sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8 \
-    --hash=sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f \
-    --hash=sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e \
-    --hash=sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0 \
-    --hash=sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b
+lazy-object-proxy==1.10.0 \
+    --hash=sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56 \
+    --hash=sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4 \
+    --hash=sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8 \
+    --hash=sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282 \
+    --hash=sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757 \
+    --hash=sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424 \
+    --hash=sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b \
+    --hash=sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255 \
+    --hash=sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70 \
+    --hash=sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94 \
+    --hash=sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074 \
+    --hash=sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c \
+    --hash=sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee \
+    --hash=sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9 \
+    --hash=sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9 \
+    --hash=sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69 \
+    --hash=sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f \
+    --hash=sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3 \
+    --hash=sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9 \
+    --hash=sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d \
+    --hash=sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977 \
+    --hash=sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b \
+    --hash=sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43 \
+    --hash=sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658 \
+    --hash=sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a \
+    --hash=sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd \
+    --hash=sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83 \
+    --hash=sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4 \
+    --hash=sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696 \
+    --hash=sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05 \
+    --hash=sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3 \
+    --hash=sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6 \
+    --hash=sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895 \
+    --hash=sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4 \
+    --hash=sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba \
+    --hash=sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03 \
+    --hash=sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c
     # via astroid
 markupsafe==2.1.3 \
     --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
@@ -181,47 +199,58 @@
     --hash=sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b \
     --hash=sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3
     # via s3cmd
-pyyaml==6.0 \
-    --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
-    --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
-    --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
-    --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
-    --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
-    --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
-    --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
-    --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
-    --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
-    --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
-    --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
-    --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
-    --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
-    --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
-    --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
-    --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
-    --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
-    --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
-    --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
-    --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
-    --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
-    --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
-    --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
-    --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
-    --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
-    --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
-    --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
-    --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
-    --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
-    --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
-    --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
-    --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
-    --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
-    --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
-    --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
-    --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
-    --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
-    --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
-    --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
-    --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
+pyyaml==6.0.1 \
+    --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \
+    --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \
+    --hash=sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df \
+    --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \
+    --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \
+    --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \
+    --hash=sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595 \
+    --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \
+    --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \
+    --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \
+    --hash=sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290 \
+    --hash=sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9 \
+    --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \
+    --hash=sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6 \
+    --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \
+    --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \
+    --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \
+    --hash=sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6 \
+    --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \
+    --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \
+    --hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \
+    --hash=sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0 \
+    --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \
+    --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \
+    --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \
+    --hash=sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28 \
+    --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \
+    --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \
+    --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \
+    --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \
+    --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \
+    --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \
+    --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \
+    --hash=sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0 \
+    --hash=sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515 \
+    --hash=sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c \
+    --hash=sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c \
+    --hash=sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924 \
+    --hash=sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34 \
+    --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \
+    --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \
+    --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \
+    --hash=sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54 \
+    --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \
+    --hash=sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b \
+    --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \
+    --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \
+    --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \
+    --hash=sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585 \
+    --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \
+    --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f
     # via yamllint
 requests==2.25.1 \
     --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804 \
diff --git a/examples/bzlmod/requirements_windows_3_10.txt b/examples/bzlmod/requirements_windows_3_10.txt
index 05905e5..e4373c1 100644
--- a/examples/bzlmod/requirements_windows_3_10.txt
+++ b/examples/bzlmod/requirements_windows_3_10.txt
@@ -18,9 +18,9 @@
     --hash=sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900 \
     --hash=sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed
     # via sphinx
-certifi==2023.5.7 \
-    --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \
-    --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716
+certifi==2023.7.22 \
+    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
+    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
     # via requests
 chardet==4.0.0 \
     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \
@@ -53,9 +53,9 @@
     --hash=sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504 \
     --hash=sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6
     # via pylint
-jinja2==3.1.2 \
-    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
-    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
+jinja2==3.1.4 \
+    --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
+    --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
     # via sphinx
 lazy-object-proxy==1.9.0 \
     --hash=sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382 \
diff --git a/examples/bzlmod/requirements_windows_3_9.txt b/examples/bzlmod/requirements_windows_3_9.txt
index a325101..636b4df 100644
--- a/examples/bzlmod/requirements_windows_3_9.txt
+++ b/examples/bzlmod/requirements_windows_3_9.txt
@@ -18,9 +18,9 @@
     --hash=sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900 \
     --hash=sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed
     # via sphinx
-certifi==2022.12.7 \
-    --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
-    --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+certifi==2023.7.22 \
+    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
+    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
     # via requests
 chardet==4.0.0 \
     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \
@@ -57,30 +57,48 @@
     --hash=sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6 \
     --hash=sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b
     # via pylint
-jinja2==3.1.2 \
-    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
-    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
+jinja2==3.1.4 \
+    --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
+    --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
     # via sphinx
-lazy-object-proxy==1.8.0 \
-    --hash=sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada \
-    --hash=sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d \
-    --hash=sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7 \
-    --hash=sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe \
-    --hash=sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd \
-    --hash=sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c \
-    --hash=sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858 \
-    --hash=sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288 \
-    --hash=sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec \
-    --hash=sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f \
-    --hash=sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891 \
-    --hash=sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c \
-    --hash=sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25 \
-    --hash=sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156 \
-    --hash=sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8 \
-    --hash=sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f \
-    --hash=sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e \
-    --hash=sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0 \
-    --hash=sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b
+lazy-object-proxy==1.10.0 \
+    --hash=sha256:009e6bb1f1935a62889ddc8541514b6a9e1fcf302667dcb049a0be5c8f613e56 \
+    --hash=sha256:02c83f957782cbbe8136bee26416686a6ae998c7b6191711a04da776dc9e47d4 \
+    --hash=sha256:0aefc7591920bbd360d57ea03c995cebc204b424524a5bd78406f6e1b8b2a5d8 \
+    --hash=sha256:127a789c75151db6af398b8972178afe6bda7d6f68730c057fbbc2e96b08d282 \
+    --hash=sha256:18dd842b49456aaa9a7cf535b04ca4571a302ff72ed8740d06b5adcd41fe0757 \
+    --hash=sha256:217138197c170a2a74ca0e05bddcd5f1796c735c37d0eee33e43259b192aa424 \
+    --hash=sha256:2297f08f08a2bb0d32a4265e98a006643cd7233fb7983032bd61ac7a02956b3b \
+    --hash=sha256:2fc0a92c02fa1ca1e84fc60fa258458e5bf89d90a1ddaeb8ed9cc3147f417255 \
+    --hash=sha256:30b339b2a743c5288405aa79a69e706a06e02958eab31859f7f3c04980853b70 \
+    --hash=sha256:366c32fe5355ef5fc8a232c5436f4cc66e9d3e8967c01fb2e6302fd6627e3d94 \
+    --hash=sha256:3ad54b9ddbe20ae9f7c1b29e52f123120772b06dbb18ec6be9101369d63a4074 \
+    --hash=sha256:5ad9e6ed739285919aa9661a5bbed0aaf410aa60231373c5579c6b4801bd883c \
+    --hash=sha256:5faf03a7d8942bb4476e3b62fd0f4cf94eaf4618e304a19865abf89a35c0bbee \
+    --hash=sha256:75fc59fc450050b1b3c203c35020bc41bd2695ed692a392924c6ce180c6f1dc9 \
+    --hash=sha256:76a095cfe6045c7d0ca77db9934e8f7b71b14645f0094ffcd842349ada5c5fb9 \
+    --hash=sha256:78247b6d45f43a52ef35c25b5581459e85117225408a4128a3daf8bf9648ac69 \
+    --hash=sha256:782e2c9b2aab1708ffb07d4bf377d12901d7a1d99e5e410d648d892f8967ab1f \
+    --hash=sha256:7ab7004cf2e59f7c2e4345604a3e6ea0d92ac44e1c2375527d56492014e690c3 \
+    --hash=sha256:80b39d3a151309efc8cc48675918891b865bdf742a8616a337cb0090791a0de9 \
+    --hash=sha256:80fa48bd89c8f2f456fc0765c11c23bf5af827febacd2f523ca5bc1893fcc09d \
+    --hash=sha256:855e068b0358ab916454464a884779c7ffa312b8925c6f7401e952dcf3b89977 \
+    --hash=sha256:92f09ff65ecff3108e56526f9e2481b8116c0b9e1425325e13245abfd79bdb1b \
+    --hash=sha256:952c81d415b9b80ea261d2372d2a4a2332a3890c2b83e0535f263ddfe43f0d43 \
+    --hash=sha256:9a3a87cf1e133e5b1994144c12ca4aa3d9698517fe1e2ca82977781b16955658 \
+    --hash=sha256:9e4ed0518a14dd26092614412936920ad081a424bdcb54cc13349a8e2c6d106a \
+    --hash=sha256:a899b10e17743683b293a729d3a11f2f399e8a90c73b089e29f5d0fe3509f0dd \
+    --hash=sha256:b1f711e2c6dcd4edd372cf5dec5c5a30d23bba06ee012093267b3376c079ec83 \
+    --hash=sha256:b4f87d4ed9064b2628da63830986c3d2dca7501e6018347798313fcf028e2fd4 \
+    --hash=sha256:cb73507defd385b7705c599a94474b1d5222a508e502553ef94114a143ec6696 \
+    --hash=sha256:dc0d2fc424e54c70c4bc06787e4072c4f3b1aa2f897dfdc34ce1013cf3ceef05 \
+    --hash=sha256:e221060b701e2aa2ea991542900dd13907a5c90fa80e199dbf5a03359019e7a3 \
+    --hash=sha256:e271058822765ad5e3bca7f05f2ace0de58a3f4e62045a8c90a0dfd2f8ad8cc6 \
+    --hash=sha256:e2adb09778797da09d2b5ebdbceebf7dd32e2c96f79da9052b2e87b6ea495895 \
+    --hash=sha256:e333e2324307a7b5d86adfa835bb500ee70bfcd1447384a822e96495796b0ca4 \
+    --hash=sha256:e98c8af98d5707dcdecc9ab0863c0ea6e88545d42ca7c3feffb6b4d1e370c7ba \
+    --hash=sha256:edb45bb8278574710e68a6b021599a10ce730d156e5b254941754a9cc0b17d03 \
+    --hash=sha256:fec03caabbc6b59ea4a638bee5fce7117be8e99a4103d9d5ad77f15d6f81020c
     # via astroid
 markupsafe==2.1.3 \
     --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
@@ -184,47 +202,58 @@
     --hash=sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b \
     --hash=sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3
     # via s3cmd
-pyyaml==6.0 \
-    --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \
-    --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
-    --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
-    --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
-    --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
-    --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
-    --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
-    --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
-    --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
-    --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
-    --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
-    --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
-    --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \
-    --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
-    --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
-    --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
-    --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
-    --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
-    --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \
-    --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
-    --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
-    --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
-    --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
-    --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
-    --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
-    --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \
-    --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
-    --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
-    --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \
-    --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
-    --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
-    --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
-    --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \
-    --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
-    --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
-    --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
-    --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
-    --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \
-    --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
-    --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
+pyyaml==6.0.1 \
+    --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \
+    --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \
+    --hash=sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df \
+    --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \
+    --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \
+    --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \
+    --hash=sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595 \
+    --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \
+    --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \
+    --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \
+    --hash=sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290 \
+    --hash=sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9 \
+    --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \
+    --hash=sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6 \
+    --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \
+    --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \
+    --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \
+    --hash=sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6 \
+    --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \
+    --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \
+    --hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \
+    --hash=sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0 \
+    --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \
+    --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \
+    --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \
+    --hash=sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28 \
+    --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \
+    --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \
+    --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \
+    --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \
+    --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \
+    --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \
+    --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \
+    --hash=sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0 \
+    --hash=sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515 \
+    --hash=sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c \
+    --hash=sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c \
+    --hash=sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924 \
+    --hash=sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34 \
+    --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \
+    --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \
+    --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \
+    --hash=sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54 \
+    --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \
+    --hash=sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b \
+    --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \
+    --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \
+    --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \
+    --hash=sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585 \
+    --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \
+    --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f
     # via yamllint
 requests==2.25.1 \
     --hash=sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804 \
diff --git a/examples/bzlmod/test.py b/examples/bzlmod/test.py
index 5331875..24be3ba 100644
--- a/examples/bzlmod/test.py
+++ b/examples/bzlmod/test.py
@@ -14,6 +14,7 @@
 
 import os
 import pathlib
+import re
 import sys
 import unittest
 
@@ -63,16 +64,47 @@
             first_item.endswith("coverage"),
             f"Expected the first item in sys.path '{first_item}' to not be related to coverage",
         )
+
+        # We're trying to make sure that the coverage library added by the
+        # toolchain is _after_ any user-provided dependencies. This lets users
+        # override what coverage version they're using.
+        first_coverage_index = None
+        last_user_dep_index = None
+        for i, path in enumerate(sys.path):
+            if re.search("rules_python.*[~+]pip[~+]", path):
+                last_user_dep_index = i
+            if first_coverage_index is None and re.search(
+                ".*rules_python.*[~+]python[~+].*coverage.*", path
+            ):
+                first_coverage_index = i
+
         if os.environ.get("COVERAGE_MANIFEST"):
-            # we are running under the 'bazel coverage :test'
-            self.assertTrue(
-                "_coverage" in last_item,
-                f"Expected {last_item} to be related to coverage",
+            self.assertIsNotNone(
+                first_coverage_index,
+                "Expected to find toolchain coverage, but "
+                + f"it was not found.\nsys.path:\n{all_paths}",
             )
-            self.assertEqual(pathlib.Path(last_item).name, "coverage")
+            self.assertIsNotNone(
+                last_user_dep_index,
+                "Expected to find at least one user dep, "
+                + "but none were found.\nsys.path:\n{all_paths}",
+            )
+            # we are running under the 'bazel coverage :test'
+            self.assertGreater(
+                first_coverage_index,
+                last_user_dep_index,
+                "Expected coverage provided by the toolchain to be after "
+                + "user provided dependencies.\n"
+                + f"Found coverage at index: {first_coverage_index}\n"
+                + f"Last user dep at index: {last_user_dep_index}\n"
+                + f"Full sys.path:\n{all_paths}",
+            )
         else:
-            self.assertFalse(
-                "coverage" in last_item, f"Expected coverage tooling to not be present"
+            self.assertIsNone(
+                first_coverage_index,
+                "Expected toolchain coverage to not be present\n"
+                + f"Found coverage at index: {first_coverage_index}\n"
+                + f"Full sys.path:\n{all_paths}",
             )
 
     def test_main(self):
diff --git a/examples/bzlmod/tests/BUILD.bazel b/examples/bzlmod/tests/BUILD.bazel
index ce7079c..9f7aa1b 100644
--- a/examples/bzlmod/tests/BUILD.bazel
+++ b/examples/bzlmod/tests/BUILD.bazel
@@ -2,6 +2,8 @@
 load("@python_versions//3.11:defs.bzl", py_binary_3_11 = "py_binary", py_test_3_11 = "py_test")
 load("@python_versions//3.9:defs.bzl", py_binary_3_9 = "py_binary", py_test_3_9 = "py_test")
 load("@rules_python//python:defs.bzl", "py_binary", "py_test")
+load("@rules_python//python:versions.bzl", "MINOR_MAPPING")
+load("@rules_python//python/config_settings:transition.bzl", py_versioned_binary = "py_binary", py_versioned_test = "py_test")
 
 py_binary(
     name = "version_default",
@@ -27,6 +29,13 @@
     main = "version.py",
 )
 
+py_versioned_binary(
+    name = "version_3_10_versioned",
+    srcs = ["version.py"],
+    main = "version.py",
+    python_version = "3.10",
+)
+
 # This is a work in progress and the commented
 # tests will not work  until we can support
 # multiple pips with bzlmod.
@@ -52,6 +61,28 @@
     deps = ["//libs/my_lib"],
 )
 
+py_versioned_test(
+    name = "my_lib_versioned_test",
+    srcs = ["my_lib_test.py"],
+    main = "my_lib_test.py",
+    python_version = "3.10",
+    deps = select(
+        {
+            "@rules_python//python/config_settings:is_python_" + MINOR_MAPPING["3.10"]: ["//libs/my_lib"],
+        },
+        no_match_error = """\
+This test is failing to find dependencies and it seems that the is_python_{version}
+does not match the transitioned configuration of python-version 3.10. Please
+look at the
+
+    @rules_python//python/config_settings:config_settings.bzl
+
+to fix any bugs.""".format(
+            version = MINOR_MAPPING["3.10"],
+        ),
+    ),
+)
+
 py_test(
     name = "version_default_test",
     srcs = ["version_test.py"],
@@ -73,6 +104,14 @@
     main = "version_test.py",
 )
 
+py_versioned_test(
+    name = "version_versioned_test",
+    srcs = ["version_test.py"],
+    env = {"VERSION_CHECK": "3.10"},
+    main = "version_test.py",
+    python_version = "3.10",
+)
+
 py_test_3_11(
     name = "version_3_11_test",
     srcs = ["version_test.py"],
@@ -104,6 +143,19 @@
     main = "cross_version_test.py",
 )
 
+py_versioned_test(
+    name = "version_3_10_takes_3_9_subprocess_test_2",
+    srcs = ["cross_version_test.py"],
+    data = [":version_3_9"],
+    env = {
+        "SUBPROCESS_VERSION_CHECK": "3.9",
+        "SUBPROCESS_VERSION_PY_BINARY": "$(rootpath :version_3_9)",
+        "VERSION_CHECK": "3.10",
+    },
+    main = "cross_version_test.py",
+    python_version = "3.10",
+)
+
 sh_test(
     name = "version_test_binary_default",
     srcs = ["version_test.sh"],
diff --git a/examples/bzlmod/tests/dupe_requirements/BUILD.bazel b/examples/bzlmod/tests/dupe_requirements/BUILD.bazel
deleted file mode 100644
index 47eb7ca..0000000
--- a/examples/bzlmod/tests/dupe_requirements/BUILD.bazel
+++ /dev/null
@@ -1,19 +0,0 @@
-load("@rules_python//python:pip.bzl", "compile_pip_requirements")
-load("@rules_python//python:py_test.bzl", "py_test")
-
-py_test(
-    name = "dupe_requirements_test",
-    srcs = ["dupe_requirements_test.py"],
-    deps = [
-        "@dupe_requirements//pyjwt",
-    ],
-)
-
-compile_pip_requirements(
-    name = "requirements",
-    src = "requirements.in",
-    requirements_txt = "requirements.txt",
-    # This is to make the requirements diff test not run on CI. The content we
-    # need in requirements.txt isn't exactly what will be generated.
-    tags = ["manual"],
-)
diff --git a/examples/bzlmod/tests/dupe_requirements/dupe_requirements_test.py b/examples/bzlmod/tests/dupe_requirements/dupe_requirements_test.py
deleted file mode 100644
index 1139dc5..0000000
--- a/examples/bzlmod/tests/dupe_requirements/dupe_requirements_test.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# There's nothing to test at runtime. Building indicates success.
-# Just import the relevant modules as a basic check.
-import cryptography
-import jwt
diff --git a/examples/bzlmod/tests/dupe_requirements/requirements.in b/examples/bzlmod/tests/dupe_requirements/requirements.in
deleted file mode 100644
index b1f6233..0000000
--- a/examples/bzlmod/tests/dupe_requirements/requirements.in
+++ /dev/null
@@ -1,2 +0,0 @@
-pyjwt
-pyjwt[crypto]
diff --git a/examples/bzlmod/tests/dupe_requirements/requirements.txt b/examples/bzlmod/tests/dupe_requirements/requirements.txt
deleted file mode 100644
index 785f556..0000000
--- a/examples/bzlmod/tests/dupe_requirements/requirements.txt
+++ /dev/null
@@ -1,97 +0,0 @@
-#
-# This file is manually tweaked output from the automatic generation.
-# To generate:
-# 1. bazel run //tests/dupe_requirements:requirements.update
-# 2. Then copy/paste the pyjtw lines so there are duplicates
-#
-pyjwt==2.8.0 \
-    --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \
-    --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320
-    # via -r tests/dupe_requirements/requirements.in
-pyjwt[crypto]==2.8.0 \
-    --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \
-    --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320
-    # via -r tests/dupe_requirements/requirements.in
-cffi==1.16.0 \
-    --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \
-    --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \
-    --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \
-    --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \
-    --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \
-    --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \
-    --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \
-    --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \
-    --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \
-    --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \
-    --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \
-    --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \
-    --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \
-    --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \
-    --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \
-    --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \
-    --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \
-    --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \
-    --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \
-    --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \
-    --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \
-    --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \
-    --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \
-    --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \
-    --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \
-    --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \
-    --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \
-    --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \
-    --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \
-    --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \
-    --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \
-    --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \
-    --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \
-    --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \
-    --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \
-    --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \
-    --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \
-    --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \
-    --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \
-    --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \
-    --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \
-    --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \
-    --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \
-    --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \
-    --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \
-    --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \
-    --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \
-    --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \
-    --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \
-    --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \
-    --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \
-    --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357
-    # via cryptography
-cryptography==41.0.7 \
-    --hash=sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960 \
-    --hash=sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a \
-    --hash=sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc \
-    --hash=sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a \
-    --hash=sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf \
-    --hash=sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1 \
-    --hash=sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39 \
-    --hash=sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406 \
-    --hash=sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a \
-    --hash=sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a \
-    --hash=sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c \
-    --hash=sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be \
-    --hash=sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15 \
-    --hash=sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2 \
-    --hash=sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d \
-    --hash=sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157 \
-    --hash=sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003 \
-    --hash=sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248 \
-    --hash=sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a \
-    --hash=sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec \
-    --hash=sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309 \
-    --hash=sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7 \
-    --hash=sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d
-    # via pyjwt
-pycparser==2.21 \
-    --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
-    --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
-    # via cffi
diff --git a/examples/bzlmod_build_file_generation/.bazelrc b/examples/bzlmod_build_file_generation/.bazelrc
index 1fbada7..acc7102 100644
--- a/examples/bzlmod_build_file_generation/.bazelrc
+++ b/examples/bzlmod_build_file_generation/.bazelrc
@@ -2,7 +2,6 @@
 
 # Windows requires these for multi-python support:
 build --enable_runfiles
-startup --windows_enable_symlinks
 
 common --experimental_enable_bzlmod
 
diff --git a/examples/bzlmod_build_file_generation/.bazelversion b/examples/bzlmod_build_file_generation/.bazelversion
deleted file mode 100644
index 09b254e..0000000
--- a/examples/bzlmod_build_file_generation/.bazelversion
+++ /dev/null
@@ -1 +0,0 @@
-6.0.0
diff --git a/examples/bzlmod_build_file_generation/BUILD.bazel b/examples/bzlmod_build_file_generation/BUILD.bazel
index bca3b36..33d01f4 100644
--- a/examples/bzlmod_build_file_generation/BUILD.bazel
+++ b/examples/bzlmod_build_file_generation/BUILD.bazel
@@ -29,6 +29,8 @@
     exclude_patterns = [
         "^_|(\\._)+",  # This is the default.
         "(\\.tests)+",  # Add a custom one to get rid of the psutil tests.
+        "^colorama",  # Get rid of colorama on Windows.
+        "^lazy_object_proxy\\.cext$",  # Get rid of this on Linux because it isn't included on Windows.
     ],
     wheels = all_whl_requirements,
 )
@@ -47,10 +49,6 @@
     name = "gazelle_python_manifest",
     modules_mapping = ":modules_map",
     pip_repository_name = "pip",
-    requirements = [
-        "//:requirements_lock.txt",
-        "//:requirements_windows.txt",
-    ],
     tags = ["exclusive"],
 )
 
diff --git a/examples/bzlmod_build_file_generation/gazelle_python.yaml b/examples/bzlmod_build_file_generation/gazelle_python.yaml
index 46a1c8b..d0d3224 100644
--- a/examples/bzlmod_build_file_generation/gazelle_python.yaml
+++ b/examples/bzlmod_build_file_generation/gazelle_python.yaml
@@ -6,584 +6,30 @@
 manifest:
   modules_mapping:
     S3: s3cmd
-    S3.ACL: s3cmd
-    S3.AccessLog: s3cmd
-    S3.BidirMap: s3cmd
-    S3.CloudFront: s3cmd
-    S3.Config: s3cmd
-    S3.ConnMan: s3cmd
-    S3.Crypto: s3cmd
-    S3.Custom_httplib27: s3cmd
-    S3.Custom_httplib3x: s3cmd
-    S3.Exceptions: s3cmd
-    S3.ExitCodes: s3cmd
-    S3.FileDict: s3cmd
-    S3.FileLists: s3cmd
-    S3.HashCache: s3cmd
-    S3.MultiPart: s3cmd
-    S3.PkgInfo: s3cmd
-    S3.Progress: s3cmd
-    S3.S3: s3cmd
-    S3.S3Uri: s3cmd
-    S3.SortedDict: s3cmd
-    S3.Utils: s3cmd
     astroid: astroid
-    astroid.arguments: astroid
-    astroid.astroid_manager: astroid
-    astroid.bases: astroid
-    astroid.brain: astroid
-    astroid.brain.brain_argparse: astroid
-    astroid.brain.brain_attrs: astroid
-    astroid.brain.brain_boto3: astroid
-    astroid.brain.brain_builtin_inference: astroid
-    astroid.brain.brain_collections: astroid
-    astroid.brain.brain_crypt: astroid
-    astroid.brain.brain_ctypes: astroid
-    astroid.brain.brain_curses: astroid
-    astroid.brain.brain_dataclasses: astroid
-    astroid.brain.brain_dateutil: astroid
-    astroid.brain.brain_fstrings: astroid
-    astroid.brain.brain_functools: astroid
-    astroid.brain.brain_gi: astroid
-    astroid.brain.brain_hashlib: astroid
-    astroid.brain.brain_http: astroid
-    astroid.brain.brain_hypothesis: astroid
-    astroid.brain.brain_io: astroid
-    astroid.brain.brain_mechanize: astroid
-    astroid.brain.brain_multiprocessing: astroid
-    astroid.brain.brain_namedtuple_enum: astroid
-    astroid.brain.brain_nose: astroid
-    astroid.brain.brain_numpy_core_einsumfunc: astroid
-    astroid.brain.brain_numpy_core_fromnumeric: astroid
-    astroid.brain.brain_numpy_core_function_base: astroid
-    astroid.brain.brain_numpy_core_multiarray: astroid
-    astroid.brain.brain_numpy_core_numeric: astroid
-    astroid.brain.brain_numpy_core_numerictypes: astroid
-    astroid.brain.brain_numpy_core_umath: astroid
-    astroid.brain.brain_numpy_ma: astroid
-    astroid.brain.brain_numpy_ndarray: astroid
-    astroid.brain.brain_numpy_random_mtrand: astroid
-    astroid.brain.brain_numpy_utils: astroid
-    astroid.brain.brain_pathlib: astroid
-    astroid.brain.brain_pkg_resources: astroid
-    astroid.brain.brain_pytest: astroid
-    astroid.brain.brain_qt: astroid
-    astroid.brain.brain_random: astroid
-    astroid.brain.brain_re: astroid
-    astroid.brain.brain_responses: astroid
-    astroid.brain.brain_scipy_signal: astroid
-    astroid.brain.brain_signal: astroid
-    astroid.brain.brain_six: astroid
-    astroid.brain.brain_sqlalchemy: astroid
-    astroid.brain.brain_ssl: astroid
-    astroid.brain.brain_subprocess: astroid
-    astroid.brain.brain_threading: astroid
-    astroid.brain.brain_type: astroid
-    astroid.brain.brain_typing: astroid
-    astroid.brain.brain_unittest: astroid
-    astroid.brain.brain_uuid: astroid
-    astroid.brain.helpers: astroid
-    astroid.builder: astroid
-    astroid.const: astroid
-    astroid.context: astroid
-    astroid.decorators: astroid
-    astroid.exceptions: astroid
-    astroid.filter_statements: astroid
-    astroid.helpers: astroid
-    astroid.inference: astroid
-    astroid.inference_tip: astroid
-    astroid.interpreter: astroid
-    astroid.interpreter.dunder_lookup: astroid
-    astroid.interpreter.objectmodel: astroid
-    astroid.manager: astroid
-    astroid.mixins: astroid
-    astroid.modutils: astroid
-    astroid.node_classes: astroid
-    astroid.nodes: astroid
-    astroid.nodes.as_string: astroid
-    astroid.nodes.const: astroid
-    astroid.nodes.node_classes: astroid
-    astroid.nodes.node_ng: astroid
-    astroid.nodes.scoped_nodes: astroid
-    astroid.nodes.scoped_nodes.mixin: astroid
-    astroid.nodes.scoped_nodes.scoped_nodes: astroid
-    astroid.nodes.scoped_nodes.utils: astroid
-    astroid.nodes.utils: astroid
-    astroid.objects: astroid
-    astroid.protocols: astroid
-    astroid.raw_building: astroid
-    astroid.rebuilder: astroid
-    astroid.scoped_nodes: astroid
-    astroid.test_utils: astroid
-    astroid.transforms: astroid
-    astroid.typing: astroid
-    astroid.util: astroid
     certifi: certifi
-    certifi.core: certifi
     chardet: chardet
-    chardet.big5freq: chardet
-    chardet.big5prober: chardet
-    chardet.chardistribution: chardet
-    chardet.charsetgroupprober: chardet
-    chardet.charsetprober: chardet
-    chardet.cli: chardet
-    chardet.cli.chardetect: chardet
-    chardet.codingstatemachine: chardet
-    chardet.compat: chardet
-    chardet.cp949prober: chardet
-    chardet.enums: chardet
-    chardet.escprober: chardet
-    chardet.escsm: chardet
-    chardet.eucjpprober: chardet
-    chardet.euckrfreq: chardet
-    chardet.euckrprober: chardet
-    chardet.euctwfreq: chardet
-    chardet.euctwprober: chardet
-    chardet.gb2312freq: chardet
-    chardet.gb2312prober: chardet
-    chardet.hebrewprober: chardet
-    chardet.jisfreq: chardet
-    chardet.jpcntx: chardet
-    chardet.langbulgarianmodel: chardet
-    chardet.langgreekmodel: chardet
-    chardet.langhebrewmodel: chardet
-    chardet.langhungarianmodel: chardet
-    chardet.langrussianmodel: chardet
-    chardet.langthaimodel: chardet
-    chardet.langturkishmodel: chardet
-    chardet.latin1prober: chardet
-    chardet.mbcharsetprober: chardet
-    chardet.mbcsgroupprober: chardet
-    chardet.mbcssm: chardet
-    chardet.metadata: chardet
-    chardet.metadata.languages: chardet
-    chardet.sbcharsetprober: chardet
-    chardet.sbcsgroupprober: chardet
-    chardet.sjisprober: chardet
-    chardet.universaldetector: chardet
-    chardet.utf8prober: chardet
-    chardet.version: chardet
     dateutil: python_dateutil
-    dateutil.easter: python_dateutil
-    dateutil.parser: python_dateutil
-    dateutil.parser.isoparser: python_dateutil
-    dateutil.relativedelta: python_dateutil
-    dateutil.rrule: python_dateutil
-    dateutil.tz: python_dateutil
-    dateutil.tz.tz: python_dateutil
-    dateutil.tz.win: python_dateutil
-    dateutil.tzwin: python_dateutil
-    dateutil.utils: python_dateutil
-    dateutil.zoneinfo: python_dateutil
-    dateutil.zoneinfo.rebuild: python_dateutil
     dill: dill
-    dill.detect: dill
-    dill.logger: dill
-    dill.objtypes: dill
-    dill.pointers: dill
-    dill.session: dill
-    dill.settings: dill
-    dill.source: dill
-    dill.temp: dill
     idna: idna
-    idna.codec: idna
-    idna.compat: idna
-    idna.core: idna
-    idna.idnadata: idna
-    idna.intranges: idna
-    idna.package_data: idna
-    idna.uts46data: idna
     isort: isort
-    isort.api: isort
-    isort.comments: isort
-    isort.core: isort
-    isort.deprecated: isort
-    isort.deprecated.finders: isort
-    isort.exceptions: isort
-    isort.files: isort
-    isort.format: isort
-    isort.hooks: isort
-    isort.identify: isort
-    isort.io: isort
-    isort.literal: isort
-    isort.logo: isort
-    isort.main: isort
-    isort.output: isort
-    isort.parse: isort
-    isort.place: isort
-    isort.profiles: isort
-    isort.pylama_isort: isort
-    isort.sections: isort
-    isort.settings: isort
-    isort.setuptools_commands: isort
-    isort.sorting: isort
-    isort.stdlibs: isort
-    isort.stdlibs.all: isort
-    isort.stdlibs.py2: isort
-    isort.stdlibs.py27: isort
-    isort.stdlibs.py3: isort
-    isort.stdlibs.py310: isort
-    isort.stdlibs.py311: isort
-    isort.stdlibs.py36: isort
-    isort.stdlibs.py37: isort
-    isort.stdlibs.py38: isort
-    isort.stdlibs.py39: isort
-    isort.utils: isort
-    isort.wrap: isort
-    isort.wrap_modes: isort
     lazy_object_proxy: lazy_object_proxy
-    lazy_object_proxy.compat: lazy_object_proxy
-    lazy_object_proxy.simple: lazy_object_proxy
-    lazy_object_proxy.slots: lazy_object_proxy
-    lazy_object_proxy.utils: lazy_object_proxy
     magic: python_magic
-    magic.compat: python_magic
-    magic.loader: python_magic
     mccabe: mccabe
     pathspec: pathspec
-    pathspec.gitignore: pathspec
-    pathspec.pathspec: pathspec
-    pathspec.pattern: pathspec
-    pathspec.patterns: pathspec
-    pathspec.patterns.gitwildmatch: pathspec
-    pathspec.util: pathspec
     pkg_resources: setuptools
-    pkg_resources.extern: setuptools
     platformdirs: platformdirs
-    platformdirs.android: platformdirs
-    platformdirs.api: platformdirs
-    platformdirs.macos: platformdirs
-    platformdirs.unix: platformdirs
-    platformdirs.version: platformdirs
-    platformdirs.windows: platformdirs
     pylint: pylint
-    pylint.checkers: pylint
-    pylint.checkers.async: pylint
-    pylint.checkers.base: pylint
-    pylint.checkers.base.basic_checker: pylint
-    pylint.checkers.base.basic_error_checker: pylint
-    pylint.checkers.base.comparison_checker: pylint
-    pylint.checkers.base.docstring_checker: pylint
-    pylint.checkers.base.name_checker: pylint
-    pylint.checkers.base.name_checker.checker: pylint
-    pylint.checkers.base.name_checker.naming_style: pylint
-    pylint.checkers.base.pass_checker: pylint
-    pylint.checkers.base_checker: pylint
-    pylint.checkers.classes: pylint
-    pylint.checkers.classes.class_checker: pylint
-    pylint.checkers.classes.special_methods_checker: pylint
-    pylint.checkers.deprecated: pylint
-    pylint.checkers.design_analysis: pylint
-    pylint.checkers.dunder_methods: pylint
-    pylint.checkers.ellipsis_checker: pylint
-    pylint.checkers.exceptions: pylint
-    pylint.checkers.format: pylint
-    pylint.checkers.imports: pylint
-    pylint.checkers.lambda_expressions: pylint
-    pylint.checkers.logging: pylint
-    pylint.checkers.mapreduce_checker: pylint
-    pylint.checkers.method_args: pylint
-    pylint.checkers.misc: pylint
-    pylint.checkers.modified_iterating_checker: pylint
-    pylint.checkers.newstyle: pylint
-    pylint.checkers.non_ascii_names: pylint
-    pylint.checkers.raw_metrics: pylint
-    pylint.checkers.refactoring: pylint
-    pylint.checkers.refactoring.implicit_booleaness_checker: pylint
-    pylint.checkers.refactoring.not_checker: pylint
-    pylint.checkers.refactoring.recommendation_checker: pylint
-    pylint.checkers.refactoring.refactoring_checker: pylint
-    pylint.checkers.similar: pylint
-    pylint.checkers.spelling: pylint
-    pylint.checkers.stdlib: pylint
-    pylint.checkers.strings: pylint
-    pylint.checkers.threading_checker: pylint
-    pylint.checkers.typecheck: pylint
-    pylint.checkers.unicode: pylint
-    pylint.checkers.unsupported_version: pylint
-    pylint.checkers.utils: pylint
-    pylint.checkers.variables: pylint
-    pylint.config: pylint
-    pylint.config.argument: pylint
-    pylint.config.arguments_manager: pylint
-    pylint.config.arguments_provider: pylint
-    pylint.config.callback_actions: pylint
-    pylint.config.config_file_parser: pylint
-    pylint.config.config_initialization: pylint
-    pylint.config.configuration_mixin: pylint
-    pylint.config.deprecation_actions: pylint
-    pylint.config.environment_variable: pylint
-    pylint.config.exceptions: pylint
-    pylint.config.find_default_config_files: pylint
-    pylint.config.help_formatter: pylint
-    pylint.config.option: pylint
-    pylint.config.option_manager_mixin: pylint
-    pylint.config.option_parser: pylint
-    pylint.config.options_provider_mixin: pylint
-    pylint.config.utils: pylint
-    pylint.constants: pylint
-    pylint.epylint: pylint
-    pylint.exceptions: pylint
-    pylint.extensions: pylint
-    pylint.extensions.bad_builtin: pylint
-    pylint.extensions.broad_try_clause: pylint
-    pylint.extensions.check_elif: pylint
-    pylint.extensions.code_style: pylint
-    pylint.extensions.comparetozero: pylint
-    pylint.extensions.comparison_placement: pylint
-    pylint.extensions.confusing_elif: pylint
-    pylint.extensions.consider_ternary_expression: pylint
-    pylint.extensions.docparams: pylint
-    pylint.extensions.docstyle: pylint
-    pylint.extensions.empty_comment: pylint
-    pylint.extensions.emptystring: pylint
-    pylint.extensions.eq_without_hash: pylint
-    pylint.extensions.for_any_all: pylint
-    pylint.extensions.mccabe: pylint
-    pylint.extensions.no_self_use: pylint
-    pylint.extensions.overlapping_exceptions: pylint
-    pylint.extensions.private_import: pylint
-    pylint.extensions.redefined_loop_name: pylint
-    pylint.extensions.redefined_variable_type: pylint
-    pylint.extensions.set_membership: pylint
-    pylint.extensions.typing: pylint
-    pylint.extensions.while_used: pylint
-    pylint.graph: pylint
-    pylint.interfaces: pylint
-    pylint.lint: pylint
-    pylint.lint.base_options: pylint
-    pylint.lint.caching: pylint
-    pylint.lint.expand_modules: pylint
-    pylint.lint.message_state_handler: pylint
-    pylint.lint.parallel: pylint
-    pylint.lint.pylinter: pylint
-    pylint.lint.report_functions: pylint
-    pylint.lint.run: pylint
-    pylint.lint.utils: pylint
-    pylint.message: pylint
-    pylint.message.message: pylint
-    pylint.message.message_definition: pylint
-    pylint.message.message_definition_store: pylint
-    pylint.message.message_id_store: pylint
-    pylint.pyreverse: pylint
-    pylint.pyreverse.diadefslib: pylint
-    pylint.pyreverse.diagrams: pylint
-    pylint.pyreverse.dot_printer: pylint
-    pylint.pyreverse.inspector: pylint
-    pylint.pyreverse.main: pylint
-    pylint.pyreverse.mermaidjs_printer: pylint
-    pylint.pyreverse.plantuml_printer: pylint
-    pylint.pyreverse.printer: pylint
-    pylint.pyreverse.printer_factory: pylint
-    pylint.pyreverse.utils: pylint
-    pylint.pyreverse.vcg_printer: pylint
-    pylint.pyreverse.writer: pylint
-    pylint.reporters: pylint
-    pylint.reporters.base_reporter: pylint
-    pylint.reporters.collecting_reporter: pylint
-    pylint.reporters.json_reporter: pylint
-    pylint.reporters.multi_reporter: pylint
-    pylint.reporters.reports_handler_mix_in: pylint
-    pylint.reporters.text: pylint
-    pylint.reporters.ureports: pylint
-    pylint.reporters.ureports.base_writer: pylint
-    pylint.reporters.ureports.nodes: pylint
-    pylint.reporters.ureports.text_writer: pylint
-    pylint.testutils: pylint
-    pylint.testutils.checker_test_case: pylint
-    pylint.testutils.configuration_test: pylint
-    pylint.testutils.constants: pylint
-    pylint.testutils.decorator: pylint
-    pylint.testutils.functional: pylint
-    pylint.testutils.functional.find_functional_tests: pylint
-    pylint.testutils.functional.lint_module_output_update: pylint
-    pylint.testutils.functional.test_file: pylint
-    pylint.testutils.functional_test_file: pylint
-    pylint.testutils.get_test_info: pylint
-    pylint.testutils.global_test_linter: pylint
-    pylint.testutils.lint_module_test: pylint
-    pylint.testutils.output_line: pylint
-    pylint.testutils.pyreverse: pylint
-    pylint.testutils.reporter_for_tests: pylint
-    pylint.testutils.tokenize_str: pylint
-    pylint.testutils.unittest_linter: pylint
-    pylint.testutils.utils: pylint
-    pylint.typing: pylint
-    pylint.utils: pylint
-    pylint.utils.ast_walker: pylint
-    pylint.utils.docs: pylint
-    pylint.utils.file_state: pylint
-    pylint.utils.linterstats: pylint
-    pylint.utils.pragma_parser: pylint
-    pylint.utils.utils: pylint
     requests: requests
-    requests.adapters: requests
-    requests.api: requests
-    requests.auth: requests
-    requests.certs: requests
-    requests.compat: requests
-    requests.cookies: requests
-    requests.exceptions: requests
-    requests.help: requests
-    requests.hooks: requests
-    requests.models: requests
-    requests.packages: requests
-    requests.sessions: requests
-    requests.status_codes: requests
-    requests.structures: requests
-    requests.utils: requests
     setuptools: setuptools
-    setuptools.archive_util: setuptools
-    setuptools.build_meta: setuptools
-    setuptools.command: setuptools
-    setuptools.command.alias: setuptools
-    setuptools.command.bdist_egg: setuptools
-    setuptools.command.bdist_rpm: setuptools
-    setuptools.command.build: setuptools
-    setuptools.command.build_clib: setuptools
-    setuptools.command.build_ext: setuptools
-    setuptools.command.build_py: setuptools
-    setuptools.command.develop: setuptools
-    setuptools.command.dist_info: setuptools
-    setuptools.command.easy_install: setuptools
-    setuptools.command.editable_wheel: setuptools
-    setuptools.command.egg_info: setuptools
-    setuptools.command.install: setuptools
-    setuptools.command.install_egg_info: setuptools
-    setuptools.command.install_lib: setuptools
-    setuptools.command.install_scripts: setuptools
-    setuptools.command.py36compat: setuptools
-    setuptools.command.register: setuptools
-    setuptools.command.rotate: setuptools
-    setuptools.command.saveopts: setuptools
-    setuptools.command.sdist: setuptools
-    setuptools.command.setopt: setuptools
-    setuptools.command.test: setuptools
-    setuptools.command.upload: setuptools
-    setuptools.command.upload_docs: setuptools
-    setuptools.config: setuptools
-    setuptools.config.expand: setuptools
-    setuptools.config.pyprojecttoml: setuptools
-    setuptools.config.setupcfg: setuptools
-    setuptools.dep_util: setuptools
-    setuptools.depends: setuptools
-    setuptools.discovery: setuptools
-    setuptools.dist: setuptools
-    setuptools.errors: setuptools
-    setuptools.extension: setuptools
-    setuptools.extern: setuptools
-    setuptools.glob: setuptools
-    setuptools.installer: setuptools
-    setuptools.launch: setuptools
-    setuptools.logging: setuptools
-    setuptools.monkey: setuptools
-    setuptools.msvc: setuptools
-    setuptools.namespaces: setuptools
-    setuptools.package_index: setuptools
-    setuptools.py34compat: setuptools
-    setuptools.sandbox: setuptools
-    setuptools.unicode_utils: setuptools
-    setuptools.version: setuptools
-    setuptools.wheel: setuptools
-    setuptools.windows_support: setuptools
     six: six
     tabulate: tabulate
-    tabulate.version: tabulate
     tomli: tomli
     tomlkit: tomlkit
-    tomlkit.api: tomlkit
-    tomlkit.container: tomlkit
-    tomlkit.exceptions: tomlkit
-    tomlkit.items: tomlkit
-    tomlkit.parser: tomlkit
-    tomlkit.source: tomlkit
-    tomlkit.toml_char: tomlkit
-    tomlkit.toml_document: tomlkit
-    tomlkit.toml_file: tomlkit
     typing_extensions: typing_extensions
     urllib3: urllib3
-    urllib3.connection: urllib3
-    urllib3.connectionpool: urllib3
-    urllib3.contrib: urllib3
-    urllib3.contrib.appengine: urllib3
-    urllib3.contrib.ntlmpool: urllib3
-    urllib3.contrib.pyopenssl: urllib3
-    urllib3.contrib.securetransport: urllib3
-    urllib3.contrib.socks: urllib3
-    urllib3.exceptions: urllib3
-    urllib3.fields: urllib3
-    urllib3.filepost: urllib3
-    urllib3.packages: urllib3
-    urllib3.packages.backports: urllib3
-    urllib3.packages.backports.makefile: urllib3
-    urllib3.packages.six: urllib3
-    urllib3.poolmanager: urllib3
-    urllib3.request: urllib3
-    urllib3.response: urllib3
-    urllib3.util: urllib3
-    urllib3.util.connection: urllib3
-    urllib3.util.proxy: urllib3
-    urllib3.util.queue: urllib3
-    urllib3.util.request: urllib3
-    urllib3.util.response: urllib3
-    urllib3.util.retry: urllib3
-    urllib3.util.ssl_: urllib3
-    urllib3.util.ssl_match_hostname: urllib3
-    urllib3.util.ssltransport: urllib3
-    urllib3.util.timeout: urllib3
-    urllib3.util.url: urllib3
-    urllib3.util.wait: urllib3
     wrapt: wrapt
-    wrapt.arguments: wrapt
-    wrapt.decorators: wrapt
-    wrapt.importer: wrapt
-    wrapt.wrappers: wrapt
     yaml: PyYAML
-    yaml.composer: PyYAML
-    yaml.constructor: PyYAML
-    yaml.cyaml: PyYAML
-    yaml.dumper: PyYAML
-    yaml.emitter: PyYAML
-    yaml.error: PyYAML
-    yaml.events: PyYAML
-    yaml.loader: PyYAML
-    yaml.nodes: PyYAML
-    yaml.parser: PyYAML
-    yaml.reader: PyYAML
-    yaml.representer: PyYAML
-    yaml.resolver: PyYAML
-    yaml.scanner: PyYAML
-    yaml.serializer: PyYAML
-    yaml.tokens: PyYAML
     yamllint: yamllint
-    yamllint.cli: yamllint
-    yamllint.config: yamllint
-    yamllint.linter: yamllint
-    yamllint.parser: yamllint
-    yamllint.rules: yamllint
-    yamllint.rules.braces: yamllint
-    yamllint.rules.brackets: yamllint
-    yamllint.rules.colons: yamllint
-    yamllint.rules.commas: yamllint
-    yamllint.rules.comments: yamllint
-    yamllint.rules.comments_indentation: yamllint
-    yamllint.rules.common: yamllint
-    yamllint.rules.document_end: yamllint
-    yamllint.rules.document_start: yamllint
-    yamllint.rules.empty_lines: yamllint
-    yamllint.rules.empty_values: yamllint
-    yamllint.rules.float_values: yamllint
-    yamllint.rules.hyphens: yamllint
-    yamllint.rules.indentation: yamllint
-    yamllint.rules.key_duplicates: yamllint
-    yamllint.rules.key_ordering: yamllint
-    yamllint.rules.line_length: yamllint
-    yamllint.rules.new_line_at_end_of_file: yamllint
-    yamllint.rules.new_lines: yamllint
-    yamllint.rules.octal_values: yamllint
-    yamllint.rules.quoted_strings: yamllint
-    yamllint.rules.trailing_spaces: yamllint
-    yamllint.rules.truthy: yamllint
   pip_repository:
     name: pip
-integrity: cd25503dc6b3d9e1c5f46715ba2d0499ecc8b3d654ebcbf9f4e52f2074290e0a
diff --git a/examples/bzlmod_build_file_generation/requirements_lock.txt b/examples/bzlmod_build_file_generation/requirements_lock.txt
index 3fd053f..8ba315b 100644
--- a/examples/bzlmod_build_file_generation/requirements_lock.txt
+++ b/examples/bzlmod_build_file_generation/requirements_lock.txt
@@ -8,9 +8,9 @@
     --hash=sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907 \
     --hash=sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7
     # via pylint
-certifi==2022.12.7 \
-    --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
-    --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+certifi==2023.7.22 \
+    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
+    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
     # via requests
 chardet==4.0.0 \
     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \
diff --git a/examples/bzlmod_build_file_generation/requirements_windows.txt b/examples/bzlmod_build_file_generation/requirements_windows.txt
index 15e9228..09971f9 100644
--- a/examples/bzlmod_build_file_generation/requirements_windows.txt
+++ b/examples/bzlmod_build_file_generation/requirements_windows.txt
@@ -8,9 +8,9 @@
     --hash=sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907 \
     --hash=sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7
     # via pylint
-certifi==2022.12.7 \
-    --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
-    --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+certifi==2023.7.22 \
+    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
+    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
     # via requests
 chardet==4.0.0 \
     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \
diff --git a/examples/bzlmod_build_file_generation/runfiles/runfiles_test.py b/examples/bzlmod_build_file_generation/runfiles/runfiles_test.py
index a588040..5bfa530 100644
--- a/examples/bzlmod_build_file_generation/runfiles/runfiles_test.py
+++ b/examples/bzlmod_build_file_generation/runfiles/runfiles_test.py
@@ -26,7 +26,9 @@
         self.assertEqual(runfiles.Create().CurrentRepository(), "")
 
     def testRunfilesWithRepoMapping(self):
-        data_path = runfiles.Create().Rlocation("example_bzlmod_build_file_generation/runfiles/data/data.txt")
+        data_path = runfiles.Create().Rlocation(
+            "example_bzlmod_build_file_generation/runfiles/data/data.txt"
+        )
         with open(data_path) as f:
             self.assertEqual(f.read().strip(), "Hello, example_bzlmod!")
 
diff --git a/examples/multi_python_versions/.bazelrc b/examples/multi_python_versions/.bazelrc
index 3fd6365..58080ab 100644
--- a/examples/multi_python_versions/.bazelrc
+++ b/examples/multi_python_versions/.bazelrc
@@ -2,6 +2,5 @@
 
 # Windows requires these for multi-python support:
 build --enable_runfiles
-startup --windows_enable_symlinks
 
 coverage --java_runtime_version=remotejdk_11
diff --git a/examples/multi_python_versions/WORKSPACE b/examples/multi_python_versions/WORKSPACE
index 35855ca..4f731d9 100644
--- a/examples/multi_python_versions/WORKSPACE
+++ b/examples/multi_python_versions/WORKSPACE
@@ -9,10 +9,6 @@
 
 py_repositories()
 
-load("@rules_python//python/pip_install:repositories.bzl", "pip_install_dependencies")
-
-pip_install_dependencies()
-
 default_python_version = "3.9"
 
 python_register_multi_toolchains(
@@ -28,19 +24,15 @@
 )
 
 load("@python//:pip.bzl", "multi_pip_parse")
-load("@python//3.10:defs.bzl", interpreter_3_10 = "interpreter")
-load("@python//3.11:defs.bzl", interpreter_3_11 = "interpreter")
-load("@python//3.8:defs.bzl", interpreter_3_8 = "interpreter")
-load("@python//3.9:defs.bzl", interpreter_3_9 = "interpreter")
 
 multi_pip_parse(
     name = "pypi",
     default_version = default_python_version,
     python_interpreter_target = {
-        "3.10": interpreter_3_10,
-        "3.11": interpreter_3_11,
-        "3.8": interpreter_3_8,
-        "3.9": interpreter_3_9,
+        "3.10": "@python_3_10_host//:python",
+        "3.11": "@python_3_11_host//:python",
+        "3.8": "@python_3_8_host//:python",
+        "3.9": "@python_3_9_host//:python",
     },
     requirements_lock = {
         "3.10": "//requirements:requirements_lock_3_10.txt",
diff --git a/examples/multi_python_versions/tests/my_lib_test.py b/examples/multi_python_versions/tests/my_lib_test.py
index 1d4880f..449cb84 100644
--- a/examples/multi_python_versions/tests/my_lib_test.py
+++ b/examples/multi_python_versions/tests/my_lib_test.py
@@ -23,5 +23,9 @@
 if not my_lib.websockets_is_for_python_version(
     workspace_version
 ) and not my_lib.websockets_is_for_python_version(bzlmod_version):
-    print("expected package for Python version is different than returned")
+    print(
+        "expected package for Python version is different than returned\n"
+        f"expected either {workspace_version} or {bzlmod_version}\n"
+        f"but got {my_lib.websockets.__file__}"
+    )
     sys.exit(1)
diff --git a/examples/pip_parse/BUILD.bazel b/examples/pip_parse/BUILD.bazel
index 367a795..fd744a2 100644
--- a/examples/pip_parse/BUILD.bazel
+++ b/examples/pip_parse/BUILD.bazel
@@ -24,7 +24,7 @@
 #toolchain(
 #    name = "my_py_toolchain",
 #    toolchain = ":my_py_runtime_pair",
-#    toolchain_type = "@bazel_tools//tools/python:toolchain_type",
+#    toolchain_type = "@rules_python//python:toolchain_type",
 #)
 # End of toolchain setup.
 
diff --git a/examples/pip_parse/MODULE.bazel b/examples/pip_parse/MODULE.bazel
index 3977f8a..f9ca908 100644
--- a/examples/pip_parse/MODULE.bazel
+++ b/examples/pip_parse/MODULE.bazel
@@ -8,11 +8,20 @@
 
 python = use_extension("@rules_python//python/extensions:python.bzl", "python")
 python.toolchain(
-    python_version = "3.9",
+    # We can specify the exact version.
+    python_version = "3.9.13",
+)
+
+# You can use this repo mapping to ensure that your BUILD.bazel files don't need
+# to be updated when the python version changes to a different `3.9` version.
+use_repo(
+    python,
+    python_3_9 = "python_3_9_13",
 )
 
 pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
 pip.parse(
+    download_only = True,
     experimental_requirement_cycles = {
         "sphinx": [
             "sphinx",
@@ -24,7 +33,8 @@
         ],
     },
     hub_name = "pypi",
-    python_version = "3.9",
+    # We need to use the same version here as in the `python.toolchain` call.
+    python_version = "3.9.13",
     requirements_lock = "//:requirements_lock.txt",
     requirements_windows = "//:requirements_windows.txt",
 )
diff --git a/examples/pip_parse/WORKSPACE b/examples/pip_parse/WORKSPACE
index 415d064..bb4714d 100644
--- a/examples/pip_parse/WORKSPACE
+++ b/examples/pip_parse/WORKSPACE
@@ -10,11 +10,10 @@
 py_repositories()
 
 python_register_toolchains(
-    name = "python39",
-    python_version = "3.9",
+    name = "python_3_9",
+    python_version = "3.9.13",
 )
 
-load("@python39//:defs.bzl", "interpreter")
 load("@rules_python//python:pip.bzl", "pip_parse")
 
 pip_parse(
@@ -52,7 +51,7 @@
     # 3. Wrapper script, like in the autodetecting python toolchain.
     #
     # Here, we use the interpreter constant that resolves to the host interpreter from the default Python toolchain.
-    python_interpreter_target = interpreter,
+    python_interpreter_target = "@python_3_9_host//:python",
 
     # (Optional) You can set quiet to False if you want to see pip output.
     #quiet = False,
diff --git a/examples/pip_parse/pip_parse_test.py b/examples/pip_parse/pip_parse_test.py
index 79e1a75..2fdd454 100644
--- a/examples/pip_parse/pip_parse_test.py
+++ b/examples/pip_parse/pip_parse_test.py
@@ -28,10 +28,7 @@
     def _remove_leading_dirs(self, paths):
         # Removes the first two directories (external/<reponame>)
         # to normalize what workspace and bzlmod produce.
-        return [
-            '/'.join(v.split('/')[2:])
-            for v in paths
-        ]
+        return ["/".join(v.split("/")[2:]) for v in paths]
 
     def test_entry_point(self):
         entry_point_path = os.environ.get("YAMLLINT_ENTRY_POINT")
diff --git a/examples/pip_parse/requirements_lock.txt b/examples/pip_parse/requirements_lock.txt
index b0eedf3..4e8af7f 100644
--- a/examples/pip_parse/requirements_lock.txt
+++ b/examples/pip_parse/requirements_lock.txt
@@ -12,9 +12,9 @@
     --hash=sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900 \
     --hash=sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed
     # via sphinx
-certifi==2022.12.7 \
-    --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
-    --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+certifi==2023.7.22 \
+    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
+    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
     # via requests
 chardet==4.0.0 \
     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \
@@ -36,9 +36,9 @@
     --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \
     --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743
     # via sphinx
-jinja2==3.1.2 \
-    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
-    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
+jinja2==3.1.4 \
+    --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
+    --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
     # via sphinx
 markupsafe==2.1.3 \
     --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
@@ -232,7 +232,7 @@
     # via importlib-metadata
 
 # The following packages are considered to be unsafe in a requirements file:
-setuptools==65.6.3 \
-    --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \
-    --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75
+setuptools==70.0.0 \
+    --hash=sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4 \
+    --hash=sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0
     # via yamllint
diff --git a/examples/pip_parse/requirements_windows.txt b/examples/pip_parse/requirements_windows.txt
index 18ab80f..4debc11 100644
--- a/examples/pip_parse/requirements_windows.txt
+++ b/examples/pip_parse/requirements_windows.txt
@@ -12,9 +12,9 @@
     --hash=sha256:33e0952d7dd6374af8dbf6768cc4ddf3ccfefc244f9986d4074704f2fbd18900 \
     --hash=sha256:7077a4984b02b6727ac10f1f7294484f737443d7e2e66c5e4380e41a3ae0b4ed
     # via sphinx
-certifi==2022.12.7 \
-    --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
-    --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+certifi==2023.7.22 \
+    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
+    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
     # via requests
 chardet==4.0.0 \
     --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \
@@ -40,9 +40,9 @@
     --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \
     --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743
     # via sphinx
-jinja2==3.1.2 \
-    --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
-    --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
+jinja2==3.1.4 \
+    --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
+    --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
     # via sphinx
 markupsafe==2.1.3 \
     --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
@@ -236,7 +236,7 @@
     # via importlib-metadata
 
 # The following packages are considered to be unsafe in a requirements file:
-setuptools==65.6.3 \
-    --hash=sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54 \
-    --hash=sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75
+setuptools==70.0.0 \
+    --hash=sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4 \
+    --hash=sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0
     # via yamllint
diff --git a/examples/pip_parse_vendored/.bazelrc b/examples/pip_parse_vendored/.bazelrc
index b90bf8f..3818a03 100644
--- a/examples/pip_parse_vendored/.bazelrc
+++ b/examples/pip_parse_vendored/.bazelrc
@@ -2,7 +2,6 @@
 
 # Windows requires these for multi-python support:
 build --enable_runfiles
-startup --windows_enable_symlinks
 
 # Vendoring requirements.bzl files isn't necessary under bzlmod
 # When workspace support is dropped, this example can be removed.
diff --git a/examples/pip_parse_vendored/BUILD.bazel b/examples/pip_parse_vendored/BUILD.bazel
index ddf3281..e2b1f5d 100644
--- a/examples/pip_parse_vendored/BUILD.bazel
+++ b/examples/pip_parse_vendored/BUILD.bazel
@@ -1,6 +1,9 @@
+load("@bazel_skylib//rules:build_test.bzl", "build_test")
 load("@bazel_skylib//rules:diff_test.bzl", "diff_test")
 load("@bazel_skylib//rules:write_file.bzl", "write_file")
+load("@rules_python//python:defs.bzl", "py_test")
 load("@rules_python//python:pip.bzl", "compile_pip_requirements")
+load("//:requirements.bzl", "all_data_requirements", "all_requirements", "all_whl_requirements", "requirement")
 
 # This rule adds a convenient way to update the requirements.txt
 # lockfile based on the requirements.in.
@@ -9,21 +12,26 @@
     src = "requirements.in",
 )
 
-# The requirements.bzl file is generated with a reference to the interpreter for the host platform.
-# In order to check in a platform-agnostic file, we have to replace that reference with the symbol
-# loaded from our python toolchain.
+# The requirements.bzl file is using the hub repo to access packages via the
+# `requirement` macro and when the requirements.bzl is vendored, the hub
+# repo won't be present. As a result, we have to adjust the label scheme in
+# the requirements.bzl to make sure that they continue to work.
 genrule(
-    name = "make_platform_agnostic",
-    srcs = ["@pip//:requirements.bzl"],
+    name = "requirement_bzl",
+    srcs = ["@pip_deps_to_be_vendored//:requirements.bzl"],
     outs = ["requirements.clean.bzl"],
     cmd = " | ".join([
         "cat $<",
-        # Insert our load statement after the existing one so we don't produce a file with buildifier warnings
-        """sed -e '/^load.*.pip.bzl/i\\'$$'\\n''load("@python39//:defs.bzl", "interpreter")'""",
-        # Replace the bazel 6.0.0 specific comment with something that bazel 5.4.0 would produce.
-        # This enables this example to be run as a test under bazel 5.4.0.
-        """sed -e 's#@//#//#'""",
-        """sed 's#"@python39_.*//:bin/python3"#interpreter#' >$@""",
+        # Substitute the name of the hub to ensure that the dependencies do
+        # not require the hub repo initialized in the WORKSPACE.
+        "sed -e 's/pip_deps_to_be_vendored/my_project_pip_deps_vendored/g'",
+        # Change the labels from using the hub repo to using the spoke repos
+        # directly.
+        "sed -e 's|//\\([^:]*\\):pkg|_\\1//:pkg|g'",
+        "sed -e 's|//\\([^:]*\\):whl|_\\1//:whl|g'",
+        "sed -e 's|//\\([^:]*\\):data|_\\1//:data|g'",
+        # Change the convenience macros to use the same naming.
+        "sed -e 's|//{}:{}|_{}//:{}|g' >$@",
     ]),
 )
 
@@ -42,7 +50,7 @@
 sh_binary(
     name = "vendor_requirements",
     srcs = ["update.sh"],
-    data = [":make_platform_agnostic"],
+    data = [":requirement_bzl"],
 )
 
 # Similarly ensures that the requirements.bzl file is updated
@@ -51,5 +59,18 @@
     name = "test_vendored",
     failure_message = "Please run:  bazel run //:vendor_requirements",
     file1 = "requirements.bzl",
-    file2 = ":make_platform_agnostic",
+    file2 = "requirement_bzl",
+)
+
+py_test(
+    name = "test_dependency_usage",
+    srcs = ["test_dependency_usage.py"],
+    deps = [
+        requirement("requests"),
+    ],
+)
+
+build_test(
+    name = "test_requirement_lists",
+    targets = all_requirements + all_whl_requirements + all_data_requirements,
 )
diff --git a/examples/pip_parse_vendored/WORKSPACE b/examples/pip_parse_vendored/WORKSPACE
index 157f70a..d7a11ea 100644
--- a/examples/pip_parse_vendored/WORKSPACE
+++ b/examples/pip_parse_vendored/WORKSPACE
@@ -1,4 +1,4 @@
-workspace(name = "pip_repository_annotations_example")
+workspace(name = "pip_parse_vendored_example")
 
 local_repository(
     name = "rules_python",
@@ -14,14 +14,20 @@
     python_version = "3.9",
 )
 
-load("@python39//:defs.bzl", "interpreter")
 load("@rules_python//python:pip.bzl", "pip_parse")
 
 # This repository isn't referenced, except by our test that asserts the requirements.bzl is updated.
 # It also wouldn't be needed by users of this ruleset.
+# If you're using envsubst with extra_pip_args, as we do below, the value of the environment
+# variables at the time we generate requirements.bzl don't make it into the file, as you may
+# verify by inspection; the environment variables at a later time, when we download the
+# packages, will be the ones that take effect.
 pip_parse(
-    name = "pip",
-    python_interpreter_target = interpreter,
+    # We choose a unique name here to make sure we can do some cleanup on it.
+    name = "pip_deps_to_be_vendored",
+    envsubst = ["PIP_RETRIES"],
+    extra_pip_args = ["--retries=${PIP_RETRIES:-5}"],
+    python_interpreter_target = "@python39_host//:python",
     requirements_lock = "//:requirements.txt",
 )
 
diff --git a/examples/pip_parse_vendored/requirements.bzl b/examples/pip_parse_vendored/requirements.bzl
index adbee66..50bfe9f 100644
--- a/examples/pip_parse_vendored/requirements.bzl
+++ b/examples/pip_parse_vendored/requirements.bzl
@@ -1,41 +1,72 @@
 """Starlark representation of locked requirements.
 
-@generated by rules_python pip_parse repository rule
-from //:requirements.txt
+@generated by rules_python pip_parse repository rule.
 """
 
-load("@python39//:defs.bzl", "interpreter")
 load("@rules_python//python:pip.bzl", "pip_utils")
 load("@rules_python//python/pip_install:pip_repository.bzl", "group_library", "whl_library")
 
-all_requirements = ["@pip//certifi:pkg", "@pip//charset_normalizer:pkg", "@pip//idna:pkg", "@pip//requests:pkg", "@pip//urllib3:pkg"]
+all_requirements = [
+    "@my_project_pip_deps_vendored_certifi//:pkg",
+    "@my_project_pip_deps_vendored_charset_normalizer//:pkg",
+    "@my_project_pip_deps_vendored_idna//:pkg",
+    "@my_project_pip_deps_vendored_requests//:pkg",
+    "@my_project_pip_deps_vendored_urllib3//:pkg",
+]
 
-all_whl_requirements_by_package = {"certifi": "@pip//certifi:whl", "charset_normalizer": "@pip//charset_normalizer:whl", "idna": "@pip//idna:whl", "requests": "@pip//requests:whl", "urllib3": "@pip//urllib3:whl"}
+all_whl_requirements_by_package = {
+    "certifi": "@my_project_pip_deps_vendored_certifi//:whl",
+    "charset_normalizer": "@my_project_pip_deps_vendored_charset_normalizer//:whl",
+    "idna": "@my_project_pip_deps_vendored_idna//:whl",
+    "requests": "@my_project_pip_deps_vendored_requests//:whl",
+    "urllib3": "@my_project_pip_deps_vendored_urllib3//:whl",
+}
 
 all_whl_requirements = all_whl_requirements_by_package.values()
 
-all_data_requirements = ["@pip//certifi:data", "@pip//charset_normalizer:data", "@pip//idna:data", "@pip//requests:data", "@pip//urllib3:data"]
+all_data_requirements = [
+    "@my_project_pip_deps_vendored_certifi//:data",
+    "@my_project_pip_deps_vendored_charset_normalizer//:data",
+    "@my_project_pip_deps_vendored_idna//:data",
+    "@my_project_pip_deps_vendored_requests//:data",
+    "@my_project_pip_deps_vendored_urllib3//:data",
+]
 
-_packages = [("pip_certifi", "certifi==2023.7.22     --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082     --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"), ("pip_charset_normalizer", "charset-normalizer==2.1.1     --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845     --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"), ("pip_idna", "idna==3.4     --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4     --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"), ("pip_requests", "requests==2.28.1     --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983     --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"), ("pip_urllib3", "urllib3==1.26.13     --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc     --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8")]
-_config = {"download_only": False, "enable_implicit_namespace_pkgs": False, "environment": {}, "extra_pip_args": [], "isolated": True, "pip_data_exclude": [], "python_interpreter": "python3", "python_interpreter_target": interpreter, "quiet": True, "repo": "pip", "repo_prefix": "pip_", "timeout": 600}
+_packages = [
+    ("my_project_pip_deps_vendored_certifi", "certifi==2023.7.22     --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082     --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"),
+    ("my_project_pip_deps_vendored_charset_normalizer", "charset-normalizer==2.1.1     --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845     --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"),
+    ("my_project_pip_deps_vendored_idna", "idna==3.4     --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4     --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"),
+    ("my_project_pip_deps_vendored_requests", "requests==2.28.1     --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983     --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"),
+    ("my_project_pip_deps_vendored_urllib3", "urllib3==1.26.13     --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc     --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"),
+]
+_config = {
+    "download_only": False,
+    "enable_implicit_namespace_pkgs": False,
+    "environment": {},
+    "envsubst": ["PIP_RETRIES"],
+    "extra_pip_args": ["--retries=${PIP_RETRIES:-5}"],
+    "isolated": True,
+    "pip_data_exclude": [],
+    "python_interpreter": "python3",
+    "python_interpreter_target": "@python39_host//:python",
+    "quiet": True,
+    "repo": "my_project_pip_deps_vendored",
+    "repo_prefix": "my_project_pip_deps_vendored_",
+    "timeout": 600,
+}
 _annotations = {}
 
 def requirement(name):
-    return "@pip//{}:{}".format(pip_utils.normalize_name(name), "pkg")
+    return "@my_project_pip_deps_vendored_{}//:{}".format(pip_utils.normalize_name(name), "pkg")
 
 def whl_requirement(name):
-    return "@pip//{}:{}".format(pip_utils.normalize_name(name), "whl")
+    return "@my_project_pip_deps_vendored_{}//:{}".format(pip_utils.normalize_name(name), "whl")
 
 def data_requirement(name):
-    return "@pip//{}:{}".format(pip_utils.normalize_name(name), "data")
+    return "@my_project_pip_deps_vendored_{}//:{}".format(pip_utils.normalize_name(name), "data")
 
 def dist_info_requirement(name):
-    return "@pip//{}:{}".format(pip_utils.normalize_name(name), "dist_info")
-
-def entry_point(pkg, script = None):
-    if not script:
-        script = pkg
-    return "@pip_" + pip_utils.normalize_name(pkg) + "//:rules_python_wheel_entry_point_" + script
+    return "@my_project_pip_deps_vendored_{}//:{}".format(pip_utils.normalize_name(name), "dist_info")
 
 def _get_annotation(requirement):
     # This expects to parse `setuptools==58.2.0     --hash=sha256:2551203ae6955b9876741a26ab3e767bb3242dafe86a32a749ea0d78b6792f11`
@@ -60,10 +91,10 @@
         for requirement in group_requirements
     }
 
-    group_repo = "pip__groups"
+    group_repo = "my_project_pip_deps_vendored__groups"
     group_library(
         name = group_repo,
-        repo_prefix = "pip_",
+        repo_prefix = "my_project_pip_deps_vendored_",
         groups = all_requirement_groups,
     )
 
@@ -72,7 +103,7 @@
     whl_config.update(whl_library_kwargs)
 
     for name, requirement in _packages:
-        group_name = requirement_group_mapping.get(name.replace("pip_", ""))
+        group_name = requirement_group_mapping.get(name.replace("my_project_pip_deps_vendored_", ""))
         group_deps = all_requirement_groups.get(group_name, [])
 
         whl_library(
diff --git a/examples/pip_parse_vendored/test_dependency_usage.py b/examples/pip_parse_vendored/test_dependency_usage.py
new file mode 100644
index 0000000..e2cf970
--- /dev/null
+++ b/examples/pip_parse_vendored/test_dependency_usage.py
@@ -0,0 +1,12 @@
+import unittest
+
+import requests
+
+
+class TestDependencies(unittest.TestCase):
+    def test_import(self):
+        self.assertIsNotNone(requests.get)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/examples/pip_repository_annotations/WORKSPACE b/examples/pip_repository_annotations/WORKSPACE
index 3535055..8540555 100644
--- a/examples/pip_repository_annotations/WORKSPACE
+++ b/examples/pip_repository_annotations/WORKSPACE
@@ -14,7 +14,6 @@
     python_version = "3.9",
 )
 
-load("@python39//:defs.bzl", "interpreter")
 load("@rules_python//python:pip.bzl", "package_annotation", "pip_parse")
 
 # Here we can see an example of annotations being applied to an arbitrary
@@ -54,7 +53,7 @@
 pip_parse(
     name = "pip",
     annotations = ANNOTATIONS,
-    python_interpreter_target = interpreter,
+    python_interpreter_target = "@python39_host//:python",
     requirements_lock = "//:requirements.txt",
 )
 
diff --git a/examples/pip_repository_annotations/requirements.txt b/examples/pip_repository_annotations/requirements.txt
index 290d93e..f1069a7 100644
--- a/examples/pip_repository_annotations/requirements.txt
+++ b/examples/pip_repository_annotations/requirements.txt
@@ -16,9 +16,9 @@
     --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
     --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
     # via requests
-idna==3.4 \
-    --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
-    --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
+idna==3.7 \
+    --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \
+    --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0
     # via requests
 requests[security]==2.28.1 \
     --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \
diff --git a/examples/py_proto_library/WORKSPACE b/examples/py_proto_library/WORKSPACE
index bf38112..81f189d 100644
--- a/examples/py_proto_library/WORKSPACE
+++ b/examples/py_proto_library/WORKSPACE
@@ -1,4 +1,6 @@
-workspace(name = "rules_python_py_proto_library_example")
+# NB: short workspace name is required to workaround PATH length limitation, see
+# https://github.com/bazelbuild/bazel/issues/18683#issuecomment-1843857373
+workspace(name = "p")
 
 # The following local_path_override is only needed to run this example as part of our CI.
 local_repository(
@@ -24,21 +26,9 @@
 
 http_archive(
     name = "rules_proto",
-    sha256 = "dc3fb206a2cb3441b485eb1e423165b231235a1ea9b031b4433cf7bc1fa460dd",
-    strip_prefix = "rules_proto-5.3.0-21.7",
-    urls = [
-        "https://github.com/bazelbuild/rules_proto/archive/refs/tags/5.3.0-21.7.tar.gz",
-    ],
-)
-
-http_archive(
-    name = "com_google_protobuf",
-    sha256 = "75be42bd736f4df6d702a0e4e4d30de9ee40eac024c4b845d17ae4cc831fe4ae",
-    strip_prefix = "protobuf-21.7",
-    urls = [
-        "https://mirror.bazel.build/github.com/protocolbuffers/protobuf/archive/v21.7.tar.gz",
-        "https://github.com/protocolbuffers/protobuf/archive/v21.7.tar.gz",
-    ],
+    sha256 = "904a8097fae42a690c8e08d805210e40cccb069f5f9a0f6727cf4faa7bed2c9c",
+    strip_prefix = "rules_proto-6.0.0-rc1",
+    url = "https://github.com/bazelbuild/rules_proto/releases/download/6.0.0-rc1/rules_proto-6.0.0-rc1.tar.gz",
 )
 
 load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies", "rules_proto_toolchains")
@@ -46,3 +36,14 @@
 rules_proto_dependencies()
 
 rules_proto_toolchains()
+
+http_archive(
+    name = "com_google_protobuf",
+    sha256 = "4fc5ff1b2c339fb86cd3a25f0b5311478ab081e65ad258c6789359cd84d421f8",
+    strip_prefix = "protobuf-26.1",
+    urls = ["https://github.com/protocolbuffers/protobuf/archive/v26.1.tar.gz"],
+)
+
+load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
+
+protobuf_deps()
diff --git a/examples/py_proto_library/message_test.py b/examples/py_proto_library/message_test.py
index 3aee1ee..b1a6942 100644
--- a/examples/py_proto_library/message_test.py
+++ b/examples/py_proto_library/message_test.py
@@ -3,13 +3,14 @@
 
 from another_proto import message_pb2
 
+
 class TestCase(unittest.TestCase):
     def test_message(self):
         got = message_pb2.TestMessage(
-            index = 5,
+            index=5,
         )
         self.assertIsNotNone(got)
 
 
 if __name__ == "__main__":
-  sys.exit(unittest.main())
+    sys.exit(unittest.main())
diff --git a/examples/wheel/BUILD.bazel b/examples/wheel/BUILD.bazel
index ab4f3a3..aa063ce 100644
--- a/examples/wheel/BUILD.bazel
+++ b/examples/wheel/BUILD.bazel
@@ -13,10 +13,14 @@
 # limitations under the License.
 
 load("@bazel_skylib//rules:build_test.bzl", "build_test")
+load("@bazel_skylib//rules:write_file.bzl", "write_file")
 load("//examples/wheel/private:wheel_utils.bzl", "directory_writer", "make_variable_tags")
 load("//python:defs.bzl", "py_library", "py_test")
 load("//python:packaging.bzl", "py_package", "py_wheel")
+load("//python:pip.bzl", "compile_pip_requirements")
 load("//python:versions.bzl", "gen_python_config_settings")
+load("//python/entry_points:py_console_script_binary.bzl", "py_console_script_binary")
+load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")  # buildifier: disable=bzl-visibility
 
 package(default_visibility = ["//visibility:public"])
 
@@ -54,9 +58,11 @@
     testonly = True,  # Set this to verify the generated .dist target doesn't break things
     # Package data. We're building "example_minimal_library-0.0.1-py3-none-any.whl"
     distribution = "example_minimal_library",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     python_tag = "py3",
+    # NOTE: twine_binary = "//tools/publish:twine" does not work on non-bzlmod
+    # setups because the `//tools/publish:twine` produces multiple files and is
+    # unsuitable as the `src` to the underlying native_binary rule.
+    twine = None if BZLMOD_ENABLED else "@rules_python_publish_deps_twine//:pkg",
     version = "0.0.1",
     deps = [
         "//examples/wheel/lib:module_with_data",
@@ -78,8 +84,6 @@
     testonly = True,
     abi = "$(ABI)",
     distribution = "example_minimal_library",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     python_tag = "$(PYTHON_TAG)",
     toolchains = ["//examples/wheel:make_variable_tags"],
     version = "$(VERSION)",
@@ -99,8 +103,6 @@
     name = "minimal_with_py_library_with_stamp",
     # Package data. We're building "example_minimal_library-0.0.1-py3-none-any.whl"
     distribution = "example_minimal_library{BUILD_USER}",
-    incompatible_normalize_name = False,
-    incompatible_normalize_version = False,
     python_tag = "py3",
     stamp = 1,
     version = "0.1.{BUILD_TIMESTAMP}",
@@ -129,8 +131,6 @@
     name = "minimal_with_py_package",
     # Package data. We're building "example_minimal_package-0.0.1-py3-none-any.whl"
     distribution = "example_minimal_package",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     python_tag = "py3",
     version = "0.0.1",
     deps = [":example_pkg"],
@@ -164,8 +164,6 @@
         "//examples/wheel:README.md": "README",
     },
     homepage = "www.example.com",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     license = "Apache 2.0",
     project_urls = {
         "Bug Tracker": "www.example.com/issues",
@@ -187,8 +185,6 @@
     entry_points = {
         "console_scripts": ["main = foo.bar:baz"],
     },
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     python_tag = "py3",
     strip_path_prefixes = [
         "examples",
@@ -203,8 +199,6 @@
     name = "custom_package_root_multi_prefix",
     # Package data. We're building "custom_custom_package_root_multi_prefix-0.0.1-py3-none-any.whl"
     distribution = "example_custom_package_root_multi_prefix",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     python_tag = "py3",
     strip_path_prefixes = [
         "examples/wheel/lib",
@@ -220,8 +214,6 @@
     name = "custom_package_root_multi_prefix_reverse_order",
     # Package data. We're building "custom_custom_package_root_multi_prefix_reverse_order-0.0.1-py3-none-any.whl"
     distribution = "example_custom_package_root_multi_prefix_reverse_order",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     python_tag = "py3",
     strip_path_prefixes = [
         "examples/wheel",
@@ -236,8 +228,6 @@
 py_wheel(
     name = "python_requires_in_a_package",
     distribution = "example_python_requires_in_a_package",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     python_requires = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
     python_tag = "py3",
     version = "0.0.1",
@@ -249,8 +239,6 @@
 py_wheel(
     name = "use_rule_with_dir_in_outs",
     distribution = "use_rule_with_dir_in_outs",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     python_tag = "py3",
     version = "0.0.1",
     deps = [
@@ -264,8 +252,6 @@
     name = "python_abi3_binary_wheel",
     abi = "abi3",
     distribution = "example_python_abi3_binary_wheel",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     # these platform strings must line up with test_python_abi3_binary_wheel() in wheel_test.py
     platform = select({
         ":aarch64-apple-darwin": "macosx_11_0_arm64",
@@ -280,32 +266,73 @@
 )
 
 py_wheel(
-    name = "legacy_filename_escaping",
-    # Per https://www.python.org/dev/peps/pep-0427/#escaping-and-unicode
-    # runs of non-alphanumeric, non-digit symbols should be replaced with a single underscore.
-    # Unicode non-ascii letters should *not* be replaced with underscore.
-    distribution = "file~~name-escaping",
-    incompatible_normalize_name = False,
-    incompatible_normalize_version = False,
-    python_tag = "py3",
-    version = "0.0.1-r7",
-    deps = [":example_pkg"],
-)
-
-py_wheel(
     name = "filename_escaping",
     # Per https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode
     # runs of "-", "_" and "." should be replaced with a single underscore.
     # Unicode non-ascii letters aren't allowed according to
     # https://packaging.python.org/en/latest/specifications/name-normalization/.
     distribution = "File--Name-Escaping",
-    incompatible_normalize_name = True,
-    incompatible_normalize_version = True,
     python_tag = "py3",
     version = "v0.0.1.RC1+ubuntu-r7",
     deps = [":example_pkg"],
 )
 
+write_file(
+    name = "requires_file",
+    out = "requires.txt",
+    content = """\
+# Requirements file
+--index-url https://pypi.com
+
+tomli>=2.0.0
+starlark  # Example comment
+""".splitlines(),
+)
+
+write_file(
+    name = "extra_requires_file",
+    out = "extra_requires.txt",
+    content = """\
+# Extras Requirements file
+--index-url https://pypi.com
+
+pyyaml>=6.0.0,!=6.0.1
+toml; (python_version == "3.11" or python_version == "3.12") and python_version != "3.8"
+wheel; python_version == "3.11" or python_version == "3.12"  # Example comment
+""".splitlines(),
+)
+
+# py_wheel can use text files to specify their requirements. This
+# can be convenient for users of `compile_pip_requirements` who have
+# granular `requirements.in` files per package. This target shows
+# how to provide this file.
+py_wheel(
+    name = "requires_files",
+    distribution = "requires_files",
+    extra_requires_files = {":extra_requires.txt": "example"},
+    python_tag = "py3",
+    # py_wheel can use text files to specify their requirements. This
+    # can be convenient for users of `compile_pip_requirements` who have
+    # granular `requirements.in` files per package.
+    requires_file = ":requires.txt",
+    version = "0.0.1",
+    deps = [":example_pkg"],
+)
+
+# Package just a specific py_libraries, without their dependencies
+py_wheel(
+    name = "minimal_data_files",
+    testonly = True,  # Set this to verify the generated .dist target doesn't break things
+
+    # Re-using some files already checked into the repo.
+    data_files = {
+        "//examples/wheel:NOTICE": "scripts/NOTICE",
+        "README.md": "data/target/path/README.md",
+    },
+    distribution = "minimal_data_files",
+    version = "0.0.1",
+)
+
 py_test(
     name = "wheel_test",
     srcs = ["wheel_test.py"],
@@ -315,15 +342,52 @@
         ":custom_package_root_multi_prefix_reverse_order",
         ":customized",
         ":filename_escaping",
-        ":legacy_filename_escaping",
+        ":minimal_data_files",
         ":minimal_with_py_library",
         ":minimal_with_py_library_with_stamp",
         ":minimal_with_py_package",
         ":python_abi3_binary_wheel",
         ":python_requires_in_a_package",
+        ":requires_files",
         ":use_rule_with_dir_in_outs",
     ],
     deps = [
         "//python/runfiles",
     ],
 )
+
+# Test wheel publishing
+
+compile_pip_requirements(
+    name = "requirements_server",
+    src = "requirements_server.in",
+)
+
+py_test(
+    name = "test_publish",
+    srcs = ["test_publish.py"],
+    data = [
+        ":minimal_with_py_library",
+        ":minimal_with_py_library.publish",
+        ":pypiserver",
+    ],
+    env = {
+        "PUBLISH_PATH": "$(location :minimal_with_py_library.publish)",
+        "SERVER_PATH": "$(location :pypiserver)",
+        "WHEEL_PATH": "$(rootpath :minimal_with_py_library)",
+    },
+    target_compatible_with = select({
+        "@platforms//os:linux": [],
+        "@platforms//os:macos": [],
+        "//conditions:default": ["@platforms//:incompatible"],
+    }),
+    deps = [
+        "@pypiserver//pypiserver",
+    ],
+)
+
+py_console_script_binary(
+    name = "pypiserver",
+    pkg = "@pypiserver//pypiserver",
+    script = "pypi-server",
+)
diff --git a/examples/wheel/private/directory_writer.py b/examples/wheel/private/directory_writer.py
index 7d9a93e..4b69f3a 100644
--- a/examples/wheel/private/directory_writer.py
+++ b/examples/wheel/private/directory_writer.py
@@ -48,7 +48,7 @@
 
     args.output.mkdir(parents=True, exist_ok=True)
 
-    for (path, content) in args.files:
+    for path, content in args.files:
         new_file = args.output / path
         new_file.parent.mkdir(parents=True, exist_ok=True)
         new_file.write_text(content)
diff --git a/examples/wheel/requirements_server.in b/examples/wheel/requirements_server.in
new file mode 100644
index 0000000..d5d483d
--- /dev/null
+++ b/examples/wheel/requirements_server.in
@@ -0,0 +1,2 @@
+# This is for running publishing tests
+pypiserver
diff --git a/examples/wheel/requirements_server.txt b/examples/wheel/requirements_server.txt
new file mode 100644
index 0000000..eccab12
--- /dev/null
+++ b/examples/wheel/requirements_server.txt
@@ -0,0 +1,16 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+#    bazel run //examples/wheel:requirements_server.update
+#
+pypiserver==2.0.1 \
+    --hash=sha256:1dd98fb99d2da4199fb44c7284e57d69a9f7fda2c6c8dc01975c151c592677bf \
+    --hash=sha256:7b58fbd54468235f79e4de07c4f7a9ff829e7ac6869bef47ec11e0710138e162
+    # via -r examples/wheel/requirements_server.in
+
+# The following packages are considered to be unsafe in a requirements file:
+pip==24.0 \
+    --hash=sha256:ba0d021a166865d2265246961bec0152ff124de910c5cc39f1156ce3fa7c69dc \
+    --hash=sha256:ea9bd1a847e8c5774a5777bb398c19e80bcd4e2aa16a4b301b718fe6f593aba2
+    # via pypiserver
diff --git a/examples/wheel/test_publish.py b/examples/wheel/test_publish.py
new file mode 100644
index 0000000..496642a
--- /dev/null
+++ b/examples/wheel/test_publish.py
@@ -0,0 +1,117 @@
+import os
+import socket
+import subprocess
+import textwrap
+import time
+import unittest
+from contextlib import closing
+from pathlib import Path
+from urllib.request import urlopen
+
+
+def find_free_port():
+    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
+        s.bind(("", 0))
+        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        return s.getsockname()[1]
+
+
+class TestTwineUpload(unittest.TestCase):
+    def setUp(self):
+        self.maxDiff = 1000
+        self.port = find_free_port()
+        self.url = f"http://localhost:{self.port}"
+        self.dir = Path(os.environ["TEST_TMPDIR"])
+
+        self.log_file = self.dir / "pypiserver-log.txt"
+        self.log_file.touch()
+        _storage_dir = self.dir / "data"
+        for d in [_storage_dir]:
+            d.mkdir(exist_ok=True)
+
+        print("Starting PyPI server...")
+        self._server = subprocess.Popen(
+            [
+                str(Path(os.environ["SERVER_PATH"])),
+                "run",
+                "--verbose",
+                "--log-file",
+                str(self.log_file),
+                "--host",
+                "localhost",
+                "--port",
+                str(self.port),
+                # Allow unauthenticated access
+                "--authenticate",
+                ".",
+                "--passwords",
+                ".",
+                str(_storage_dir),
+            ],
+        )
+
+        line = "Hit Ctrl-C to quit"
+        interval = 0.1
+        wait_seconds = 40
+        for _ in range(int(wait_seconds / interval)):  # 40 second timeout
+            current_logs = self.log_file.read_text()
+            if line in current_logs:
+                print(current_logs.strip())
+                print("...")
+                break
+
+            time.sleep(0.1)
+        else:
+            raise RuntimeError(
+                f"Could not get the server running fast enough, waited for {wait_seconds}s"
+            )
+
+    def tearDown(self):
+        self._server.terminate()
+        print(f"Stopped PyPI server, all logs:\n{self.log_file.read_text()}")
+
+    def test_upload_and_query_simple_api(self):
+        # Given
+        script_path = Path(os.environ["PUBLISH_PATH"])
+        whl = Path(os.environ["WHEEL_PATH"])
+
+        # When I publish a whl to a package registry
+        subprocess.check_output(
+            [
+                str(script_path),
+                "--no-color",
+                "upload",
+                str(whl),
+                "--verbose",
+                "--non-interactive",
+                "--disable-progress-bar",
+            ],
+            env={
+                "TWINE_REPOSITORY_URL": self.url,
+                "TWINE_USERNAME": "dummy",
+                "TWINE_PASSWORD": "dummy",
+            },
+        )
+
+        # Then I should be able to get its contents
+        with urlopen(self.url + "/example-minimal-library/") as response:
+            got_content = response.read().decode("utf-8")
+            want_content = """
+<!DOCTYPE html>
+<html>
+    <head>
+        <title>Links for example-minimal-library</title>
+    </head>
+    <body>
+        <h1>Links for example-minimal-library</h1>
+             <a href="/packages/example_minimal_library-0.0.1-py3-none-any.whl#sha256=79a4e9c1838c0631d5d8fa49a26efd6e9a364f6b38d9597c0f6df112271a0e28">example_minimal_library-0.0.1-py3-none-any.whl</a><br>
+    </body>
+</html>"""
+            self.assertEqual(
+                textwrap.dedent(want_content).strip(),
+                textwrap.dedent(got_content).strip(),
+            )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/examples/wheel/wheel_test.py b/examples/wheel/wheel_test.py
index 43fbe0c..66ebd50 100644
--- a/examples/wheel/wheel_test.py
+++ b/examples/wheel/wheel_test.py
@@ -15,6 +15,7 @@
 import hashlib
 import os
 import platform
+import stat
 import subprocess
 import unittest
 import zipfile
@@ -58,7 +59,11 @@
         for zinfo in zf.infolist():
             self.assertEqual(zinfo.date_time, (1980, 1, 1, 0, 0, 0), msg=zinfo.filename)
             self.assertEqual(zinfo.create_system, 3, msg=zinfo.filename)
-            self.assertEqual(zinfo.external_attr, 0o777 << 16, msg=zinfo.filename)
+            self.assertEqual(
+                zinfo.external_attr,
+                (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO | stat.S_IFREG) << 16,
+                msg=zinfo.filename,
+            )
             self.assertEqual(
                 zinfo.compress_type, zipfile.ZIP_DEFLATED, msg=zinfo.filename
             )
@@ -78,7 +83,7 @@
                 ],
             )
         self.assertFileSha256Equal(
-            filename, "2818e70fdebd148934f41820f8c54d5d7676d783c0d66c7c8af2ee9141e7ddc7"
+            filename, "79a4e9c1838c0631d5d8fa49a26efd6e9a364f6b38d9597c0f6df112271a0e28"
         )
 
     def test_py_package_wheel(self):
@@ -100,7 +105,7 @@
                 ],
             )
         self.assertFileSha256Equal(
-            filename, "273e27adf9bf90287a42ac911dcece8aa95f2905c37d786725477b26de23627c"
+            filename, "b4815a1d3a17cc6a5ce717ed42b940fa7788cb5168f5c1de02f5f50abed7083e"
         )
 
     def test_customized_wheel(self):
@@ -189,45 +194,7 @@
 second = second.main:s""",
             )
         self.assertFileSha256Equal(
-            filename, "48eed93258bba0bb366c879b77917d947267d89e7e60005d1766d844fb909118"
-        )
-
-    def test_legacy_filename_escaping(self):
-        filename = self._get_path(
-            "file_name_escaping-0.0.1_r7-py3-none-any.whl",
-        )
-        with zipfile.ZipFile(filename) as zf:
-            self.assertAllEntriesHasReproducibleMetadata(zf)
-            self.assertEqual(
-                zf.namelist(),
-                [
-                    "examples/wheel/lib/data.txt",
-                    "examples/wheel/lib/module_with_data.py",
-                    "examples/wheel/lib/simple_module.py",
-                    "examples/wheel/main.py",
-                    # PEP calls for replacing only in the archive filename.
-                    # Alas setuptools also escapes in the dist-info directory
-                    # name, so let's be compatible.
-                    "file_name_escaping-0.0.1_r7.dist-info/WHEEL",
-                    "file_name_escaping-0.0.1_r7.dist-info/METADATA",
-                    "file_name_escaping-0.0.1_r7.dist-info/RECORD",
-                ],
-            )
-            metadata_contents = zf.read(
-                "file_name_escaping-0.0.1_r7.dist-info/METADATA"
-            )
-            self.assertEqual(
-                metadata_contents,
-                b"""\
-Metadata-Version: 2.1
-Name: file~~name-escaping
-Version: 0.0.1-r7
-
-UNKNOWN
-""",
-            )
-        self.assertFileSha256Equal(
-            filename, "ace5fab6458f8c3b4b50801b8e8214288bba786472e81547fced743a67531312"
+            filename, "27f3038be6e768d28735441a1bc567eca2213bd3568d18b22a414e6399a2d48e"
         )
 
     def test_filename_escaping(self):
@@ -293,7 +260,7 @@
             for line in record_contents.splitlines():
                 self.assertFalse(line.startswith("/"))
         self.assertFileSha256Equal(
-            filename, "16e0345c102c6866fed34999d8de5aed7f351adbf372b27adef3bc15161db65e"
+            filename, "f034b3278781f4df32a33df70d794bb94170b450e477c8bd9cd42d2d922476ae"
         )
 
     def test_custom_package_root_multi_prefix_wheel(self):
@@ -324,7 +291,7 @@
             for line in record_contents.splitlines():
                 self.assertFalse(line.startswith("/"))
         self.assertFileSha256Equal(
-            filename, "d2031eb21c69e290db5eac76b0dc026858e9dbdb3da2dc0314e4e9f69eab2e1a"
+            filename, "ff19f5e4540948247742716338bb4194d619cb56df409045d1a99f265ce8e36c"
         )
 
     def test_custom_package_root_multi_prefix_reverse_order_wheel(self):
@@ -355,7 +322,7 @@
             for line in record_contents.splitlines():
                 self.assertFalse(line.startswith("/"))
         self.assertFileSha256Equal(
-            filename, "a37b90685600ccfa56cc5405d1e9a3729ed21dfb31c76fd356e491e2af989566"
+            filename, "4331e378ea8b8148409ae7c02177e4eb24d151a85ef937bb44b79ff5258d634b"
         )
 
     def test_python_requires_wheel(self):
@@ -380,7 +347,7 @@
 """,
             )
         self.assertFileSha256Equal(
-            filename, "529afa454113572e6cd91f069cc9cfe5c28369f29cd495fff19d0ecce389d8e4"
+            filename, "b34676828f93da8cd898d50dcd4f36e02fe273150e213aacb999310a05f5f38c"
         )
 
     def test_python_abi3_binary_wheel(self):
@@ -445,20 +412,20 @@
                 ],
             )
         self.assertFileSha256Equal(
-            filename, "cc9484d527075f07651ca0e7dff4a185c1314020726bcad55fe28d1bba0fec2e"
+            filename, "ac9216bd54dcae1a6270c35fccf8a73b0be87c1b026c28e963b7c76b2f9b722b"
         )
 
     def test_rule_expands_workspace_status_keys_in_wheel_metadata(self):
         filename = self._get_path(
-            "example_minimal_library_BUILD_USER_-0.1._BUILD_TIMESTAMP_-py3-none-any.whl"
+            "example_minimal_library{BUILD_USER}-0.1.{BUILD_TIMESTAMP}-py3-none-any.whl"
         )
 
         with zipfile.ZipFile(filename) as zf:
             self.assertAllEntriesHasReproducibleMetadata(zf)
             metadata_file = None
             for f in zf.namelist():
-                self.assertNotIn("_BUILD_TIMESTAMP_", f)
-                self.assertNotIn("_BUILD_USER_", f)
+                self.assertNotIn("{BUILD_TIMESTAMP}", f)
+                self.assertNotIn("{BUILD_USER}", f)
                 if os.path.basename(f) == "METADATA":
                     metadata_file = f
             self.assertIsNotNone(metadata_file)
@@ -476,6 +443,52 @@
             self.assertNotIn("{BUILD_TIMESTAMP}", version)
             self.assertNotIn("{BUILD_USER}", name)
 
+    def test_requires_file_and_extra_requires_files(self):
+        filename = self._get_path("requires_files-0.0.1-py3-none-any.whl")
+
+        with zipfile.ZipFile(filename) as zf:
+            self.assertAllEntriesHasReproducibleMetadata(zf)
+            metadata_file = None
+            for f in zf.namelist():
+                if os.path.basename(f) == "METADATA":
+                    metadata_file = f
+            self.assertIsNotNone(metadata_file)
+
+            requires = []
+            with zf.open(metadata_file) as fp:
+                for line in fp:
+                    if line.startswith(b"Requires-Dist:"):
+                        requires.append(line.decode("utf-8").strip())
+
+            print(requires)
+            self.assertEqual(
+                [
+                    "Requires-Dist: tomli>=2.0.0",
+                    "Requires-Dist: starlark",
+                    "Requires-Dist: pyyaml!=6.0.1,>=6.0.0; extra == 'example'",
+                    'Requires-Dist: toml; ((python_version == "3.11" or python_version == "3.12") and python_version != "3.8") and extra == \'example\'',
+                    'Requires-Dist: wheel; (python_version == "3.11" or python_version == "3.12") and extra == \'example\'',
+                ],
+                requires,
+            )
+
+    def test_minimal_data_files(self):
+        filename = self._get_path("minimal_data_files-0.0.1-py3-none-any.whl")
+
+        with zipfile.ZipFile(filename) as zf:
+            self.assertAllEntriesHasReproducibleMetadata(zf)
+            metadata_file = None
+            self.assertEqual(
+                zf.namelist(),
+                [
+                    "minimal_data_files-0.0.1.dist-info/WHEEL",
+                    "minimal_data_files-0.0.1.dist-info/METADATA",
+                    "minimal_data_files-0.0.1.data/data/target/path/README.md",
+                    "minimal_data_files-0.0.1.data/scripts/NOTICE",
+                    "minimal_data_files-0.0.1.dist-info/RECORD",
+                ],
+            )
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/gazelle/.bazelrc b/gazelle/.bazelrc
index 7a67d3e..e10cd78 100644
--- a/gazelle/.bazelrc
+++ b/gazelle/.bazelrc
@@ -10,7 +10,6 @@
 
 # Windows makes use of runfiles for some rules
 build --enable_runfiles
-startup --windows_enable_symlinks
 
 # Do NOT implicitly create empty __init__.py files in the runfiles tree.
 # By default, these are created in every directory containing Python source code
diff --git a/gazelle/BUILD.bazel b/gazelle/BUILD.bazel
index e00c74a..f74338d 100644
--- a/gazelle/BUILD.bazel
+++ b/gazelle/BUILD.bazel
@@ -1,4 +1,4 @@
-load("@bazel_gazelle//:def.bzl", "DEFAULT_LANGUAGES", "gazelle", "gazelle_binary")
+load("@bazel_gazelle//:def.bzl", "gazelle")
 
 # Gazelle configuration options.
 # See https://github.com/bazelbuild/bazel-gazelle#running-gazelle-with-bazel
@@ -6,19 +6,13 @@
 # gazelle:exclude bazel-out
 gazelle(
     name = "gazelle",
-    gazelle = ":gazelle_binary",
-)
-
-gazelle_binary(
-    name = "gazelle_binary",
-    languages = DEFAULT_LANGUAGES + ["//python"],
 )
 
 gazelle(
     name = "gazelle_update_repos",
     args = [
         "-from_file=go.mod",
-        "-to_macro=deps.bzl%gazelle_deps",
+        "-to_macro=deps.bzl%go_deps",
         "-prune",
     ],
     command = "update-repos",
diff --git a/gazelle/MODULE.bazel b/gazelle/MODULE.bazel
index 8c6ad19..0418b39 100644
--- a/gazelle/MODULE.bazel
+++ b/gazelle/MODULE.bazel
@@ -4,17 +4,32 @@
     compatibility_level = 1,
 )
 
+bazel_dep(name = "bazel_skylib", version = "1.6.1")
 bazel_dep(name = "rules_python", version = "0.18.0")
 bazel_dep(name = "rules_go", version = "0.41.0", repo_name = "io_bazel_rules_go")
 bazel_dep(name = "gazelle", version = "0.33.0", repo_name = "bazel_gazelle")
 
+local_path_override(
+    module_name = "rules_python",
+    path = "..",
+)
+
 go_deps = use_extension("@bazel_gazelle//:extensions.bzl", "go_deps")
 go_deps.from_file(go_mod = "//:go.mod")
 use_repo(
     go_deps,
     "com_github_bazelbuild_buildtools",
-    "com_github_bmatcuk_doublestar",
+    "com_github_bmatcuk_doublestar_v4",
     "com_github_emirpasic_gods",
     "com_github_ghodss_yaml",
+    "com_github_smacker_go_tree_sitter",
+    "com_github_stretchr_testify",
     "in_gopkg_yaml_v2",
+    "org_golang_x_sync",
+)
+
+python_stdlib_list = use_extension("//python:extensions.bzl", "python_stdlib_list")
+use_repo(
+    python_stdlib_list,
+    "python_stdlib_list",
 )
diff --git a/gazelle/README.md b/gazelle/README.md
index a9a69cc..c0494d1 100644
--- a/gazelle/README.md
+++ b/gazelle/README.md
@@ -114,6 +114,10 @@
     pip_repository_name = "pip",
     # This should point to wherever we declare our python dependencies
     # (the same as what we passed to the modules_mapping rule in WORKSPACE)
+    # This argument is optional. If provided, the `.test` target is very
+    # fast because it just has to check an integrity field. If not provided,
+    # the integrity field is not added to the manifest which can help avoid
+    # merge conflicts in large repos.
     requirements = "//:requirements_lock.txt",
 )
 ```
@@ -168,32 +172,405 @@
 
 Python-specific directives are as follows:
 
-| **Directive**                        | **Default value** |
-|--------------------------------------|-------------------|
-| `# gazelle:python_extension`         |   `enabled`       |
-| Controls whether the Python extension is enabled or not. Sub-packages inherit this value. Can be either "enabled" or "disabled". | |
-| `# gazelle:python_root`              |    n/a            |
-| Sets a Bazel package as a Python root. This is used on monorepos with multiple Python projects that don't share the top-level of the workspace as the root. | |
-| `# gazelle:python_manifest_file_name`| `gazelle_python.yaml` |
-| Overrides the default manifest file name. | |
-| `# gazelle:python_ignore_files`      |     n/a           |
-| Controls the files which are ignored from the generated targets. | |
-| `# gazelle:python_ignore_dependencies`|    n/a           |
-| Controls the ignored dependencies from the generated targets. | |
-| `# gazelle:python_validate_import_statements`| `true` |
-| Controls whether the Python import statements should be validated. Can be "true" or "false" | |
-| `# gazelle:python_generation_mode`| `package` |
-| Controls the target generation mode. Can be "file", "package", or "project" | |
-| `# gazelle:python_generation_mode_per_file_include_init`| `package` |
-| Controls whether `__init__.py` files are included as srcs in each generated target when target generation mode is "file". Can be "true", or "false" | |
-| `# gazelle:python_library_naming_convention`| `$package_name$` |
-| Controls the `py_library` naming convention. It interpolates \$package_name\$ with the Bazel package name. E.g. if the Bazel package name is `foo`, setting this to `$package_name$_my_lib` would result in a generated target named `foo_my_lib`. | |
-| `# gazelle:python_binary_naming_convention` | `$package_name$_bin` |
-| Controls the `py_binary` naming convention. Follows the same interpolation rules as `python_library_naming_convention`. | |
-| `# gazelle:python_test_naming_convention` | `$package_name$_test` |
-| Controls the `py_test` naming convention. Follows the same interpolation rules as `python_library_naming_convention`. | |
-| `# gazelle:resolve py ...` | n/a |
+| **Directive**                                                                                                                                                                                                                                                                                   | **Default value** |
+|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------|
+| `# gazelle:python_extension`                                                                                                                                                                                                                                                                    |   `enabled`       |
+| Controls whether the Python extension is enabled or not. Sub-packages inherit this value. Can be either "enabled" or "disabled".                                                                                                                                                                | |
+| [`# gazelle:python_root`](#directive-python_root)                                                                                                                                                                                                                                               |    n/a            |
+| Sets a Bazel package as a Python root. This is used on monorepos with multiple Python projects that don't share the top-level of the workspace as the root. See [Directive: `python_root`](#directive-python_root) below.                                                                       | |
+| `# gazelle:python_manifest_file_name`                                                                                                                                                                                                                                                           | `gazelle_python.yaml` |
+| Overrides the default manifest file name.                                                                                                                                                                                                                                                       | |
+| `# gazelle:python_ignore_files`                                                                                                                                                                                                                                                                 |     n/a           |
+| Controls the files which are ignored from the generated targets.                                                                                                                                                                                                                                | |
+| `# gazelle:python_ignore_dependencies`                                                                                                                                                                                                                                                          |    n/a           |
+| Controls the ignored dependencies from the generated targets.                                                                                                                                                                                                                                   | |
+| `# gazelle:python_validate_import_statements`                                                                                                                                                                                                                                                   | `true` |
+| Controls whether the Python import statements should be validated. Can be "true" or "false"                                                                                                                                                                                                     | |
+| `# gazelle:python_generation_mode`                                                                                                                                                                                                                                                              | `package` |
+| Controls the target generation mode. Can be "file", "package", or "project"                                                                                                                                                                                                                     | |
+| `# gazelle:python_generation_mode_per_file_include_init`                                                                                                                                                                                                                                        | `false` |
+| Controls whether `__init__.py` files are included as srcs in each generated target when target generation mode is "file". Can be "true", or "false"                                                                                                                                             | |
+| [`# gazelle:python_generation_mode_per_package_require_test_entry_point`](#directive-python_generation_mode_per_package_require_test_entry_point)                                                                                                                                               | `true` |
+| Controls whether a file called `__test__.py` or a target called `__test__` is required to generate one test target per package in package mode.                                                                                                                                                 ||
+| `# gazelle:python_library_naming_convention`                                                                                                                                                                                                                                                    | `$package_name$` |
+| Controls the `py_library` naming convention. It interpolates `$package_name$` with the Bazel package name. E.g. if the Bazel package name is `foo`, setting this to `$package_name$_my_lib` would result in a generated target named `foo_my_lib`.                                              | |
+| `# gazelle:python_binary_naming_convention`                                                                                                                                                                                                                                                     | `$package_name$_bin` |
+| Controls the `py_binary` naming convention. Follows the same interpolation rules as `python_library_naming_convention`.                                                                                                                                                                         | |
+| `# gazelle:python_test_naming_convention`                                                                                                                                                                                                                                                       | `$package_name$_test` |
+| Controls the `py_test` naming convention. Follows the same interpolation rules as `python_library_naming_convention`.                                                                                                                                                                           | |
+| `# gazelle:resolve py ...`                                                                                                                                                                                                                                                                      | n/a |
 | Instructs the plugin what target to add as a dependency to satisfy a given import statement. The syntax is `# gazelle:resolve py import-string label` where `import-string` is the symbol in the python `import` statement, and `label` is the Bazel label that Gazelle should write in `deps`. | |
+| [`# gazelle:python_default_visibility labels`](#directive-python_default_visibility)                                                                                                                                                                                                            | |
+| Instructs gazelle to use these visibility labels on all python targets. `labels` is a comma-separated list of labels (without spaces).                                                                                                                                                          | `//$python_root$:__subpackages__` |
+| [`# gazelle:python_visibility label`](#directive-python_visibility)                                                                                                                                                                                                                             | |
+| Appends additional visibility labels to each generated target. This directive can be set multiple times.                                                                                                                                                                                        | |
+| [`# gazelle:python_test_file_pattern`](#directive-python_test_file_pattern)                                                                                                                                                                                                                     | `*_test.py,test_*.py` |
+| Filenames matching these comma-separated `glob`s will be mapped to `py_test` targets.                                                                                                                                                                                                           |
+| `# gazelle:python_label_convention`                                                                                                                                                                                                                                                             | `$distribution_name$` |
+| Defines the format of the distribution name in labels to third-party deps. Useful for using Gazelle plugin with other rules with different repository conventions (e.g. `rules_pycross`). Full label is always prepended with (pip) repository name, e.g. `@pip//numpy`.                        |
+| `# gazelle:python_label_normalization`                                                                                                                                                                                                                                                          | `snake_case` |
+| Controls how distribution names in labels to third-party deps are normalized. Useful for using Gazelle plugin with other rules with different label conventions (e.g. `rules_pycross` uses PEP-503). Can be "snake_case", "none", or "pep503".                                                  |
+
+#### Directive: `python_root`:
+
+Set this directive within the Bazel package that you want to use as the Python root.
+For example, if using a `src` dir (as recommended by the [Python Packaging User
+Guide][python-packaging-user-guide]), then set this directive in `src/BUILD.bazel`:
+
+```starlark
+# ./src/BUILD.bazel
+# Tell gazelle that our python root is the same dir as this Bazel package.
+# gazelle:python_root
+```
+
+Note that the directive does not have any arguments.
+
+Gazelle will then add the necessary `imports` attribute to all targets that it
+generates:
+
+```starlark
+# in ./src/foo/BUILD.bazel
+py_library(
+    ...
+    imports = [".."],  # Gazelle adds this
+    ...
+)
+
+# in ./src/foo/bar/BUILD.bazel
+py_library(
+    ...
+    imports = ["../.."],  # Gazelle adds this
+    ...
+)
+```
+
+[python-packaging-user-guide]: https://github.com/pypa/packaging.python.org/blob/4c86169a/source/tutorials/packaging-projects.rst
+
+
+#### Directive: `python_default_visibility`:
+
+Instructs gazelle to use these visibility labels on all _python_ targets
+(typically `py_*`, but can be modified via the `map_kind` directive). The arg
+to this directive is a comma-separated list (without spaces) of labels.
+
+For example:
+
+```starlark
+# gazelle:python_default_visibility //:__subpackages__,//tests:__subpackages__
+```
+
+produces the following visibility attribute:
+
+```starlark
+py_library(
+    ...,
+    visibility = [
+        "//:__subpackages__",
+        "//tests:__subpackages__",
+    ],
+    ...,
+)
+```
+
+You can also inject the `python_root` value by using the exact string
+`$python_root$`. All instances of this string will be replaced by the `python_root`
+value.
+
+```starlark
+# gazelle:python_default_visibility //$python_root$:__pkg__,//foo/$python_root$/tests:__subpackages__
+
+# Assuming the "# gazelle:python_root" directive is set in ./py/src/BUILD.bazel,
+# the results will be:
+py_library(
+    ...,
+    visibility = [
+        "//foo/py/src/tests:__subpackages__",  # sorted alphabetically
+        "//py/src:__pkg__",
+    ],
+    ...,
+)
+```
+
+Two special values are also accepted as an argument to the directive:
+
++   `NONE`: This removes all default visibility. Labels added by the
+    `python_visibility` directive are still included.
++   `DEFAULT`: This resets the default visibility.
+
+For example:
+
+```starlark
+# gazelle:python_default_visibility NONE
+
+py_library(
+    name = "...",
+    srcs = [...],
+)
+```
+
+```starlark
+# gazelle:python_default_visibility //foo:bar
+# gazelle:python_default_visibility DEFAULT
+
+py_library(
+    ...,
+    visibility = ["//:__subpackages__"],
+    ...,
+)
+```
+
+These special values can be useful for sub-packages.
+
+
+#### Directive: `python_visibility`:
+
+Appends additional `visibility` labels to each generated target.
+
+This directive can be set multiple times. The generated `visibility` attribute
+will include the default visibility and all labels defined by this directive.
+All labels will be ordered alphabetically.
+
+```starlark
+# ./BUILD.bazel
+# gazelle:python_visibility //tests:__pkg__
+# gazelle:python_visibility //bar:baz
+
+py_library(
+   ...
+   visibility = [
+       "//:__subpackages__",  # default visibility
+       "//bar:baz",
+       "//tests:__pkg__",
+   ],
+   ...
+)
+```
+
+Child Bazel packages inherit values from parents:
+
+```starlark
+# ./bar/BUILD.bazel
+# gazelle:python_visibility //tests:__subpackages__
+
+py_library(
+   ...
+   visibility = [
+       "//:__subpackages__",       # default visibility
+       "//bar:baz",                # defined in ../BUILD.bazel
+       "//tests:__pkg__",          # defined in ../BUILD.bazel
+       "//tests:__subpackages__",  # defined in this ./BUILD.bazel
+   ],
+   ...
+)
+
+```
+
+This directive also supports the `$python_root$` placeholder that
+`# gazelle:python_default_visibility` supports.
+
+```starlark
+# gazelle:python_visibility //$python_root$/foo:bar
+
+py_library(
+    ...
+    visibility = ["//this_is_my_python_root/foo:bar"],
+    ...
+)
+```
+
+
+#### Directive: `python_test_file_pattern`:
+
+This directive adjusts which python files will be mapped to the `py_test` rule.
+
++ The default is `*_test.py,test_*.py`: both `test_*.py` and `*_test.py` files
+  will generate `py_test` targets.
++ This directive must have a value. If no value is given, an error will be raised.
++ It is recommended, though not necessary, to include the `.py` extension in
+  the `glob`s: `foo*.py,?at.py`.
++ Like most directives, it applies to the current Bazel package and all subpackages
+  until the directive is set again.
++ This directive accepts multiple `glob` patterns, separated by commas without spaces:
+
+```starlark
+# gazelle:python_test_file_pattern foo*.py,?at
+
+py_library(
+    name = "mylib",
+    srcs = ["mylib.py"],
+)
+
+py_test(
+    name = "foo_bar",
+    srcs = ["foo_bar.py"],
+)
+
+py_test(
+    name = "cat",
+    srcs = ["cat.py"],
+)
+
+py_test(
+    name = "hat",
+    srcs = ["hat.py"],
+)
+```
+
+
+##### Notes
+
+Resetting to the default value (such as in a subpackage) is manual. Set:
+
+```starlark
+# gazelle:python_test_file_pattern *_test.py,test_*.py
+```
+
+There currently is no way to tell gazelle that _no_ files in a package should
+be mapped to `py_test` targets (see [Issue #1826][issue-1826]). The workaround
+is to set this directive to a pattern that will never match a `.py` file, such
+as `foo.bar`:
+
+```starlark
+# No files in this package should be mapped to py_test targets.
+# gazelle:python_test_file_pattern foo.bar
+
+py_library(
+    name = "my_test",
+    srcs = ["my_test.py"],
+)
+```
+
+[issue-1826]: https://github.com/bazelbuild/rules_python/issues/1826
+
+#### Directive: `python_generation_mode_per_package_require_test_entry_point`:
+When `# gazelle:python_generation_mode package`, whether a file called `__test__.py` or a target called `__test__`, a.k.a., entry point, is required to generate one test target per package. If this is set to true but no entry point is found, Gazelle will fall back to file mode and generate one test target per file. Setting this directive to false forces Gazelle to generate one test target per package even without entry point. However, this means the `main` attribute of the `py_test` will not be set and the target will not be runnable unless either:
+1. there happens to be a file in the `srcs` with the same name as the `py_test` target, or
+2. a macro populating the `main` attribute of `py_test` is configured with `gazelle:map_kind` to replace `py_test` when Gazelle is generating Python test targets. For example, a user can provide such a macro to Gazelle:
+
+```starlark
+load("@rules_python//python:defs.bzl", _py_test="py_test")
+load("@aspect_rules_py//py:defs.bzl", "py_pytest_main")
+
+def py_test(name, main=None, **kwargs):
+    deps = kwargs.pop("deps", [])
+    if not main:
+        py_pytest_main(
+            name = "__test__",
+            deps = ["@pip_pytest//:pkg"],  # change this to the pytest target in your repo.
+        )
+    
+        deps.append(":__test__")
+        main = ":__test__.py"
+
+    _py_test(
+        name = name,
+        main = main,
+        deps = deps,
+        **kwargs,
+)
+```
+
+### Annotations
+
+*Annotations* refer to comments found _within Python files_ that configure how
+Gazelle acts for that particular file.
+
+Annotations have the form:
+
+```python
+# gazelle:annotation_name value
+```
+
+and can reside anywhere within a Python file where comments are valid. For example:
+
+```python
+import foo
+# gazelle:annotation_name value
+
+def bar():  # gazelle:annotation_name value
+    pass
+```
+
+The annotations are:
+
+| **Annotation**                                                | **Default value** |
+|---------------------------------------------------------------|-------------------|
+| [`# gazelle:ignore imports`](#annotation-ignore)              | N/A               |
+| Tells Gazelle to ignore import statements. `imports` is a comma-separated list of imports to ignore. | |
+| [`# gazelle:include_dep targets`](#annotation-include_dep)    | N/A               |
+| Tells Gazelle to include a set of dependencies, even if they are not imported in a Python module. `targets` is a comma-separated list of target names to include as dependencies. | |
+
+
+#### Annotation: `ignore`
+
+This annotation accepts a comma-separated string of values. Values are names of Python
+imports that Gazelle should _not_ include in target dependencies.
+
+The annotation can be added multiple times, and all values are combined and
+de-duplicated.
+
+For `python_generation_mode = "package"`, the `ignore` annotations
+found across all files included in the generated target are removed from `deps`.
+
+Example:
+
+```python
+import numpy  # a pypi package
+
+# gazelle:ignore bar.baz.hello,foo
+import bar.baz.hello
+import foo
+
+# Ignore this import because _reasons_
+import baz  # gazelle:ignore baz
+```
+
+will cause Gazelle to generate:
+
+```starlark
+deps = ["@pypi//numpy"],
+```
+
+
+#### Annotation: `include_dep`
+
+This annotation accepts a comma-separated string of values. Values _must_
+be Python targets, but _no validation is done_. If a value is not a Python
+target, building will result in an error saying:
+
+```
+<target> does not have mandatory providers: 'PyInfo' or 'CcInfo' or 'PyInfo'.
+```
+
+Adding non-Python targets to the generated target is a feature request being
+tracked in [Issue #1865](https://github.com/bazelbuild/rules_python/issues/1865).
+
+The annotation can be added multiple times, and all values are combined
+and de-duplicated.
+
+For `python_generation_mode = "package"`, the `include_dep` annotations
+found across all files included in the generated target are included in `deps`.
+
+Example:
+
+```python
+# gazelle:include_dep //foo:bar,:hello_world,//:abc
+# gazelle:include_dep //:def,//foo:bar
+import numpy  # a pypi package
+```
+
+will cause Gazelle to generate:
+
+```starlark
+deps = [
+    ":hello_world",
+    "//:abc",
+    "//:def",
+    "//foo:bar",
+    "@pypi//numpy",
+]
+```
+
 
 ### Libraries
 
@@ -250,8 +627,20 @@
 if __name == "__main__":
 ```
 
-Gazelle will create `py_binary` target will be created for every module with such line, with the target name
-being the same as module name.
+Gazelle will create a `py_binary` target for every module with such a line, with
+the target name the same as the module name.
+
+If `python_generation_mode` is set to `file`, then instead of one `py_binary`
+target per module, Gazelle will create one `py_binary` target for each file with
+such a line, and the name of the target will match the name of the script.
+
+Note that it's possible for another script to depend on a `py_binary` target and
+import from the `py_binary`'s scripts. This can have possible negative effects on
+Bazel analysis time and runfiles size compared to depending on a `py_library`
+target. The simplest way to avoid these negative effects is to extract library
+code into a separate script without a `main` line. Gazelle will then create a
+`py_library` target for that library code, and other scripts can depend on that
+`py_library` target.
 
 ## Developer Notes
 
@@ -262,7 +651,7 @@
 
 If you add new Go dependencies to the plugin source code, you need to "tidy" the go.mod file.
 After changing that file, run `go mod tidy` or `bazel run @go_sdk//:bin/go -- mod tidy`
-to update the go.mod and go.sum files. Then run `bazel run //:update_go_deps` to have gazelle
+to update the go.mod and go.sum files. Then run `bazel run //:gazelle_update_repos` to have gazelle
 add the new dependenies to the deps.bzl file. The deps.bzl file is used as defined in our /WORKSPACE
 to include the external repos Bazel loads Go dependencies from.
 
diff --git a/gazelle/WORKSPACE b/gazelle/WORKSPACE
index df2883f..d9f0645 100644
--- a/gazelle/WORKSPACE
+++ b/gazelle/WORKSPACE
@@ -34,16 +34,11 @@
     path = "..",
 )
 
-load("@rules_python//python:repositories.bzl", "py_repositories", "python_register_toolchains")
+load("@rules_python//python:repositories.bzl", "py_repositories")
 
 py_repositories()
 
-python_register_toolchains(
-    name = "python_3_11",
-    python_version = "3.11",
-)
-
 load("//:deps.bzl", _py_gazelle_deps = "gazelle_deps")
 
-# gazelle:repository_macro deps.bzl%gazelle_deps
+# gazelle:repository_macro deps.bzl%go_deps
 _py_gazelle_deps()
diff --git a/gazelle/deps.bzl b/gazelle/deps.bzl
index 26f8c66..948d61e 100644
--- a/gazelle/deps.bzl
+++ b/gazelle/deps.bzl
@@ -14,13 +14,31 @@
 
 "This file managed by `bazel run //:gazelle_update_repos`"
 
-load("@bazel_gazelle//:deps.bzl", _go_repository = "go_repository")
+load(
+    "@bazel_gazelle//:deps.bzl",
+    _go_repository = "go_repository",
+)
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
 
 def go_repository(name, **kwargs):
     if name not in native.existing_rules():
         _go_repository(name = name, **kwargs)
 
+def python_stdlib_list_deps():
+    "Fetch python stdlib list dependencies"
+    http_archive(
+        name = "python_stdlib_list",
+        build_file_content = """exports_files(glob(["stdlib_list/lists/*.txt"]))""",
+        sha256 = "3f6fc8fba0a99ce8fa76c1b794a24f38962f6275ea9d5cfb43a874abe472571e",
+        strip_prefix = "stdlib-list-0.10.0",
+        url = "https://github.com/pypi/stdlib-list/releases/download/v0.10.0/v0.10.0.tar.gz",
+    )
+
 def gazelle_deps():
+    go_deps()
+    python_stdlib_list_deps()
+
+def go_deps():
     "Fetch go dependencies"
     go_repository(
         name = "co_honnef_go_tools",
@@ -28,20 +46,32 @@
         sum = "h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs=",
         version = "v0.0.0-20190523083050-ea95bdfd59fc",
     )
+    go_repository(
+        name = "com_github_bazelbuild_bazel_gazelle",
+        importpath = "github.com/bazelbuild/bazel-gazelle",
+        sum = "h1:ROyUyUHzoEdvoOs1e0haxJx1l5EjZX6AOqiKdVlaBbg=",
+        version = "v0.31.1",
+    )
 
     go_repository(
         name = "com_github_bazelbuild_buildtools",
         build_naming_convention = "go_default_library",
         importpath = "github.com/bazelbuild/buildtools",
-        sum = "h1:jhiMzJ+8unnLRtV8rpbWBFE9pFNzIqgUTyZU5aA++w8=",
-        version = "v0.0.0-20221004120235-7186f635531b",
+        sum = "h1:HTepWP/jhtWTC1gvK0RnvKCgjh4gLqiwaOwGozAXcbw=",
+        version = "v0.0.0-20231103205921-433ea8554e82",
+    )
+    go_repository(
+        name = "com_github_bazelbuild_rules_go",
+        importpath = "github.com/bazelbuild/rules_go",
+        sum = "h1:JzlRxsFNhlX+g4drDRPhIaU5H5LnI978wdMJ0vK4I+k=",
+        version = "v0.41.0",
     )
 
     go_repository(
-        name = "com_github_bmatcuk_doublestar",
-        importpath = "github.com/bmatcuk/doublestar",
-        sum = "h1:gPypJ5xD31uhX6Tf54sDPUOBXTqKH4c9aPY66CyQrS0=",
-        version = "v1.3.4",
+        name = "com_github_bmatcuk_doublestar_v4",
+        importpath = "github.com/bmatcuk/doublestar/v4",
+        sum = "h1:FH9SifrbvJhnlQpztAx++wlkk70QBf0iBWDwNy7PA4I=",
+        version = "v4.6.1",
     )
 
     go_repository(
@@ -81,6 +111,13 @@
         version = "v0.3.4",
     )
     go_repository(
+        name = "com_github_davecgh_go_spew",
+        importpath = "github.com/davecgh/go-spew",
+        sum = "h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=",
+        version = "v1.1.1",
+    )
+
+    go_repository(
         name = "com_github_emirpasic_gods",
         importpath = "github.com/emirpasic/gods",
         sum = "h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=",
@@ -98,6 +135,12 @@
         sum = "h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A=",
         version = "v0.1.0",
     )
+    go_repository(
+        name = "com_github_fsnotify_fsnotify",
+        importpath = "github.com/fsnotify/fsnotify",
+        sum = "h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=",
+        version = "v1.6.0",
+    )
 
     go_repository(
         name = "com_github_ghodss_yaml",
@@ -114,14 +157,14 @@
     go_repository(
         name = "com_github_golang_mock",
         importpath = "github.com/golang/mock",
-        sum = "h1:G5FRp8JnTd7RQH5kemVNlMeyXQAztQ3mOWV95KxsXH8=",
-        version = "v1.1.1",
+        sum = "h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=",
+        version = "v1.6.0",
     )
     go_repository(
         name = "com_github_golang_protobuf",
         importpath = "github.com/golang/protobuf",
-        sum = "h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM=",
-        version = "v1.4.3",
+        sum = "h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=",
+        version = "v1.5.2",
     )
     go_repository(
         name = "com_github_google_go_cmp",
@@ -129,6 +172,12 @@
         sum = "h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=",
         version = "v0.5.9",
     )
+    go_repository(
+        name = "com_github_pmezard_go_difflib",
+        importpath = "github.com/pmezard/go-difflib",
+        sum = "h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=",
+        version = "v1.0.0",
+    )
 
     go_repository(
         name = "com_github_prometheus_client_model",
@@ -137,6 +186,25 @@
         version = "v0.0.0-20190812154241-14fe0d1b01d4",
     )
     go_repository(
+        name = "com_github_smacker_go_tree_sitter",
+        importpath = "github.com/smacker/go-tree-sitter",
+        sum = "h1:7QZKUmQfnxncZIJGyvX8M8YeMfn8kM10j3J/2KwVTN4=",
+        version = "v0.0.0-20240422154435-0628b34cbf9c",
+    )
+    go_repository(
+        name = "com_github_stretchr_objx",
+        importpath = "github.com/stretchr/objx",
+        sum = "h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=",
+        version = "v0.5.2",
+    )
+    go_repository(
+        name = "com_github_stretchr_testify",
+        importpath = "github.com/stretchr/testify",
+        sum = "h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=",
+        version = "v1.9.0",
+    )
+
+    go_repository(
         name = "com_github_yuin_goldmark",
         importpath = "github.com/yuin/goldmark",
         sum = "h1:fVcFKWvrslecOb/tg+Cc05dkeYx540o0FuFt3nUVDoE=",
@@ -161,6 +229,13 @@
         version = "v2.4.0",
     )
     go_repository(
+        name = "in_gopkg_yaml_v3",
+        importpath = "gopkg.in/yaml.v3",
+        sum = "h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=",
+        version = "v3.0.1",
+    )
+
+    go_repository(
         name = "net_starlark_go",
         importpath = "go.starlark.net",
         sum = "h1:xwwDQW5We85NaTk2APgoN9202w/l0DVGp+GZMfsrh7s=",
@@ -181,14 +256,14 @@
     go_repository(
         name = "org_golang_google_grpc",
         importpath = "google.golang.org/grpc",
-        sum = "h1:rRYRFMVgRv6E0D70Skyfsr28tDXIuuPZyWGMPdMcnXg=",
-        version = "v1.27.0",
+        sum = "h1:fPVVDxY9w++VjTZsYvXWqEf9Rqar/e+9zYfxKK+W+YU=",
+        version = "v1.50.0",
     )
     go_repository(
         name = "org_golang_google_protobuf",
         importpath = "google.golang.org/protobuf",
-        sum = "h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c=",
-        version = "v1.25.0",
+        sum = "h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw=",
+        version = "v1.28.0",
     )
     go_repository(
         name = "org_golang_x_crypto",
@@ -211,14 +286,14 @@
     go_repository(
         name = "org_golang_x_mod",
         importpath = "golang.org/x/mod",
-        sum = "h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=",
-        version = "v0.6.0-dev.0.20220419223038-86c51ed26bb4",
+        sum = "h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk=",
+        version = "v0.10.0",
     )
     go_repository(
         name = "org_golang_x_net",
         importpath = "golang.org/x/net",
-        sum = "h1:PxfKdU9lEEDYjdIzOtC4qFWgkU2rGHdKlKowJSMN9h0=",
-        version = "v0.0.0-20220722155237-a158d28d115b",
+        sum = "h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=",
+        version = "v0.10.0",
     )
     go_repository(
         name = "org_golang_x_oauth2",
@@ -229,20 +304,20 @@
     go_repository(
         name = "org_golang_x_sync",
         importpath = "golang.org/x/sync",
-        sum = "h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw=",
-        version = "v0.0.0-20220722155255-886fb9371eb4",
+        sum = "h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=",
+        version = "v0.2.0",
     )
     go_repository(
         name = "org_golang_x_sys",
         importpath = "golang.org/x/sys",
-        sum = "h1:k5II8e6QD8mITdi+okbbmR/cIyEbeXLBhy5Ha4nevyc=",
-        version = "v0.0.0-20221010170243-090e33056c14",
+        sum = "h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=",
+        version = "v0.8.0",
     )
     go_repository(
         name = "org_golang_x_text",
         importpath = "golang.org/x/text",
-        sum = "h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=",
-        version = "v0.3.7",
+        sum = "h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=",
+        version = "v0.3.3",
     )
     go_repository(
         name = "org_golang_x_tools",
@@ -250,8 +325,8 @@
             "gazelle:exclude **/testdata/**/*",
         ],
         importpath = "golang.org/x/tools",
-        sum = "h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU=",
-        version = "v0.1.12",
+        sum = "h1:8WMNJAz3zrtPmnYC7ISf5dEn3MT0gY7jBJfw27yrrLo=",
+        version = "v0.9.1",
     )
     go_repository(
         name = "org_golang_x_xerrors",
diff --git a/gazelle/go.mod b/gazelle/go.mod
index 6789aa1..4b65e71 100644
--- a/gazelle/go.mod
+++ b/gazelle/go.mod
@@ -4,17 +4,23 @@
 
 require (
 	github.com/bazelbuild/bazel-gazelle v0.31.1
-	github.com/bazelbuild/buildtools v0.0.0-20230510134650-37bd1811516d
+	github.com/bazelbuild/buildtools v0.0.0-20231103205921-433ea8554e82
 	github.com/bazelbuild/rules_go v0.41.0
-	github.com/bmatcuk/doublestar v1.3.4
+	github.com/bmatcuk/doublestar/v4 v4.6.1
 	github.com/emirpasic/gods v1.18.1
 	github.com/ghodss/yaml v1.0.0
+	github.com/smacker/go-tree-sitter v0.0.0-20240422154435-0628b34cbf9c
+	github.com/stretchr/testify v1.9.0
+	golang.org/x/sync v0.2.0
 	gopkg.in/yaml.v2 v2.4.0
 )
 
 require (
+	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/google/go-cmp v0.5.9 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
 	golang.org/x/mod v0.10.0 // indirect
 	golang.org/x/sys v0.8.0 // indirect
 	golang.org/x/tools v0.9.1 // indirect
+	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
diff --git a/gazelle/go.sum b/gazelle/go.sum
index 5617f9b..46e0127 100644
--- a/gazelle/go.sum
+++ b/gazelle/go.sum
@@ -2,17 +2,20 @@
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/bazelbuild/bazel-gazelle v0.31.1 h1:ROyUyUHzoEdvoOs1e0haxJx1l5EjZX6AOqiKdVlaBbg=
 github.com/bazelbuild/bazel-gazelle v0.31.1/go.mod h1:Ul0pqz50f5wxz0QNzsZ+mrEu4AVAVJZEB5xLnHgIG9c=
-github.com/bazelbuild/buildtools v0.0.0-20230510134650-37bd1811516d h1:Fl1FfItZp34QIQmmDTbZXHB5XA6JfbNNfH7tRRGWvQo=
-github.com/bazelbuild/buildtools v0.0.0-20230510134650-37bd1811516d/go.mod h1:689QdV3hBP7Vo9dJMmzhoYIyo/9iMhEmHkJcnaPRCbo=
+github.com/bazelbuild/buildtools v0.0.0-20231103205921-433ea8554e82 h1:HTepWP/jhtWTC1gvK0RnvKCgjh4gLqiwaOwGozAXcbw=
+github.com/bazelbuild/buildtools v0.0.0-20231103205921-433ea8554e82/go.mod h1:689QdV3hBP7Vo9dJMmzhoYIyo/9iMhEmHkJcnaPRCbo=
 github.com/bazelbuild/rules_go v0.41.0 h1:JzlRxsFNhlX+g4drDRPhIaU5H5LnI978wdMJ0vK4I+k=
 github.com/bazelbuild/rules_go v0.41.0/go.mod h1:TMHmtfpvyfsxaqfL9WnahCsXMWDMICTw7XeK9yVb+YU=
-github.com/bmatcuk/doublestar v1.3.4 h1:gPypJ5xD31uhX6Tf54sDPUOBXTqKH4c9aPY66CyQrS0=
-github.com/bmatcuk/doublestar v1.3.4/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
+github.com/bmatcuk/doublestar/v4 v4.6.1 h1:FH9SifrbvJhnlQpztAx++wlkk70QBf0iBWDwNy7PA4I=
+github.com/bmatcuk/doublestar/v4 v4.6.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
 github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
 github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
 github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -38,7 +41,17 @@
 github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
 github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/smacker/go-tree-sitter v0.0.0-20240422154435-0628b34cbf9c h1:7QZKUmQfnxncZIJGyvX8M8YeMfn8kM10j3J/2KwVTN4=
+github.com/smacker/go-tree-sitter v0.0.0-20240422154435-0628b34cbf9c/go.mod h1:q99oHDsbP0xRwmn7Vmob8gbSMNyvJ83OauXPSuHQuKE=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.4/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
+github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
 go.starlark.net v0.0.0-20210223155950-e043a3d3c984/go.mod h1:t3mmBBPzAVvK0L0n1drDmrQsJ8FoIx4INCqVMTr/Zo0=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@@ -55,6 +68,8 @@
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.2.0 h1:PUR+T4wwASmuSTYdKjYHI5TD22Wy5ogLU5qZCOLxBrI=
+golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -90,5 +105,8 @@
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
 gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
 honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
 honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/gazelle/manifest/BUILD.bazel b/gazelle/manifest/BUILD.bazel
index fc7fa09..33b5a46 100644
--- a/gazelle/manifest/BUILD.bazel
+++ b/gazelle/manifest/BUILD.bazel
@@ -1,5 +1,10 @@
 load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
 
+exports_files([
+    # This gets wrapped up into a py_binary with args inside of the gazelle_python_manifest macro.
+    "copy_to_source.py",
+])
+
 go_library(
     name = "manifest",
     srcs = ["manifest.go"],
diff --git a/gazelle/manifest/copy_to_source.py b/gazelle/manifest/copy_to_source.py
new file mode 100644
index 0000000..4ebb958
--- /dev/null
+++ b/gazelle/manifest/copy_to_source.py
@@ -0,0 +1,36 @@
+"""Copy a generated file to the source tree.
+
+Run like:
+    copy_to_source path/to/generated_file path/to/source_file_to_overwrite
+"""
+
+import os
+import shutil
+import stat
+import sys
+from pathlib import Path
+
+
+def copy_to_source(generated_relative_path: Path, target_relative_path: Path) -> None:
+    """Copy the generated file to the target file path.
+
+    Expands the relative paths by looking at Bazel env vars to figure out which absolute paths to use.
+    """
+    # This script normally gets executed from the runfiles dir, so find the absolute path to the generated file based on that.
+    generated_absolute_path = Path.cwd() / generated_relative_path
+
+    # Similarly, the target is relative to the source directory.
+    target_absolute_path = os.getenv("BUILD_WORKSPACE_DIRECTORY") / target_relative_path
+
+    print(f"Copying {generated_absolute_path} to {target_absolute_path}")
+    target_absolute_path.parent.mkdir(parents=True, exist_ok=True)
+    shutil.copy(generated_absolute_path, target_absolute_path)
+
+    target_absolute_path.chmod(0o664)
+
+
+if __name__ == "__main__":
+    if len(sys.argv) != 3:
+        sys.exit("Usage: copy_to_source <generated_file> <target_file>")
+
+    copy_to_source(Path(sys.argv[1]), Path(sys.argv[2]))
diff --git a/gazelle/manifest/defs.bzl b/gazelle/manifest/defs.bzl
index f1a16c4..eacf1c1 100644
--- a/gazelle/manifest/defs.bzl
+++ b/gazelle/manifest/defs.bzl
@@ -16,32 +16,34 @@
 for updating and testing the Gazelle manifest file.
 """
 
-load("@io_bazel_rules_go//go:def.bzl", "GoSource", "go_binary", "go_test")
+load("@bazel_skylib//rules:diff_test.bzl", "diff_test")
+load("@io_bazel_rules_go//go:def.bzl", "GoSource", "go_test")
+load("@rules_python//python:defs.bzl", "py_binary")
 
 def gazelle_python_manifest(
         name,
-        requirements,
         modules_mapping,
+        requirements = [],
         pip_repository_name = "",
         pip_deps_repository_name = "",
         manifest = ":gazelle_python.yaml",
-        use_pip_repository_aliases = None,
         **kwargs):
     """A macro for defining the updating and testing targets for the Gazelle manifest file.
 
     Args:
         name: the name used as a base for the targets.
+        modules_mapping: the target for the generated modules_mapping.json file.
         requirements: the target for the requirements.txt file or a list of
             requirements files that will be concatenated before passing on to
-            the manifest generator.
+            the manifest generator. If unset, no integrity field is added to the
+            manifest, meaning testing it is just as expensive as generating it,
+            but modifying it is much less likely to result in a merge conflict.
         pip_repository_name: the name of the pip_install or pip_repository target.
-        use_pip_repository_aliases: boolean flag to enable using user-friendly
-            python package aliases. Defaults to True.
         pip_deps_repository_name: deprecated - the old pip_install target name.
-        modules_mapping: the target for the generated modules_mapping.json file.
-        manifest: the target for the Gazelle manifest file.
-        **kwargs: other bazel attributes passed to the target target generated by
-            this macro.
+        manifest: the Gazelle manifest file.
+            defaults to the same value as manifest.
+        **kwargs: other bazel attributes passed to the generate and test targets
+            generated by this macro.
     """
     if pip_deps_repository_name != "":
         # buildifier: disable=print
@@ -55,12 +57,17 @@
         # This is a temporary check while pip_deps_repository_name exists as deprecated.
         fail("pip_repository_name must be set in //{}:{}".format(native.package_name(), name))
 
+    test_target = "{}.test".format(name)
     update_target = "{}.update".format(name)
     update_target_label = "//{}:{}".format(native.package_name(), update_target)
 
+    manifest_genrule = name + ".genrule"
+    generated_manifest = name + ".generated_manifest"
+    manifest_generator = Label("//manifest/generate:generate")
     manifest_generator_hash = Label("//manifest/generate:generate_lib_sources_hash")
 
-    if type(requirements) == "list":
+    if requirements and type(requirements) == "list":
+        # This runs if requirements is a list or is unset (default value is empty list)
         native.genrule(
             name = name + "_requirements_gen",
             srcs = sorted(requirements),
@@ -71,74 +78,76 @@
         requirements = name + "_requirements_gen"
 
     update_args = [
-        "--manifest-generator-hash",
-        "$(rootpath {})".format(manifest_generator_hash),
-        "--requirements",
-        "$(rootpath {})".format(requirements),
-        "--pip-repository-name",
-        pip_repository_name,
-        "--modules-mapping",
-        "$(rootpath {})".format(modules_mapping),
-        "--output",
-        "$(rootpath {})".format(manifest),
-        "--update-target",
-        update_target_label,
+        "--manifest-generator-hash=$(execpath {})".format(manifest_generator_hash),
+        "--requirements=$(rootpath {})".format(requirements) if requirements else "--requirements=",
+        "--pip-repository-name={}".format(pip_repository_name),
+        "--modules-mapping=$(execpath {})".format(modules_mapping),
+        "--output=$(execpath {})".format(generated_manifest),
+        "--update-target={}".format(update_target_label),
     ]
 
-    # TODO @aignas 2023-10-31: When removing this code, cleanup the
-    # code in gazelle to only work with aliased targets.
-    if use_pip_repository_aliases == None:
-        update_args += [
-            "--omit-pip-repository-aliases-setting",
-            "true",
-        ]
-    elif use_pip_repository_aliases:
-        update_args += [
-            "--use-pip-repository-aliases",
-            "true",
-        ]
-    else:
-        update_args += [
-            "--use-pip-repository-aliases",
-            "false",
-        ]
-
-    go_binary(
-        name = update_target,
-        embed = [Label("//manifest/generate:generate_lib")],
-        data = [
-            manifest,
+    native.genrule(
+        name = manifest_genrule,
+        outs = [generated_manifest],
+        cmd = "$(execpath {}) {}".format(manifest_generator, " ".join(update_args)),
+        tools = [manifest_generator],
+        srcs = [
             modules_mapping,
-            requirements,
             manifest_generator_hash,
-        ],
-        args = update_args,
-        visibility = ["//visibility:private"],
+        ] + ([requirements] if requirements else []),
         tags = ["manual"],
     )
 
-    attrs = {
-        "env": {
-            "_TEST_MANIFEST": "$(rootpath {})".format(manifest),
-            "_TEST_MANIFEST_GENERATOR_HASH": "$(rootpath {})".format(manifest_generator_hash),
-            "_TEST_REQUIREMENTS": "$(rootpath {})".format(requirements),
-        },
-        "size": "small",
-    }
-    go_test(
-        name = "{}.test".format(name),
-        srcs = [Label("//manifest/test:test.go")],
-        data = [
-            manifest,
-            requirements,
-            manifest_generator_hash,
+    py_binary(
+        name = update_target,
+        srcs = [Label("//manifest:copy_to_source.py")],
+        main = Label("//manifest:copy_to_source.py"),
+        args = [
+            "$(rootpath {})".format(generated_manifest),
+            "$(rootpath {})".format(manifest),
         ],
-        rundir = ".",
-        deps = [Label("//manifest")],
-        # kwargs could contain test-specific attributes like size or timeout
-        **dict(attrs, **kwargs)
+        data = [
+            generated_manifest,
+            manifest,
+        ],
+        tags = kwargs.get("tags", []) + ["manual"],
+        **{k: v for k, v in kwargs.items() if k != "tags"}
     )
 
+    if requirements:
+        attrs = {
+            "env": {
+                "_TEST_MANIFEST": "$(rootpath {})".format(manifest),
+                "_TEST_MANIFEST_GENERATOR_HASH": "$(rlocationpath {})".format(manifest_generator_hash),
+                "_TEST_REQUIREMENTS": "$(rootpath {})".format(requirements),
+            },
+            "size": "small",
+        }
+        go_test(
+            name = test_target,
+            srcs = [Label("//manifest/test:test.go")],
+            data = [
+                manifest,
+                requirements,
+                manifest_generator_hash,
+            ],
+            rundir = ".",
+            deps = [
+                Label("//manifest"),
+                Label("@io_bazel_rules_go//go/runfiles"),
+            ],
+            # kwargs could contain test-specific attributes like size or timeout
+            **dict(attrs, **kwargs)
+        )
+    else:
+        diff_test(
+            name = test_target,
+            file1 = generated_manifest,
+            file2 = manifest,
+            failure_message = "Gazelle manifest is out of date. Run 'bazel run {}' to update it.".format(native.package_relative_label(update_target)),
+            **kwargs
+        )
+
     native.filegroup(
         name = name,
         srcs = [manifest],
diff --git a/gazelle/manifest/generate/generate.go b/gazelle/manifest/generate/generate.go
index 006b15e..19ca08a 100644
--- a/gazelle/manifest/generate/generate.go
+++ b/gazelle/manifest/generate/generate.go
@@ -31,19 +31,11 @@
 	"github.com/bazelbuild/rules_python/gazelle/manifest"
 )
 
-func init() {
-	if os.Getenv("BUILD_WORKSPACE_DIRECTORY") == "" {
-		log.Fatalln("ERROR: this program must run under Bazel")
-	}
-}
-
 func main() {
 	var (
 		manifestGeneratorHashPath string
 		requirementsPath          string
 		pipRepositoryName         string
-		usePipRepositoryAliases   bool
-		omitUsePipRepositoryAliases   bool
 		modulesMappingPath        string
 		outputPath                string
 		updateTarget              string
@@ -64,16 +56,6 @@
 		"pip-repository-name",
 		"",
 		"The name of the pip_install or pip_repository target.")
-	flag.BoolVar(
-		&usePipRepositoryAliases,
-		"use-pip-repository-aliases",
-		true,
-		"Whether to use the pip-repository aliases, which are generated when passing 'incompatible_generate_aliases = True'.")
-	flag.BoolVar(
-		&omitUsePipRepositoryAliases,
-		"omit-pip-repository-aliases-setting",
-		false,
-		"Whether to omit use-pip-repository-aliases flag serialization into the manifest.")
 	flag.StringVar(
 		&modulesMappingPath,
 		"modules-mapping",
@@ -91,10 +73,6 @@
 		"The Bazel target to update the YAML manifest file.")
 	flag.Parse()
 
-	if requirementsPath == "" {
-		log.Fatalln("ERROR: --requirements must be set")
-	}
-
 	if modulesMappingPath == "" {
 		log.Fatalln("ERROR: --modules-mapping must be set")
 	}
@@ -114,18 +92,12 @@
 
 	header := generateHeader(updateTarget)
 	repository := manifest.PipRepository{
-		Name:                    pipRepositoryName,
-	}
-
-	if omitUsePipRepositoryAliases {
-		repository.UsePipRepositoryAliases = nil
-	} else {
-		repository.UsePipRepositoryAliases = &usePipRepositoryAliases
+		Name: pipRepositoryName,
 	}
 
 	manifestFile := manifest.NewFile(&manifest.Manifest{
 		ModulesMapping: modulesMapping,
-		PipRepository: &repository,
+		PipRepository:  &repository,
 	})
 	if err := writeOutput(
 		outputPath,
@@ -173,12 +145,7 @@
 	manifestGeneratorHashPath string,
 	requirementsPath string,
 ) error {
-	stat, err := os.Stat(outputPath)
-	if err != nil {
-		return fmt.Errorf("failed to write output: %w", err)
-	}
-
-	outputFile, err := os.OpenFile(outputPath, os.O_WRONLY|os.O_TRUNC, stat.Mode())
+	outputFile, err := os.OpenFile(outputPath, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0644)
 	if err != nil {
 		return fmt.Errorf("failed to write output: %w", err)
 	}
@@ -188,20 +155,26 @@
 		return fmt.Errorf("failed to write output: %w", err)
 	}
 
-	manifestGeneratorHash, err := os.Open(manifestGeneratorHashPath)
-	if err != nil {
-		return fmt.Errorf("failed to write output: %w", err)
-	}
-	defer manifestGeneratorHash.Close()
+	if requirementsPath != "" {
+		manifestGeneratorHash, err := os.Open(manifestGeneratorHashPath)
+		if err != nil {
+			return fmt.Errorf("failed to write output: %w", err)
+		}
+		defer manifestGeneratorHash.Close()
 
-	requirements, err := os.Open(requirementsPath)
-	if err != nil {
-		return fmt.Errorf("failed to write output: %w", err)
-	}
-	defer requirements.Close()
+		requirements, err := os.Open(requirementsPath)
+		if err != nil {
+			return fmt.Errorf("failed to write output: %w", err)
+		}
+		defer requirements.Close()
 
-	if err := manifestFile.Encode(outputFile, manifestGeneratorHash, requirements); err != nil {
-		return fmt.Errorf("failed to write output: %w", err)
+		if err := manifestFile.EncodeWithIntegrity(outputFile, manifestGeneratorHash, requirements); err != nil {
+			return fmt.Errorf("failed to write output: %w", err)
+		}
+	} else {
+		if err := manifestFile.EncodeWithoutIntegrity(outputFile); err != nil {
+			return fmt.Errorf("failed to write output: %w", err)
+		}
 	}
 
 	return nil
diff --git a/gazelle/manifest/manifest.go b/gazelle/manifest/manifest.go
index 55adef0..26b0dfb 100644
--- a/gazelle/manifest/manifest.go
+++ b/gazelle/manifest/manifest.go
@@ -31,7 +31,7 @@
 	// Integrity is the hash of the requirements.txt file and the Manifest for
 	// ensuring the integrity of the entire gazelle_python.yaml file. This
 	// controls the testing to keep the gazelle_python.yaml file up-to-date.
-	Integrity string `yaml:"integrity"`
+	Integrity string `yaml:"integrity,omitempty"`
 }
 
 // NewFile creates a new File with a given Manifest.
@@ -40,12 +40,21 @@
 }
 
 // Encode encodes the manifest file to the given writer.
-func (f *File) Encode(w io.Writer, manifestGeneratorHashFile, requirements io.Reader) error {
+func (f *File) EncodeWithIntegrity(w io.Writer, manifestGeneratorHashFile, requirements io.Reader) error {
 	integrityBytes, err := f.calculateIntegrity(manifestGeneratorHashFile, requirements)
 	if err != nil {
 		return fmt.Errorf("failed to encode manifest file: %w", err)
 	}
 	f.Integrity = fmt.Sprintf("%x", integrityBytes)
+
+	return f.encode(w)
+}
+
+func (f *File) EncodeWithoutIntegrity(w io.Writer) error {
+	return f.encode(w)
+}
+
+func (f *File) encode(w io.Writer) error {
 	encoder := yaml.NewEncoder(w)
 	defer encoder.Close()
 	if err := encoder.Encode(f); err != nil {
@@ -144,7 +153,4 @@
 type PipRepository struct {
 	// The name of the pip_parse or pip_repository target.
 	Name string
-	// UsePipRepositoryAliases allows to use aliases generated pip_repository
-	// when passing incompatible_generate_aliases = True.
-	UsePipRepositoryAliases *bool `yaml:"use_pip_repository_aliases,omitempty"`
 }
diff --git a/gazelle/manifest/manifest_test.go b/gazelle/manifest/manifest_test.go
index 43c4099..e80c7fc 100644
--- a/gazelle/manifest/manifest_test.go
+++ b/gazelle/manifest/manifest_test.go
@@ -26,21 +26,13 @@
 )
 
 var modulesMapping = manifest.ModulesMapping{
-	"arrow":           "arrow",
-	"arrow.__init__":  "arrow",
-	"arrow.api":       "arrow",
-	"arrow.arrow":     "arrow",
-	"arrow.factory":   "arrow",
-	"arrow.formatter": "arrow",
-	"arrow.locales":   "arrow",
-	"arrow.parser":    "arrow",
-	"arrow.util":      "arrow",
+	"arrow": "arrow",
 }
 
 const pipDepsRepositoryName = "test_repository_name"
 
 func TestFile(t *testing.T) {
-	t.Run("Encode", func(t *testing.T) {
+	t.Run("EncodeWithIntegrity", func(t *testing.T) {
 		f := manifest.NewFile(&manifest.Manifest{
 			ModulesMapping:        modulesMapping,
 			PipDepsRepositoryName: pipDepsRepositoryName,
@@ -53,7 +45,7 @@
 			t.FailNow()
 		}
 		defer requirements.Close()
-		if err := f.Encode(&b, manifestGeneratorHashFile, requirements); err != nil {
+		if err := f.EncodeWithIntegrity(&b, manifestGeneratorHashFile, requirements); err != nil {
 			log.Println(err)
 			t.FailNow()
 		}
diff --git a/gazelle/manifest/test/test.go b/gazelle/manifest/test/test.go
index 72cb260..506c7d2 100644
--- a/gazelle/manifest/test/test.go
+++ b/gazelle/manifest/test/test.go
@@ -26,6 +26,7 @@
 	"path/filepath"
 	"testing"
 
+	"github.com/bazelbuild/rules_go/go/runfiles"
 	"github.com/bazelbuild/rules_python/gazelle/manifest"
 )
 
@@ -49,7 +50,12 @@
 		t.Fatal("failed to find the Gazelle manifest file integrity")
 	}
 
-	manifestGeneratorHashPath := os.Getenv("_TEST_MANIFEST_GENERATOR_HASH")
+	manifestGeneratorHashPath, err := runfiles.Rlocation(
+		os.Getenv("_TEST_MANIFEST_GENERATOR_HASH"))
+	if err != nil {
+		t.Fatal("failed to resolve runfiles path of manifest: %v", err)
+	}
+
 	manifestGeneratorHash, err := os.Open(manifestGeneratorHashPath)
 	if err != nil {
 		t.Fatalf("opening %q: %v", manifestGeneratorHashPath, err)
diff --git a/gazelle/manifest/testdata/gazelle_python.yaml b/gazelle/manifest/testdata/gazelle_python.yaml
index 70f7aff..1f3e03d 100644
--- a/gazelle/manifest/testdata/gazelle_python.yaml
+++ b/gazelle/manifest/testdata/gazelle_python.yaml
@@ -1,13 +1,5 @@
 manifest:
   modules_mapping:
     arrow: arrow
-    arrow.__init__: arrow
-    arrow.api: arrow
-    arrow.arrow: arrow
-    arrow.factory: arrow
-    arrow.formatter: arrow
-    arrow.locales: arrow
-    arrow.parser: arrow
-    arrow.util: arrow
   pip_deps_repository_name: test_repository_name
-integrity: eedf187f8b7ec27cdfc682feee4206e063b51d13d78f77c05d3a30ec11bd7411
+integrity: 96be4e5a31aa39b52e2591c00ffd9265c5a96ece99a7687c21ff7732a38da6dc
diff --git a/gazelle/modules_mapping/generator.py b/gazelle/modules_mapping/generator.py
index be57eac..bbd579d 100644
--- a/gazelle/modules_mapping/generator.py
+++ b/gazelle/modules_mapping/generator.py
@@ -45,6 +45,23 @@
                 else:
                     self.module_for_path(path, whl)
 
+    def simplify(self):
+        simplified = {}
+        for module, wheel_name in sorted(self.mapping.items(), key=lambda x: x[0]):
+            mod = module
+            while True:
+                if mod in simplified:
+                    if simplified[mod] != wheel_name:
+                        break
+                    wheel_name = ""
+                    break
+                if mod.count(".") == 0:
+                    break
+                mod = mod.rsplit(".", 1)[0]
+            if wheel_name:
+                simplified[module] = wheel_name
+        self.mapping = simplified
+
     def module_for_path(self, path, whl):
         ext = pathlib.Path(path).suffix
         if ext == ".py" or ext == ".so":
@@ -70,7 +87,8 @@
                 ext = "".join(pathlib.Path(root).suffixes)
             module = root[: -len(ext)].replace("/", ".")
             if not self.is_excluded(module):
-                self.mapping[module] = wheel_name
+                if not self.is_excluded(module):
+                    self.mapping[module] = wheel_name
 
     def is_excluded(self, module):
         for pattern in self.excluded_patterns:
@@ -86,6 +104,7 @@
             except AssertionError as error:
                 print(error, file=self.stderr)
                 return 1
+        self.simplify()
         mapping_json = json.dumps(self.mapping)
         with open(self.output_file, "w") as f:
             f.write(mapping_json)
diff --git a/gazelle/python/BUILD.bazel b/gazelle/python/BUILD.bazel
index fd051eb..627a867 100644
--- a/gazelle/python/BUILD.bazel
+++ b/gazelle/python/BUILD.bazel
@@ -1,31 +1,31 @@
 load("@bazel_gazelle//:def.bzl", "gazelle_binary")
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-load("@rules_python//python:defs.bzl", "py_binary", "py_test")
+load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
+load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
 load(":gazelle_test.bzl", "gazelle_test")
 
 go_library(
     name = "python",
     srcs = [
         "configure.go",
+        "file_parser.go",
         "fix.go",
         "generate.go",
         "kinds.go",
         "language.go",
-        "lifecycle.go",
         "parser.go",
         "resolve.go",
         "std_modules.go",
         "target.go",
     ],
     # NOTE @aignas 2023-12-03: currently gazelle does not support embedding
-    # generated files, but helper.zip is generated by a build rule.
+    # generated files, but 3.11.txt is generated by a build rule.
     #
     # You will get a benign error like when running gazelle locally:
-    # > 8 gazelle: .../rules_python/gazelle/python/lifecycle.go:26:3: pattern helper.zip: matched no files
+    # > 8 gazelle: .../rules_python/gazelle/python/std_modules.go:24:3: pattern 3.11.txt: matched no files
     #
     # See following for more info:
     # https://github.com/bazelbuild/bazel-gazelle/issues/1513
-    embedsrcs = [":helper.zip"],  # keep
+    embedsrcs = ["stdlib_list.txt"],  # keep # TODO: use user-defined version?
     importpath = "github.com/bazelbuild/rules_python/gazelle/python",
     visibility = ["//visibility:public"],
     deps = [
@@ -38,39 +38,31 @@
         "@bazel_gazelle//resolve:go_default_library",
         "@bazel_gazelle//rule:go_default_library",
         "@com_github_bazelbuild_buildtools//build:go_default_library",
-        "@com_github_bmatcuk_doublestar//:doublestar",
+        "@com_github_bmatcuk_doublestar_v4//:doublestar",
         "@com_github_emirpasic_gods//lists/singlylinkedlist",
         "@com_github_emirpasic_gods//sets/treeset",
         "@com_github_emirpasic_gods//utils",
+        "@com_github_smacker_go_tree_sitter//:go-tree-sitter",
+        "@com_github_smacker_go_tree_sitter//python",
+        "@org_golang_x_sync//errgroup",
     ],
 )
 
-py_binary(
-    name = "helper",
-    srcs = [
-        "__main__.py",
-        "parse.py",
-        "std_modules.py",
-    ],
-    # This is to make sure that the current directory is added to PYTHONPATH
-    imports = ["."],
-    main = "__main__.py",
-    visibility = ["//visibility:public"],
-)
-
-py_test(
-    name = "parse_test",
-    srcs = [
-        "parse.py",
-        "parse_test.py",
-    ],
-    imports = ["."],
-)
-
-filegroup(
-    name = "helper.zip",
-    srcs = [":helper"],
-    output_group = "python_zip_file",
+copy_file(
+    name = "stdlib_list",
+    src = select(
+        {
+            "@rules_python//python/config_settings:is_python_3.10": "@python_stdlib_list//:stdlib_list/lists/3.10.txt",
+            "@rules_python//python/config_settings:is_python_3.11": "@python_stdlib_list//:stdlib_list/lists/3.11.txt",
+            "@rules_python//python/config_settings:is_python_3.12": "@python_stdlib_list//:stdlib_list/lists/3.12.txt",
+            "@rules_python//python/config_settings:is_python_3.8": "@python_stdlib_list//:stdlib_list/lists/3.8.txt",
+            "@rules_python//python/config_settings:is_python_3.9": "@python_stdlib_list//:stdlib_list/lists/3.9.txt",
+            # This is the same behaviour as previously
+            "//conditions:default": "@python_stdlib_list//:stdlib_list/lists/3.11.txt",
+        },
+    ),
+    out = "stdlib_list.txt",
+    allow_symlink = True,
 )
 
 # gazelle:exclude testdata/
@@ -80,7 +72,6 @@
     srcs = ["python_test.go"],
     data = [
         ":gazelle_binary",
-        ":helper",
     ],
     test_dirs = glob(
         # Use this so that we don't need to manually maintain the list.
@@ -109,3 +100,15 @@
     srcs = glob(["**"]),
     visibility = ["//:__pkg__"],
 )
+
+go_test(
+    name = "default_test",
+    srcs = [
+        "file_parser_test.go",
+        "std_modules_test.go",
+    ],
+    embed = [":python"],
+    deps = [
+        "@com_github_stretchr_testify//assert",
+    ],
+)
diff --git a/gazelle/python/__main__.py b/gazelle/python/__main__.py
deleted file mode 100644
index 9974c66..0000000
--- a/gazelle/python/__main__.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# parse.py is a long-living program that communicates over STDIN and STDOUT.
-# STDIN receives parse requests, one per line. It outputs the parsed modules and
-# comments from all the files from each request.
-
-import sys
-
-import parse
-import std_modules
-
-if __name__ == "__main__":
-    if len(sys.argv) < 2:
-        sys.exit("Please provide subcommand, either parse or std_modules")
-    if sys.argv[1] == "parse":
-        sys.exit(parse.main(sys.stdin, sys.stdout))
-    elif sys.argv[1] == "std_modules":
-        sys.exit(std_modules.main(sys.stdin, sys.stdout))
-    else:
-        sys.exit("Unknown subcommand: " + sys.argv[1])
diff --git a/gazelle/python/configure.go b/gazelle/python/configure.go
index 69d2762..a369a64 100644
--- a/gazelle/python/configure.go
+++ b/gazelle/python/configure.go
@@ -25,6 +25,7 @@
 
 	"github.com/bazelbuild/bazel-gazelle/config"
 	"github.com/bazelbuild/bazel-gazelle/rule"
+	"github.com/bmatcuk/doublestar/v4"
 
 	"github.com/bazelbuild/rules_python/gazelle/manifest"
 	"github.com/bazelbuild/rules_python/gazelle/pythonconfig"
@@ -60,9 +61,15 @@
 		pythonconfig.ValidateImportStatementsDirective,
 		pythonconfig.GenerationMode,
 		pythonconfig.GenerationModePerFileIncludeInit,
+		pythonconfig.GenerationModePerPackageRequireTestEntryPoint,
 		pythonconfig.LibraryNamingConvention,
 		pythonconfig.BinaryNamingConvention,
 		pythonconfig.TestNamingConvention,
+		pythonconfig.DefaultVisibilty,
+		pythonconfig.Visibility,
+		pythonconfig.TestFilePattern,
+		pythonconfig.LabelConvention,
+		pythonconfig.LabelNormalization,
 	}
 }
 
@@ -118,6 +125,7 @@
 			}
 		case pythonconfig.PythonRootDirective:
 			config.SetPythonProjectRoot(rel)
+			config.SetDefaultVisibility([]string{fmt.Sprintf(pythonconfig.DefaultVisibilityFmtString, rel)})
 		case pythonconfig.PythonManifestFileNameDirective:
 			gazelleManifestFilename = strings.TrimSpace(d.Value)
 		case pythonconfig.IgnoreFilesDirective:
@@ -156,12 +164,66 @@
 				log.Fatal(err)
 			}
 			config.SetPerFileGenerationIncludeInit(v)
+		case pythonconfig.GenerationModePerPackageRequireTestEntryPoint:
+			v, err := strconv.ParseBool(strings.TrimSpace(d.Value))
+			if err != nil {
+				log.Printf("invalid value for gazelle:%s in %q: %q",
+					pythonconfig.GenerationModePerPackageRequireTestEntryPoint, rel, d.Value)
+			} else {
+				config.SetPerPackageGenerationRequireTestEntryPoint(v)
+			}
 		case pythonconfig.LibraryNamingConvention:
 			config.SetLibraryNamingConvention(strings.TrimSpace(d.Value))
 		case pythonconfig.BinaryNamingConvention:
 			config.SetBinaryNamingConvention(strings.TrimSpace(d.Value))
 		case pythonconfig.TestNamingConvention:
 			config.SetTestNamingConvention(strings.TrimSpace(d.Value))
+		case pythonconfig.DefaultVisibilty:
+			switch directiveArg := strings.TrimSpace(d.Value); directiveArg {
+			case "NONE":
+				config.SetDefaultVisibility([]string{})
+			case "DEFAULT":
+				pythonProjectRoot := config.PythonProjectRoot()
+				defaultVisibility := fmt.Sprintf(pythonconfig.DefaultVisibilityFmtString, pythonProjectRoot)
+				config.SetDefaultVisibility([]string{defaultVisibility})
+			default:
+				// Handle injecting the python root. Assume that the user used the
+				// exact string "$python_root$".
+				labels := strings.ReplaceAll(directiveArg, "$python_root$", config.PythonProjectRoot())
+				config.SetDefaultVisibility(strings.Split(labels, ","))
+			}
+		case pythonconfig.Visibility:
+			labels := strings.ReplaceAll(strings.TrimSpace(d.Value), "$python_root$", config.PythonProjectRoot())
+			config.AppendVisibility(labels)
+		case pythonconfig.TestFilePattern:
+			value := strings.TrimSpace(d.Value)
+			if value == "" {
+				log.Fatal("directive 'python_test_file_pattern' requires a value")
+			}
+			globStrings := strings.Split(value, ",")
+			for _, g := range globStrings {
+				if !doublestar.ValidatePattern(g) {
+					log.Fatalf("invalid glob pattern '%s'", g)
+				}
+			}
+			config.SetTestFilePattern(globStrings)
+		case pythonconfig.LabelConvention:
+			value := strings.TrimSpace(d.Value)
+			if value == "" {
+				log.Fatalf("directive '%s' requires a value", pythonconfig.LabelConvention)
+			}
+			config.SetLabelConvention(value)
+		case pythonconfig.LabelNormalization:
+			switch directiveArg := strings.ToLower(strings.TrimSpace(d.Value)); directiveArg {
+			case "pep503":
+				config.SetLabelNormalization(pythonconfig.Pep503LabelNormalizationType)
+			case "none":
+				config.SetLabelNormalization(pythonconfig.NoLabelNormalizationType)
+			case "snake_case":
+				config.SetLabelNormalization(pythonconfig.SnakeCaseLabelNormalizationType)
+			default:
+				config.SetLabelNormalization(pythonconfig.DefaultLabelNormalizationType)
+			}
 		}
 	}
 
diff --git a/gazelle/python/extensions.bzl b/gazelle/python/extensions.bzl
new file mode 100644
index 0000000..8d339c0
--- /dev/null
+++ b/gazelle/python/extensions.bzl
@@ -0,0 +1,5 @@
+"python_stdlib_list module extension for use with bzlmod"
+
+load("//python/private:extensions.bzl", _python_stdlib_list = "python_stdlib_list")
+
+python_stdlib_list = _python_stdlib_list
diff --git a/gazelle/python/file_parser.go b/gazelle/python/file_parser.go
new file mode 100644
index 0000000..a2b22c2
--- /dev/null
+++ b/gazelle/python/file_parser.go
@@ -0,0 +1,201 @@
+// Copyright 2023 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package python
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+
+	sitter "github.com/smacker/go-tree-sitter"
+	"github.com/smacker/go-tree-sitter/python"
+)
+
+const (
+	sitterNodeTypeString              = "string"
+	sitterNodeTypeComment             = "comment"
+	sitterNodeTypeIdentifier          = "identifier"
+	sitterNodeTypeDottedName          = "dotted_name"
+	sitterNodeTypeIfStatement         = "if_statement"
+	sitterNodeTypeAliasedImport       = "aliased_import"
+	sitterNodeTypeWildcardImport      = "wildcard_import"
+	sitterNodeTypeImportStatement     = "import_statement"
+	sitterNodeTypeComparisonOperator  = "comparison_operator"
+	sitterNodeTypeImportFromStatement = "import_from_statement"
+)
+
+type ParserOutput struct {
+	FileName string
+	Modules  []module
+	Comments []comment
+	HasMain  bool
+}
+
+type FileParser struct {
+	code        []byte
+	relFilepath string
+	output      ParserOutput
+}
+
+func NewFileParser() *FileParser {
+	return &FileParser{}
+}
+
+func ParseCode(code []byte) (*sitter.Node, error) {
+	parser := sitter.NewParser()
+	parser.SetLanguage(python.GetLanguage())
+
+	tree, err := parser.ParseCtx(context.Background(), nil, code)
+	if err != nil {
+		return nil, err
+	}
+
+	return tree.RootNode(), nil
+}
+
+func (p *FileParser) parseMain(ctx context.Context, node *sitter.Node) bool {
+	for i := 0; i < int(node.ChildCount()); i++ {
+		if err := ctx.Err(); err != nil {
+			return false
+		}
+		child := node.Child(i)
+		if child.Type() == sitterNodeTypeIfStatement &&
+			child.Child(1).Type() == sitterNodeTypeComparisonOperator && child.Child(1).Child(1).Type() == "==" {
+			statement := child.Child(1)
+			a, b := statement.Child(0), statement.Child(2)
+			// convert "'__main__' == __name__" to "__name__ == '__main__'"
+			if b.Type() == sitterNodeTypeIdentifier {
+				a, b = b, a
+			}
+			if a.Type() == sitterNodeTypeIdentifier && a.Content(p.code) == "__name__" &&
+				// at github.com/smacker/go-tree-sitter@latest (after v0.0.0-20240422154435-0628b34cbf9c we used)
+				// "__main__" is the second child of b. But now, it isn't.
+				// we cannot use the latest go-tree-sitter because of the top level reference in scanner.c.
+				// https://github.com/smacker/go-tree-sitter/blob/04d6b33fe138a98075210f5b770482ded024dc0f/python/scanner.c#L1
+				b.Type() == sitterNodeTypeString && string(p.code[b.StartByte()+1:b.EndByte()-1]) == "__main__" {
+				return true
+			}
+		}
+	}
+	return false
+}
+
+func parseImportStatement(node *sitter.Node, code []byte) (module, bool) {
+	switch node.Type() {
+	case sitterNodeTypeDottedName:
+		return module{
+			Name:       node.Content(code),
+			LineNumber: node.StartPoint().Row + 1,
+		}, true
+	case sitterNodeTypeAliasedImport:
+		return parseImportStatement(node.Child(0), code)
+	case sitterNodeTypeWildcardImport:
+		return module{
+			Name:       "*",
+			LineNumber: node.StartPoint().Row + 1,
+		}, true
+	}
+	return module{}, false
+}
+
+func (p *FileParser) parseImportStatements(node *sitter.Node) bool {
+	if node.Type() == sitterNodeTypeImportStatement {
+		for j := 1; j < int(node.ChildCount()); j++ {
+			m, ok := parseImportStatement(node.Child(j), p.code)
+			if !ok {
+				continue
+			}
+			m.Filepath = p.relFilepath
+			if strings.HasPrefix(m.Name, ".") {
+				continue
+			}
+			p.output.Modules = append(p.output.Modules, m)
+		}
+	} else if node.Type() == sitterNodeTypeImportFromStatement {
+		from := node.Child(1).Content(p.code)
+		if strings.HasPrefix(from, ".") {
+			return true
+		}
+		for j := 3; j < int(node.ChildCount()); j++ {
+			m, ok := parseImportStatement(node.Child(j), p.code)
+			if !ok {
+				continue
+			}
+			m.Filepath = p.relFilepath
+			m.From = from
+			m.Name = fmt.Sprintf("%s.%s", from, m.Name)
+			p.output.Modules = append(p.output.Modules, m)
+		}
+	} else {
+		return false
+	}
+	return true
+}
+
+func (p *FileParser) parseComments(node *sitter.Node) bool {
+	if node.Type() == sitterNodeTypeComment {
+		p.output.Comments = append(p.output.Comments, comment(node.Content(p.code)))
+		return true
+	}
+	return false
+}
+
+func (p *FileParser) SetCodeAndFile(code []byte, relPackagePath, filename string) {
+	p.code = code
+	p.relFilepath = filepath.Join(relPackagePath, filename)
+	p.output.FileName = filename
+}
+
+func (p *FileParser) parse(ctx context.Context, node *sitter.Node) {
+	if node == nil {
+		return
+	}
+	for i := 0; i < int(node.ChildCount()); i++ {
+		if err := ctx.Err(); err != nil {
+			return
+		}
+		child := node.Child(i)
+		if p.parseImportStatements(child) {
+			continue
+		}
+		if p.parseComments(child) {
+			continue
+		}
+		p.parse(ctx, child)
+	}
+}
+
+func (p *FileParser) Parse(ctx context.Context) (*ParserOutput, error) {
+	rootNode, err := ParseCode(p.code)
+	if err != nil {
+		return nil, err
+	}
+
+	p.output.HasMain = p.parseMain(ctx, rootNode)
+
+	p.parse(ctx, rootNode)
+	return &p.output, nil
+}
+
+func (p *FileParser) ParseFile(ctx context.Context, repoRoot, relPackagePath, filename string) (*ParserOutput, error) {
+	code, err := os.ReadFile(filepath.Join(repoRoot, relPackagePath, filename))
+	if err != nil {
+		return nil, err
+	}
+	p.SetCodeAndFile(code, relPackagePath, filename)
+	return p.Parse(ctx)
+}
diff --git a/gazelle/python/file_parser_test.go b/gazelle/python/file_parser_test.go
new file mode 100644
index 0000000..3682cff
--- /dev/null
+++ b/gazelle/python/file_parser_test.go
@@ -0,0 +1,256 @@
+// Copyright 2023 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package python
+
+import (
+	"context"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestParseImportStatements(t *testing.T) {
+	t.Parallel()
+	units := []struct {
+		name     string
+		code     string
+		filepath string
+		result   []module
+	}{
+		{
+			name:     "not has import",
+			code:     "a = 1\nb = 2",
+			filepath: "",
+			result:   nil,
+		},
+		{
+			name:     "has import",
+			code:     "import unittest\nimport os.path\nfrom foo.bar import abc.xyz",
+			filepath: "abc.py",
+			result: []module{
+				{
+					Name:       "unittest",
+					LineNumber: 1,
+					Filepath:   "abc.py",
+					From:       "",
+				},
+				{
+					Name:       "os.path",
+					LineNumber: 2,
+					Filepath:   "abc.py",
+					From:       "",
+				},
+				{
+					Name:       "foo.bar.abc.xyz",
+					LineNumber: 3,
+					Filepath:   "abc.py",
+					From:       "foo.bar",
+				},
+			},
+		},
+		{
+			name: "has import in def",
+			code: `def foo():
+	import unittest
+`,
+			filepath: "abc.py",
+			result: []module{
+				{
+					Name:       "unittest",
+					LineNumber: 2,
+					Filepath:   "abc.py",
+					From:       "",
+				},
+			},
+		},
+		{
+			name:     "invalid syntax",
+			code:     "import os\nimport",
+			filepath: "abc.py",
+			result: []module{
+				{
+					Name:       "os",
+					LineNumber: 1,
+					Filepath:   "abc.py",
+					From:       "",
+				},
+			},
+		},
+		{
+			name:     "import as",
+			code:     "import os as b\nfrom foo import bar as c# 123",
+			filepath: "abc.py",
+			result: []module{
+				{
+					Name:       "os",
+					LineNumber: 1,
+					Filepath:   "abc.py",
+					From:       "",
+				},
+				{
+					Name:       "foo.bar",
+					LineNumber: 2,
+					Filepath:   "abc.py",
+					From:       "foo",
+				},
+			},
+		},
+		// align to https://docs.python.org/3/reference/simple_stmts.html#index-34
+		{
+			name: "complex import",
+			code: "from unittest import *\nfrom foo import (bar as c, baz, qux as d)\nfrom . import abc",
+			result: []module{
+				{
+					Name:       "unittest.*",
+					LineNumber: 1,
+					From:       "unittest",
+				},
+				{
+					Name:       "foo.bar",
+					LineNumber: 2,
+					From:       "foo",
+				},
+				{
+					Name:       "foo.baz",
+					LineNumber: 2,
+					From:       "foo",
+				},
+				{
+					Name:       "foo.qux",
+					LineNumber: 2,
+					From:       "foo",
+				},
+			},
+		},
+	}
+	for _, u := range units {
+		t.Run(u.name, func(t *testing.T) {
+			p := NewFileParser()
+			code := []byte(u.code)
+			p.SetCodeAndFile(code, "", u.filepath)
+			output, err := p.Parse(context.Background())
+			assert.NoError(t, err)
+			assert.Equal(t, u.result, output.Modules)
+		})
+	}
+}
+
+func TestParseComments(t *testing.T) {
+	t.Parallel()
+	units := []struct {
+		name   string
+		code   string
+		result []comment
+	}{
+		{
+			name:   "not has comment",
+			code:   "a = 1\nb = 2",
+			result: nil,
+		},
+		{
+			name:   "has comment",
+			code:   "# a = 1\n# b = 2",
+			result: []comment{"# a = 1", "# b = 2"},
+		},
+		{
+			name:   "has comment in if",
+			code:   "if True:\n  # a = 1\n  # b = 2",
+			result: []comment{"# a = 1", "# b = 2"},
+		},
+		{
+			name:   "has comment inline",
+			code:   "import os# 123\nfrom pathlib import Path as b#456",
+			result: []comment{"# 123", "#456"},
+		},
+	}
+	for _, u := range units {
+		t.Run(u.name, func(t *testing.T) {
+			p := NewFileParser()
+			code := []byte(u.code)
+			p.SetCodeAndFile(code, "", "")
+			output, err := p.Parse(context.Background())
+			assert.NoError(t, err)
+			assert.Equal(t, u.result, output.Comments)
+		})
+	}
+}
+
+func TestParseMain(t *testing.T) {
+	t.Parallel()
+	units := []struct {
+		name   string
+		code   string
+		result bool
+	}{
+		{
+			name:   "not has main",
+			code:   "a = 1\nb = 2",
+			result: false,
+		},
+		{
+			name: "has main in function",
+			code: `def foo():
+	if __name__ == "__main__":
+		a = 3
+`,
+			result: false,
+		},
+		{
+			name: "has main",
+			code: `
+import unittest
+
+from lib import main
+
+
+class ExampleTest(unittest.TestCase):
+    def test_main(self):
+        self.assertEqual(
+            "",
+            main([["A", 1], ["B", 2]]),
+        )
+
+
+if __name__ == "__main__":
+    unittest.main()
+`,
+			result: true,
+		},
+	}
+	for _, u := range units {
+		t.Run(u.name, func(t *testing.T) {
+			p := NewFileParser()
+			code := []byte(u.code)
+			p.SetCodeAndFile(code, "", "")
+			output, err := p.Parse(context.Background())
+			assert.NoError(t, err)
+			assert.Equal(t, u.result, output.HasMain)
+		})
+	}
+}
+
+func TestParseFull(t *testing.T) {
+	p := NewFileParser()
+	code := []byte(`from bar import abc`)
+	p.SetCodeAndFile(code, "foo", "a.py")
+	output, err := p.Parse(context.Background())
+	assert.NoError(t, err)
+	assert.Equal(t, ParserOutput{
+		Modules:  []module{{Name: "bar.abc", LineNumber: 1, Filepath: "foo/a.py", From: "bar"}},
+		Comments: nil,
+		HasMain:  false,
+		FileName: "a.py",
+	}, *output)
+}
diff --git a/gazelle/python/generate.go b/gazelle/python/generate.go
index 95f5396..c563b47 100644
--- a/gazelle/python/generate.go
+++ b/gazelle/python/generate.go
@@ -27,11 +27,12 @@
 	"github.com/bazelbuild/bazel-gazelle/label"
 	"github.com/bazelbuild/bazel-gazelle/language"
 	"github.com/bazelbuild/bazel-gazelle/rule"
-	"github.com/bazelbuild/rules_python/gazelle/pythonconfig"
-	"github.com/bmatcuk/doublestar"
+	"github.com/bmatcuk/doublestar/v4"
 	"github.com/emirpasic/gods/lists/singlylinkedlist"
 	"github.com/emirpasic/gods/sets/treeset"
 	godsutils "github.com/emirpasic/gods/utils"
+
+	"github.com/bazelbuild/rules_python/gazelle/pythonconfig"
 )
 
 const (
@@ -54,6 +55,17 @@
 	return kind
 }
 
+func matchesAnyGlob(s string, globs []string) bool {
+	// This function assumes that the globs have already been validated. If a glob is
+	// invalid, it's considered a non-match and we move on to the next pattern.
+	for _, g := range globs {
+		if ok, _ := doublestar.Match(g, s); ok {
+			return true
+		}
+	}
+	return false
+}
+
 // GenerateRules extracts build metadata from source files in a directory.
 // GenerateRules is called in each directory where an update is requested
 // in depth-first post-order.
@@ -100,6 +112,8 @@
 	hasPyTestEntryPointTarget := false
 	hasConftestFile := false
 
+	testFileGlobs := cfg.TestFilePattern()
+
 	for _, f := range args.RegularFiles {
 		if cfg.IgnoresFile(filepath.Base(f)) {
 			continue
@@ -113,7 +127,7 @@
 				hasPyTestEntryPointFile = true
 			} else if f == conftestFilename {
 				hasConftestFile = true
-			} else if strings.HasSuffix(f, "_test.py") || strings.HasPrefix(f, "test_") {
+			} else if matchesAnyGlob(f, testFileGlobs) {
 				pyTestFilenames.Add(f)
 			} else {
 				pyLibraryFilenames.Add(f)
@@ -195,7 +209,7 @@
 							}
 						}
 						baseName := filepath.Base(path)
-						if strings.HasSuffix(baseName, "_test.py") || strings.HasPrefix(baseName, "test_") {
+						if matchesAnyGlob(baseName, testFileGlobs) {
 							pyTestFilenames.Add(srcPath)
 						} else {
 							pyLibraryFilenames.Add(srcPath)
@@ -212,7 +226,7 @@
 	}
 
 	parser := newPython3Parser(args.Config.RepoRoot, args.Rel, cfg.IgnoresDependency)
-	visibility := fmt.Sprintf("//%s:__subpackages__", pythonProjectRoot)
+	visibility := cfg.Visibility()
 
 	var result language.GenerateResult
 	result.Gen = make([]*rule.Rule, 0)
@@ -220,28 +234,22 @@
 	collisionErrors := singlylinkedlist.New()
 
 	appendPyLibrary := func(srcs *treeset.Set, pyLibraryTargetName string) {
-		allDeps, mainModules, err := parser.parse(srcs)
+		allDeps, mainModules, annotations, err := parser.parse(srcs)
 		if err != nil {
 			log.Fatalf("ERROR: %v\n", err)
 		}
 
-		// Check if a target with the same name we are generating already
-		// exists, and if it is of a different kind from the one we are
-		// generating. If so, we have to throw an error since Gazelle won't
-		// generate it correctly.
-		if err := ensureNoCollision(args.File, pyLibraryTargetName, actualPyLibraryKind); err != nil {
-			fqTarget := label.New("", args.Rel, pyLibraryTargetName)
-			err := fmt.Errorf("failed to generate target %q of kind %q: %w. "+
-				"Use the '# gazelle:%s' directive to change the naming convention.",
-				fqTarget.String(), actualPyLibraryKind, err, pythonconfig.LibraryNamingConvention)
-			collisionErrors.Add(err)
-		}
-
 		if !hasPyBinaryEntryPointFile {
 			// Creating one py_binary target per main module when __main__.py doesn't exist.
 			mainFileNames := make([]string, 0, len(mainModules))
 			for name := range mainModules {
 				mainFileNames = append(mainFileNames, name)
+
+				// Remove the file from srcs if we're doing per-file library generation so
+				// that we don't also generate a py_library target for it.
+				if cfg.PerFileGeneration() {
+					srcs.Remove(name)
+				}
 			}
 			sort.Strings(mainFileNames)
 			for _, filename := range mainFileNames {
@@ -256,21 +264,56 @@
 					addVisibility(visibility).
 					addSrc(filename).
 					addModuleDependencies(mainModules[filename]).
+					addResolvedDependencies(annotations.includeDeps).
 					generateImportsAttribute().build()
 				result.Gen = append(result.Gen, pyBinary)
 				result.Imports = append(result.Imports, pyBinary.PrivateAttr(config.GazelleImportsKey))
 			}
 		}
 
+		// If we're doing per-file generation, srcs could be empty at this point, meaning we shouldn't make a py_library.
+		// If there is already a package named py_library target before, we should generate an empty py_library.
+		if srcs.Empty() {
+			if args.File == nil {
+				return
+			}
+			generateEmptyLibrary := false
+			for _, r := range args.File.Rules {
+				if r.Kind() == actualPyLibraryKind && r.Name() == pyLibraryTargetName {
+					generateEmptyLibrary = true
+				}
+			}
+			if !generateEmptyLibrary {
+				return
+			}
+		}
+
+		// Check if a target with the same name we are generating already
+		// exists, and if it is of a different kind from the one we are
+		// generating. If so, we have to throw an error since Gazelle won't
+		// generate it correctly.
+		if err := ensureNoCollision(args.File, pyLibraryTargetName, actualPyLibraryKind); err != nil {
+			fqTarget := label.New("", args.Rel, pyLibraryTargetName)
+			err := fmt.Errorf("failed to generate target %q of kind %q: %w. "+
+				"Use the '# gazelle:%s' directive to change the naming convention.",
+				fqTarget.String(), actualPyLibraryKind, err, pythonconfig.LibraryNamingConvention)
+			collisionErrors.Add(err)
+		}
+
 		pyLibrary := newTargetBuilder(pyLibraryKind, pyLibraryTargetName, pythonProjectRoot, args.Rel, pyFileNames).
 			addVisibility(visibility).
 			addSrcs(srcs).
 			addModuleDependencies(allDeps).
+			addResolvedDependencies(annotations.includeDeps).
 			generateImportsAttribute().
 			build()
 
-		result.Gen = append(result.Gen, pyLibrary)
-		result.Imports = append(result.Imports, pyLibrary.PrivateAttr(config.GazelleImportsKey))
+		if pyLibrary.IsEmpty(py.Kinds()[pyLibrary.Kind()]) {
+			result.Empty = append(result.Gen, pyLibrary)
+		} else {
+			result.Gen = append(result.Gen, pyLibrary)
+			result.Imports = append(result.Imports, pyLibrary.PrivateAttr(config.GazelleImportsKey))
+		}
 	}
 	if cfg.PerFileGeneration() {
 		hasInit, nonEmptyInit := hasLibraryEntrypointFile(args.Dir)
@@ -285,12 +328,12 @@
 			}
 			appendPyLibrary(srcs, pyLibraryTargetName)
 		})
-	} else if !pyLibraryFilenames.Empty() {
+	} else {
 		appendPyLibrary(pyLibraryFilenames, cfg.RenderLibraryName(packageName))
 	}
 
 	if hasPyBinaryEntryPointFile {
-		deps, _, err := parser.parseSingle(pyBinaryEntrypointFilename)
+		deps, _, annotations, err := parser.parseSingle(pyBinaryEntrypointFilename)
 		if err != nil {
 			log.Fatalf("ERROR: %v\n", err)
 		}
@@ -314,6 +357,7 @@
 			addVisibility(visibility).
 			addSrc(pyBinaryEntrypointFilename).
 			addModuleDependencies(deps).
+			addResolvedDependencies(annotations.includeDeps).
 			generateImportsAttribute()
 
 		pyBinary := pyBinaryTarget.build()
@@ -324,7 +368,7 @@
 
 	var conftest *rule.Rule
 	if hasConftestFile {
-		deps, _, err := parser.parseSingle(conftestFilename)
+		deps, _, annotations, err := parser.parseSingle(conftestFilename)
 		if err != nil {
 			log.Fatalf("ERROR: %v\n", err)
 		}
@@ -343,6 +387,7 @@
 		conftestTarget := newTargetBuilder(pyLibraryKind, conftestTargetname, pythonProjectRoot, args.Rel, pyFileNames).
 			addSrc(conftestFilename).
 			addModuleDependencies(deps).
+			addResolvedDependencies(annotations.includeDeps).
 			addVisibility(visibility).
 			setTestonly().
 			generateImportsAttribute()
@@ -355,7 +400,7 @@
 
 	var pyTestTargets []*targetBuilder
 	newPyTestTargetBuilder := func(srcs *treeset.Set, pyTestTargetName string) *targetBuilder {
-		deps, _, err := parser.parse(srcs)
+		deps, _, annotations, err := parser.parse(srcs)
 		if err != nil {
 			log.Fatalf("ERROR: %v\n", err)
 		}
@@ -373,29 +418,35 @@
 		return newTargetBuilder(pyTestKind, pyTestTargetName, pythonProjectRoot, args.Rel, pyFileNames).
 			addSrcs(srcs).
 			addModuleDependencies(deps).
+			addResolvedDependencies(annotations.includeDeps).
 			generateImportsAttribute()
 	}
-	if (hasPyTestEntryPointFile || hasPyTestEntryPointTarget || cfg.CoarseGrainedGeneration()) && !cfg.PerFileGeneration() {
+	if (!cfg.PerPackageGenerationRequireTestEntryPoint() || hasPyTestEntryPointFile || hasPyTestEntryPointTarget || cfg.CoarseGrainedGeneration()) && !cfg.PerFileGeneration() {
 		// Create one py_test target per package
 		if hasPyTestEntryPointFile {
 			// Only add the pyTestEntrypointFilename to the pyTestFilenames if
 			// the file exists on disk.
 			pyTestFilenames.Add(pyTestEntrypointFilename)
 		}
-		pyTestTargetName := cfg.RenderTestName(packageName)
-		pyTestTarget := newPyTestTargetBuilder(pyTestFilenames, pyTestTargetName)
+		if hasPyTestEntryPointTarget || !pyTestFilenames.Empty() {
+			pyTestTargetName := cfg.RenderTestName(packageName)
+			pyTestTarget := newPyTestTargetBuilder(pyTestFilenames, pyTestTargetName)
 
-		if hasPyTestEntryPointTarget {
-			entrypointTarget := fmt.Sprintf(":%s", pyTestEntrypointTargetname)
-			main := fmt.Sprintf(":%s", pyTestEntrypointFilename)
-			pyTestTarget.
-				addSrc(entrypointTarget).
-				addResolvedDependency(entrypointTarget).
-				setMain(main)
-		} else {
-			pyTestTarget.setMain(pyTestEntrypointFilename)
+			if hasPyTestEntryPointTarget {
+				entrypointTarget := fmt.Sprintf(":%s", pyTestEntrypointTargetname)
+				main := fmt.Sprintf(":%s", pyTestEntrypointFilename)
+				pyTestTarget.
+					addSrc(entrypointTarget).
+					addResolvedDependency(entrypointTarget).
+					setMain(main)
+			} else if hasPyTestEntryPointFile {
+				pyTestTarget.setMain(pyTestEntrypointFilename)
+			} /* else:
+			main is not set, assuming there is a test file with the same name
+			as the target name, or there is a macro wrapping py_test and setting its main attribute.
+			*/
+			pyTestTargets = append(pyTestTargets, pyTestTarget)
 		}
-		pyTestTargets = append(pyTestTargets, pyTestTarget)
 	} else {
 		// Create one py_test target per file
 		pyTestFilenames.Each(func(index int, testFile interface{}) {
diff --git a/gazelle/python/kinds.go b/gazelle/python/kinds.go
index 941b45b..a948337 100644
--- a/gazelle/python/kinds.go
+++ b/gazelle/python/kinds.go
@@ -34,11 +34,10 @@
 	pyBinaryKind: {
 		MatchAny: true,
 		NonEmptyAttrs: map[string]bool{
-			"deps":       true,
-			"main":       true,
-			"srcs":       true,
-			"imports":    true,
-			"visibility": true,
+			"deps":    true,
+			"main":    true,
+			"srcs":    true,
+			"imports": true,
 		},
 		SubstituteAttrs: map[string]bool{},
 		MergeableAttrs: map[string]bool{
@@ -52,10 +51,9 @@
 		MatchAny:   false,
 		MatchAttrs: []string{"srcs"},
 		NonEmptyAttrs: map[string]bool{
-			"deps":       true,
-			"srcs":       true,
-			"imports":    true,
-			"visibility": true,
+			"deps":    true,
+			"srcs":    true,
+			"imports": true,
 		},
 		SubstituteAttrs: map[string]bool{},
 		MergeableAttrs: map[string]bool{
@@ -68,11 +66,10 @@
 	pyTestKind: {
 		MatchAny: false,
 		NonEmptyAttrs: map[string]bool{
-			"deps":       true,
-			"main":       true,
-			"srcs":       true,
-			"imports":    true,
-			"visibility": true,
+			"deps":    true,
+			"main":    true,
+			"srcs":    true,
+			"imports": true,
 		},
 		SubstituteAttrs: map[string]bool{},
 		MergeableAttrs: map[string]bool{
diff --git a/gazelle/python/language.go b/gazelle/python/language.go
index 568ac92..56eb97b 100644
--- a/gazelle/python/language.go
+++ b/gazelle/python/language.go
@@ -23,7 +23,6 @@
 type Python struct {
 	Configurer
 	Resolver
-	LifeCycleManager
 }
 
 // NewLanguage initializes a new Python that satisfies the language.Language
diff --git a/gazelle/python/lifecycle.go b/gazelle/python/lifecycle.go
deleted file mode 100644
index 6d628e9..0000000
--- a/gazelle/python/lifecycle.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2023 The Bazel Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package python
-
-import (
-	"context"
-	_ "embed"
-	"github.com/bazelbuild/bazel-gazelle/language"
-	"log"
-	"os"
-)
-
-var (
-	//go:embed helper.zip
-	helperZip  []byte
-	helperPath string
-)
-
-type LifeCycleManager struct {
-	language.BaseLifecycleManager
-	pyzFilePath string
-}
-
-func (l *LifeCycleManager) Before(ctx context.Context) {
-	helperPath = os.Getenv("GAZELLE_PYTHON_HELPER")
-	if helperPath == "" {
-		pyzFile, err := os.CreateTemp("", "python_zip_")
-		if err != nil {
-			log.Fatalf("failed to write parser zip: %v", err)
-		}
-		defer pyzFile.Close()
-		helperPath = pyzFile.Name()
-		l.pyzFilePath = helperPath
-		if _, err := pyzFile.Write(helperZip); err != nil {
-			log.Fatalf("cannot write %q: %v", helperPath, err)
-		}
-	}
-	startParserProcess(ctx)
-	startStdModuleProcess(ctx)
-}
-
-func (l *LifeCycleManager) DoneGeneratingRules() {
-	shutdownParserProcess()
-}
-
-func (l *LifeCycleManager) AfterResolvingDeps(ctx context.Context) {
-	shutdownStdModuleProcess()
-	if l.pyzFilePath != "" {
-		os.Remove(l.pyzFilePath)
-	}
-}
diff --git a/gazelle/python/parse.py b/gazelle/python/parse.py
deleted file mode 100644
index daa6d2b..0000000
--- a/gazelle/python/parse.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# parse.py is a long-living program that communicates over STDIN and STDOUT.
-# STDIN receives parse requests, one per line. It outputs the parsed modules and
-# comments from all the files from each request.
-
-import ast
-import concurrent.futures
-import json
-import os
-import sys
-from io import BytesIO
-from tokenize import COMMENT, NAME, OP, STRING, tokenize
-
-
-def parse_import_statements(content, filepath):
-    modules = list()
-    tree = ast.parse(content, filename=filepath)
-    for node in ast.walk(tree):
-        if isinstance(node, ast.Import):
-            for subnode in node.names:
-                module = {
-                    "name": subnode.name,
-                    "lineno": node.lineno,
-                    "filepath": filepath,
-                    "from": "",
-                }
-                modules.append(module)
-        elif isinstance(node, ast.ImportFrom) and node.level == 0:
-            for subnode in node.names:
-                module = {
-                    "name": f"{node.module}.{subnode.name}",
-                    "lineno": node.lineno,
-                    "filepath": filepath,
-                    "from": node.module,
-                }
-                modules.append(module)
-    return modules
-
-
-def parse_comments(content):
-    comments = list()
-    g = tokenize(BytesIO(content.encode("utf-8")).readline)
-    for toknum, tokval, _, _, _ in g:
-        if toknum == COMMENT:
-            comments.append(tokval)
-    return comments
-
-
-def parse_main(content):
-    g = tokenize(BytesIO(content.encode("utf-8")).readline)
-    for token_type, token_val, start, _, _ in g:
-        if token_type != NAME or token_val != "if" or start[1] != 0:
-            continue
-        try:
-            token_type, token_val, start, _, _ = next(g)
-            if token_type != NAME or token_val != "__name__":
-                continue
-            token_type, token_val, start, _, _ = next(g)
-            if token_type != OP or token_val != "==":
-                continue
-            token_type, token_val, start, _, _ = next(g)
-            if token_type != STRING or token_val.strip("\"'") != '__main__':
-                continue
-            token_type, token_val, start, _, _ = next(g)
-            if token_type != OP or token_val != ":":
-                continue
-            return True
-        except StopIteration:
-            break
-    return False
-
-
-def parse(repo_root, rel_package_path, filename):
-    rel_filepath = os.path.join(rel_package_path, filename)
-    abs_filepath = os.path.join(repo_root, rel_filepath)
-    with open(abs_filepath, "r") as file:
-        content = file.read()
-        # From simple benchmarks, 2 workers gave the best performance here.
-        with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
-            modules_future = executor.submit(
-                parse_import_statements, content, rel_filepath
-            )
-            comments_future = executor.submit(parse_comments, content)
-            main_future = executor.submit(parse_main, content)
-        modules = modules_future.result()
-        comments = comments_future.result()
-        has_main = main_future.result()
-
-        output = {
-            "filename": filename,
-            "modules": modules,
-            "comments": comments,
-            "has_main": has_main,
-        }
-        return output
-
-
-def main(stdin, stdout):
-    with concurrent.futures.ProcessPoolExecutor() as executor:
-        for parse_request in stdin:
-            parse_request = json.loads(parse_request)
-            repo_root = parse_request["repo_root"]
-            rel_package_path = parse_request["rel_package_path"]
-            filenames = parse_request["filenames"]
-            outputs = list()
-            if len(filenames) == 1:
-                outputs.append(parse(repo_root, rel_package_path, filenames[0]))
-            else:
-                futures = [
-                    executor.submit(parse, repo_root, rel_package_path, filename)
-                    for filename in filenames
-                    if filename != ""
-                ]
-                for future in concurrent.futures.as_completed(futures):
-                    outputs.append(future.result())
-            print(json.dumps(outputs), end="", file=stdout, flush=True)
-            stdout.buffer.write(bytes([0]))
-            stdout.flush()
-
-
-if __name__ == "__main__":
-    exit(main(sys.stdin, sys.stdout))
diff --git a/gazelle/python/parse_test.py b/gazelle/python/parse_test.py
deleted file mode 100644
index 3ebded4..0000000
--- a/gazelle/python/parse_test.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import unittest
-import parse
-
-class TestParse(unittest.TestCase):
-    def test_not_has_main(self):
-        content = "a = 1\nb = 2"
-        self.assertFalse(parse.parse_main(content))
-
-    def test_has_main_in_function(self):
-        content = """
-def foo():
-    if __name__ == "__main__":
-        a = 3
-"""
-        self.assertFalse(parse.parse_main(content))
-
-    def test_has_main(self):
-        content = """
-import unittest
-
-from lib import main
-
-
-class ExampleTest(unittest.TestCase):
-    def test_main(self):
-        self.assertEqual(
-            "",
-            main([["A", 1], ["B", 2]]),
-        )
-
-
-if __name__ == "__main__":
-    unittest.main()
-"""
-        self.assertTrue(parse.parse_main(content))
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/gazelle/python/parser.go b/gazelle/python/parser.go
index 9b00b83..1b2a90d 100644
--- a/gazelle/python/parser.go
+++ b/gazelle/python/parser.go
@@ -15,65 +15,16 @@
 package python
 
 import (
-	"bufio"
 	"context"
 	_ "embed"
-	"encoding/json"
 	"fmt"
-	"io"
-	"log"
-	"os"
-	"os/exec"
 	"strings"
-	"sync"
 
 	"github.com/emirpasic/gods/sets/treeset"
 	godsutils "github.com/emirpasic/gods/utils"
+	"golang.org/x/sync/errgroup"
 )
 
-var (
-	parserCmd    *exec.Cmd
-	parserStdin  io.WriteCloser
-	parserStdout io.Reader
-	parserMutex  sync.Mutex
-)
-
-func startParserProcess(ctx context.Context) {
-	// due to #691, we need a system interpreter to boostrap, part of which is
-	// to locate the hermetic interpreter.
-	parserCmd = exec.CommandContext(ctx, "python3", helperPath, "parse")
-	parserCmd.Stderr = os.Stderr
-
-	stdin, err := parserCmd.StdinPipe()
-	if err != nil {
-		log.Printf("failed to initialize parser: %v\n", err)
-		os.Exit(1)
-	}
-	parserStdin = stdin
-
-	stdout, err := parserCmd.StdoutPipe()
-	if err != nil {
-		log.Printf("failed to initialize parser: %v\n", err)
-		os.Exit(1)
-	}
-	parserStdout = stdout
-
-	if err := parserCmd.Start(); err != nil {
-		log.Printf("failed to initialize parser: %v\n", err)
-		os.Exit(1)
-	}
-}
-
-func shutdownParserProcess() {
-	if err := parserStdin.Close(); err != nil {
-		fmt.Fprintf(os.Stderr, "error closing parser: %v", err)
-	}
-
-	if err := parserCmd.Wait(); err != nil {
-		log.Printf("failed to wait for parser: %v\n", err)
-	}
-}
-
 // python3Parser implements a parser for Python files that extracts the modules
 // as seen in the import statements.
 type python3Parser struct {
@@ -101,7 +52,7 @@
 
 // parseSingle parses a single Python file and returns the extracted modules
 // from the import statements as well as the parsed comments.
-func (p *python3Parser) parseSingle(pyFilename string) (*treeset.Set, map[string]*treeset.Set, error) {
+func (p *python3Parser) parseSingle(pyFilename string) (*treeset.Set, map[string]*treeset.Set, *annotations, error) {
 	pyFilenames := treeset.NewWith(godsutils.StringComparator)
 	pyFilenames.Add(pyFilename)
 	return p.parse(pyFilenames)
@@ -109,41 +60,43 @@
 
 // parse parses multiple Python files and returns the extracted modules from
 // the import statements as well as the parsed comments.
-func (p *python3Parser) parse(pyFilenames *treeset.Set) (*treeset.Set, map[string]*treeset.Set, error) {
-	parserMutex.Lock()
-	defer parserMutex.Unlock()
-
+func (p *python3Parser) parse(pyFilenames *treeset.Set) (*treeset.Set, map[string]*treeset.Set, *annotations, error) {
 	modules := treeset.NewWith(moduleComparator)
 
-	req := map[string]interface{}{
-		"repo_root":        p.repoRoot,
-		"rel_package_path": p.relPackagePath,
-		"filenames":        pyFilenames.Values(),
+	g, ctx := errgroup.WithContext(context.Background())
+	ch := make(chan struct{}, 6) // Limit the number of concurrent parses.
+	chRes := make(chan *ParserOutput, len(pyFilenames.Values()))
+	for _, v := range pyFilenames.Values() {
+		ch <- struct{}{}
+		g.Go(func(filename string) func() error {
+			return func() error {
+				defer func() {
+					<-ch
+				}()
+				res, err := NewFileParser().ParseFile(ctx, p.repoRoot, p.relPackagePath, filename)
+				if err != nil {
+					return err
+				}
+				chRes <- res
+				return nil
+			}
+		}(v.(string)))
 	}
-	encoder := json.NewEncoder(parserStdin)
-	if err := encoder.Encode(&req); err != nil {
-		return nil, nil, fmt.Errorf("failed to parse: %w", err)
+	if err := g.Wait(); err != nil {
+		return nil, nil, nil, err
 	}
-
-	reader := bufio.NewReader(parserStdout)
-	data, err := reader.ReadBytes(0)
-	if err != nil {
-		return nil, nil, fmt.Errorf("failed to parse: %w", err)
-	}
-	data = data[:len(data)-1]
-	var allRes []parserResponse
-	if err := json.Unmarshal(data, &allRes); err != nil {
-		return nil, nil, fmt.Errorf("failed to parse: %w", err)
-	}
-
-	mainModules := make(map[string]*treeset.Set, len(allRes))
-	for _, res := range allRes {
+	close(ch)
+	close(chRes)
+	mainModules := make(map[string]*treeset.Set, len(chRes))
+	allAnnotations := new(annotations)
+	allAnnotations.ignore = make(map[string]struct{})
+	for res := range chRes {
 		if res.HasMain {
 			mainModules[res.FileName] = treeset.NewWith(moduleComparator)
 		}
 		annotations, err := annotationsFromComments(res.Comments)
 		if err != nil {
-			return nil, nil, fmt.Errorf("failed to parse annotations: %w", err)
+			return nil, nil, nil, fmt.Errorf("failed to parse annotations: %w", err)
 		}
 
 		for _, m := range res.Modules {
@@ -164,24 +117,32 @@
 				mainModules[res.FileName].Add(m)
 			}
 		}
+
+		// Collect all annotations from each file into a single annotations struct.
+		for k, v := range annotations.ignore {
+			allAnnotations.ignore[k] = v
+		}
+		allAnnotations.includeDeps = append(allAnnotations.includeDeps, annotations.includeDeps...)
 	}
 
-	return modules, mainModules, nil
+	allAnnotations.includeDeps = removeDupesFromStringTreeSetSlice(allAnnotations.includeDeps)
+
+	return modules, mainModules, allAnnotations, nil
 }
 
-// parserResponse represents a response returned by the parser.py for a given
-// parsed Python module.
-type parserResponse struct {
-	// FileName of the parsed module
-	FileName string
-	// The modules depended by the parsed module.
-	Modules []module `json:"modules"`
-	// The comments contained in the parsed module. This contains the
-	// annotations as they are comments in the Python module.
-	Comments []comment `json:"comments"`
-	// HasMain indicates whether the Python module has `if __name == "__main__"`
-	// at the top level
-	HasMain bool `json:"has_main"`
+// removeDupesFromStringTreeSetSlice takes a []string, makes a set out of the
+// elements, and then returns a new []string with all duplicates removed. Order
+// is sorted lexicographically (treeset iteration order), not input order.
+func removeDupesFromStringTreeSetSlice(array []string) []string {
+	s := treeset.NewWith(godsutils.StringComparator)
+	for _, v := range array {
+		s.Add(v)
+	}
+	dedupe := make([]string, s.Size())
+	for i, v := range s.Values() {
+		dedupe[i] = fmt.Sprint(v)
+	}
+	return dedupe
 }
 
 // module represents a fully-qualified, dot-separated, Python module as seen on
@@ -211,7 +172,8 @@
 	// The Gazelle annotation prefix.
 	annotationPrefix string = "gazelle:"
 	// The ignore annotation kind. E.g. '# gazelle:ignore <module_name>'.
-	annotationKindIgnore annotationKind = "ignore"
+	annotationKindIgnore     annotationKind = "ignore"
+	annotationKindIncludeDep annotationKind = "include_dep"
 )
 
 // comment represents a Python comment.
@@ -247,12 +209,15 @@
 type annotations struct {
 	// The parsed modules to be ignored by Gazelle.
 	ignore map[string]struct{}
+	// Labels that Gazelle should include as deps of the generated target.
+	includeDeps []string
 }
 
 // annotationsFromComments returns all the annotations parsed out of the
 // comments of a Python module.
 func annotationsFromComments(comments []comment) (*annotations, error) {
 	ignore := make(map[string]struct{})
+	includeDeps := []string{}
 	for _, comment := range comments {
 		annotation, err := comment.asAnnotation()
 		if err != nil {
@@ -269,10 +234,21 @@
 					ignore[m] = struct{}{}
 				}
 			}
+			if annotation.kind == annotationKindIncludeDep {
+				targets := strings.Split(annotation.value, ",")
+				for _, t := range targets {
+					if t == "" {
+						continue
+					}
+					t = strings.TrimSpace(t)
+					includeDeps = append(includeDeps, t)
+				}
+			}
 		}
 	}
 	return &annotations{
-		ignore: ignore,
+		ignore:      ignore,
+		includeDeps: includeDeps,
 	}, nil
 }
 
diff --git a/tests/pip_install/BUILD.bazel b/gazelle/python/private/BUILD.bazel
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to gazelle/python/private/BUILD.bazel
diff --git a/gazelle/python/private/extensions.bzl b/gazelle/python/private/extensions.bzl
new file mode 100644
index 0000000..5de0713
--- /dev/null
+++ b/gazelle/python/private/extensions.bzl
@@ -0,0 +1,9 @@
+"python_stdlib_list module extension for use with bzlmod"
+
+load("@bazel_skylib//lib:modules.bzl", "modules")
+load("//:deps.bzl", "python_stdlib_list_deps")
+
+python_stdlib_list = modules.as_extension(
+    python_stdlib_list_deps,
+    doc = "This extension registers python stdlib list dependencies.",
+)
diff --git a/gazelle/python/python_test.go b/gazelle/python/python_test.go
index 617b3f8..dd8c241 100644
--- a/gazelle/python/python_test.go
+++ b/gazelle/python/python_test.go
@@ -31,7 +31,6 @@
 	"time"
 
 	"github.com/bazelbuild/bazel-gazelle/testtools"
-	"github.com/bazelbuild/rules_go/go/runfiles"
 	"github.com/bazelbuild/rules_go/go/tools/bazel"
 	"github.com/ghodss/yaml"
 )
@@ -42,9 +41,8 @@
 	gazelleBinaryName = "gazelle_binary"
 )
 
-var gazellePath = mustFindGazelle()
-
 func TestGazelleBinary(t *testing.T) {
+	gazellePath := mustFindGazelle()
 	tests := map[string][]bazel.RunfileEntry{}
 
 	runfiles, err := bazel.ListRunfiles()
@@ -67,13 +65,12 @@
 	if len(tests) == 0 {
 		t.Fatal("no tests found")
 	}
-
 	for testName, files := range tests {
-		testPath(t, testName, files)
+		testPath(t, gazellePath, testName, files)
 	}
 }
 
-func testPath(t *testing.T, name string, files []bazel.RunfileEntry) {
+func testPath(t *testing.T, gazellePath, name string, files []bazel.RunfileEntry) {
 	t.Run(name, func(t *testing.T) {
 		t.Parallel()
 		var inputs, goldens []testtools.FileSpec
@@ -160,11 +157,6 @@
 		cmd.Stdout = &stdout
 		cmd.Stderr = &stderr
 		cmd.Dir = workspaceRoot
-		helperScript, err := runfiles.Rlocation("rules_python_gazelle_plugin/python/helper")
-		if err != nil {
-			t.Fatalf("failed to initialize Python helper: %v", err)
-		}
-		cmd.Env = append(os.Environ(), "GAZELLE_PYTHON_HELPER="+helperScript)
 		if err := cmd.Run(); err != nil {
 			var e *exec.ExitError
 			if !errors.As(err, &e) {
diff --git a/gazelle/python/resolve.go b/gazelle/python/resolve.go
index f019a64..ca306c3 100644
--- a/gazelle/python/resolve.go
+++ b/gazelle/python/resolve.go
@@ -202,11 +202,7 @@
 						matches := ix.FindRulesByImportWithConfig(c, imp, languageName)
 						if len(matches) == 0 {
 							// Check if the imported module is part of the standard library.
-							if isStd, err := isStdModule(module{Name: moduleName}); err != nil {
-								log.Println("Error checking if standard module: ", err)
-								hasFatalError = true
-								continue POSSIBLE_MODULE_LOOP
-							} else if isStd {
+							if isStdModule(module{Name: moduleName}) {
 								continue MODULES_LOOP
 							} else if cfg.ValidateImportStatements() {
 								err := fmt.Errorf(
diff --git a/gazelle/python/std_modules.go b/gazelle/python/std_modules.go
index 8a016af..e10f87b 100644
--- a/gazelle/python/std_modules.go
+++ b/gazelle/python/std_modules.go
@@ -16,92 +16,25 @@
 
 import (
 	"bufio"
-	"context"
 	_ "embed"
-	"fmt"
-	"io"
-	"log"
-	"os"
-	"os/exec"
-	"strconv"
 	"strings"
-	"sync"
 )
 
 var (
-	stdModulesCmd    *exec.Cmd
-	stdModulesStdin  io.WriteCloser
-	stdModulesStdout io.Reader
-	stdModulesMutex  sync.Mutex
-	stdModulesSeen   map[string]struct{}
+	//go:embed stdlib_list.txt
+	stdlibList string
+	stdModules map[string]struct{}
 )
 
-func startStdModuleProcess(ctx context.Context) {
-	stdModulesSeen = make(map[string]struct{})
-
-	// due to #691, we need a system interpreter to boostrap, part of which is
-	// to locate the hermetic interpreter.
-	stdModulesCmd = exec.CommandContext(ctx, "python3", helperPath, "std_modules")
-	stdModulesCmd.Stderr = os.Stderr
-	// All userland site-packages should be ignored.
-	stdModulesCmd.Env = []string{"PYTHONNOUSERSITE=1"}
-
-	stdin, err := stdModulesCmd.StdinPipe()
-	if err != nil {
-		log.Printf("failed to initialize std_modules: %v\n", err)
-		os.Exit(1)
-	}
-	stdModulesStdin = stdin
-
-	stdout, err := stdModulesCmd.StdoutPipe()
-	if err != nil {
-		log.Printf("failed to initialize std_modules: %v\n", err)
-		os.Exit(1)
-	}
-	stdModulesStdout = stdout
-
-	if err := stdModulesCmd.Start(); err != nil {
-		log.Printf("failed to initialize std_modules: %v\n", err)
-		os.Exit(1)
+func init() {
+	stdModules = make(map[string]struct{})
+	scanner := bufio.NewScanner(strings.NewReader(stdlibList))
+	for scanner.Scan() {
+		stdModules[scanner.Text()] = struct{}{}
 	}
 }
 
-func shutdownStdModuleProcess() {
-	if err := stdModulesStdin.Close(); err != nil {
-		fmt.Fprintf(os.Stderr, "error closing std module: %v", err)
-	}
-
-	if err := stdModulesCmd.Wait(); err != nil {
-		log.Printf("failed to wait for std_modules: %v\n", err)
-	}
-}
-
-func isStdModule(m module) (bool, error) {
-	if _, seen := stdModulesSeen[m.Name]; seen {
-		return true, nil
-	}
-	stdModulesMutex.Lock()
-	defer stdModulesMutex.Unlock()
-
-	fmt.Fprintf(stdModulesStdin, "%s\n", m.Name)
-
-	stdoutReader := bufio.NewReader(stdModulesStdout)
-	line, err := stdoutReader.ReadString('\n')
-	if err != nil {
-		return false, err
-	}
-	if len(line) == 0 {
-		return false, fmt.Errorf("unexpected empty output from std_modules")
-	}
-
-	isStd, err := strconv.ParseBool(strings.TrimSpace(line))
-	if err != nil {
-		return false, err
-	}
-
-	if isStd {
-		stdModulesSeen[m.Name] = struct{}{}
-		return true, nil
-	}
-	return false, nil
+func isStdModule(m module) bool {
+	_, ok := stdModules[m.Name]
+	return ok
 }
diff --git a/gazelle/python/std_modules.py b/gazelle/python/std_modules.py
deleted file mode 100644
index 779a325..0000000
--- a/gazelle/python/std_modules.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# std_modules.py is a long-living program that communicates over STDIN and
-# STDOUT. STDIN receives module names, one per line. For each module statement
-# it evaluates, it outputs true/false for whether the module is part of the
-# standard library or not.
-
-import os
-import sys
-from contextlib import redirect_stdout
-
-
-def is_std_modules(module):
-    # If for some reason a module (such as pygame, see https://github.com/pygame/pygame/issues/542)
-    # prints to stdout upon import,
-    # the output of this script should still be parseable by golang.
-    # Therefore, redirect stdout while running the import.
-    with redirect_stdout(os.devnull):
-        try:
-            __import__(module, globals(), locals(), [], 0)
-            return True
-        except Exception:
-            return False
-
-
-def main(stdin, stdout):
-    for module in stdin:
-        module = module.strip()
-        # Don't print the boolean directly as it is capitalized in Python.
-        print(
-            "true" if is_std_modules(module) else "false",
-            end="\n",
-            file=stdout,
-        )
-        stdout.flush()
-
-
-if __name__ == "__main__":
-    exit(main(sys.stdin, sys.stdout))
diff --git a/gazelle/python/std_modules_test.go b/gazelle/python/std_modules_test.go
new file mode 100644
index 0000000..bc22638
--- /dev/null
+++ b/gazelle/python/std_modules_test.go
@@ -0,0 +1,27 @@
+// Copyright 2023 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package python
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestIsStdModule(t *testing.T) {
+	assert.True(t, isStdModule(module{Name: "unittest"}))
+	assert.True(t, isStdModule(module{Name: "os.path"}))
+	assert.False(t, isStdModule(module{Name: "foo"}))
+}
diff --git a/gazelle/python/target.go b/gazelle/python/target.go
index e310405..c40d6fb 100644
--- a/gazelle/python/target.go
+++ b/gazelle/python/target.go
@@ -99,9 +99,20 @@
 	return t
 }
 
-// addVisibility adds a visibility to the target.
-func (t *targetBuilder) addVisibility(visibility string) *targetBuilder {
-	t.visibility.Add(visibility)
+// addResolvedDependencies adds multiple dependencies, that have already been
+// resolved or generated, to the target.
+func (t *targetBuilder) addResolvedDependencies(deps []string) *targetBuilder {
+	for _, dep := range deps {
+		t.addResolvedDependency(dep)
+	}
+	return t
+}
+
+// addVisibility adds visibility labels to the target.
+func (t *targetBuilder) addVisibility(visibility []string) *targetBuilder {
+	for _, item := range visibility {
+		t.visibility.Add(item)
+	}
 	return t
 }
 
diff --git a/gazelle/python/testdata/annotation_include_dep/BUILD.in b/gazelle/python/testdata/annotation_include_dep/BUILD.in
new file mode 100644
index 0000000..af2c2ce
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_generation_mode file
diff --git a/gazelle/python/testdata/annotation_include_dep/BUILD.out b/gazelle/python/testdata/annotation_include_dep/BUILD.out
new file mode 100644
index 0000000..1cff8f4
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/BUILD.out
@@ -0,0 +1,53 @@
+load("@rules_python//python:defs.bzl", "py_binary", "py_library", "py_test")
+
+# gazelle:python_generation_mode file
+
+py_library(
+    name = "__init__",
+    srcs = ["__init__.py"],
+    visibility = ["//:__subpackages__"],
+    deps = [
+        ":module1",
+        ":module2",
+        "//foo/bar:baz",
+        "//hello:world",
+        "@gazelle_python_test//foo",
+        "@star_wars//rebel_alliance/luke:skywalker",
+    ],
+)
+
+py_library(
+    name = "module1",
+    srcs = ["module1.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_library(
+    name = "module2",
+    srcs = ["module2.py"],
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "//checking/py_binary/from/if:works",
+        "//foo:bar",
+    ],
+)
+
+py_binary(
+    name = "annotation_include_dep_bin",
+    srcs = ["__main__.py"],
+    main = "__main__.py",
+    visibility = ["//:__subpackages__"],
+    deps = [
+        ":module2",
+        "//checking/py_binary/from/__main__:works",
+    ],
+)
+
+py_test(
+    name = "module2_test",
+    srcs = ["module2_test.py"],
+    deps = [
+        ":module2",
+        "//checking/py_test/works:too",
+    ],
+)
diff --git a/gazelle/python/testdata/annotation_include_dep/README.md b/gazelle/python/testdata/annotation_include_dep/README.md
new file mode 100644
index 0000000..4c8afbe
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/README.md
@@ -0,0 +1,10 @@
+# Annotation: Include Dep
+
+Test that the Python gazelle annotation `# gazelle:include_dep` correctly adds dependencies
+to the generated target even if those dependencies are not imported by the Python module.
+
+The root directory tests that all `py_*` targets will correctly include the additional
+dependencies.
+
+The `subpkg` directory tests that all `# gazelle:include_dep` annotations found in all source
+files are included in the generated target (such as during `generation_mode package`).
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/annotation_include_dep/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/annotation_include_dep/WORKSPACE
diff --git a/gazelle/python/testdata/annotation_include_dep/__init__.py b/gazelle/python/testdata/annotation_include_dep/__init__.py
new file mode 100644
index 0000000..a90a1b9
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/__init__.py
@@ -0,0 +1,9 @@
+import foo  # third party package
+import module1
+
+# gazelle:include_dep //foo/bar:baz
+# gazelle:include_dep //hello:world,@star_wars//rebel_alliance/luke:skywalker
+# gazelle:include_dep :module2
+
+del module1
+del foo
diff --git a/gazelle/python/testdata/annotation_include_dep/__main__.py b/gazelle/python/testdata/annotation_include_dep/__main__.py
new file mode 100644
index 0000000..6d9d8aa
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/__main__.py
@@ -0,0 +1,7 @@
+# gazelle:include_dep //checking/py_binary/from/__main__:works
+# Check deduping
+# gazelle:include_dep //checking/py_binary/from/__main__:works
+
+import module2
+
+del module2
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/annotation_include_dep/gazelle_python.yaml
similarity index 78%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/annotation_include_dep/gazelle_python.yaml
index bbdfb4c..7afe81f 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/annotation_include_dep/gazelle_python.yaml
@@ -1,4 +1,4 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+manifest:
+  modules_mapping:
+    foo: foo
+  pip_deps_repository_name: gazelle_python_test
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/annotation_include_dep/module1.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/annotation_include_dep/module1.py
diff --git a/gazelle/python/testdata/annotation_include_dep/module2.py b/gazelle/python/testdata/annotation_include_dep/module2.py
new file mode 100644
index 0000000..23a75af
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/module2.py
@@ -0,0 +1,5 @@
+# gazelle:include_dep //foo:bar
+
+if __name__ == "__main__":
+    # gazelle:include_dep //checking/py_binary/from/if:works
+    print("hello")
diff --git a/gazelle/python/testdata/annotation_include_dep/module2_test.py b/gazelle/python/testdata/annotation_include_dep/module2_test.py
new file mode 100644
index 0000000..6fa18c6
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/module2_test.py
@@ -0,0 +1,5 @@
+# gazelle:include_dep //checking/py_test/works:too
+
+import module2
+
+del module2
diff --git a/gazelle/python/testdata/annotation_include_dep/subpkg/BUILD.in b/gazelle/python/testdata/annotation_include_dep/subpkg/BUILD.in
new file mode 100644
index 0000000..421b486
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/subpkg/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_generation_mode package
diff --git a/gazelle/python/testdata/annotation_include_dep/subpkg/BUILD.out b/gazelle/python/testdata/annotation_include_dep/subpkg/BUILD.out
new file mode 100644
index 0000000..921c892
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/subpkg/BUILD.out
@@ -0,0 +1,29 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_generation_mode package
+
+py_library(
+    name = "subpkg",
+    srcs = [
+        "__init__.py",
+        "module1.py",
+        "module2.py",
+        "module3.py",
+    ],
+    visibility = ["//:__subpackages__"],
+    deps = [
+        ":nonexistant_target_from_include_dep_in_module3",
+        "//me_from_module1",
+        "//other/thing:from_include_dep_in_module2",
+        "//you_from_module1",
+    ],
+)
+
+py_test(
+    name = "module1_test",
+    srcs = ["module1_test.py"],
+    deps = [
+        ":subpkg",
+        "//:bagel_from_include_dep_in_module1_test",
+    ],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/annotation_include_dep/subpkg/__init__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/annotation_include_dep/subpkg/__init__.py
diff --git a/gazelle/python/testdata/annotation_include_dep/subpkg/module1.py b/gazelle/python/testdata/annotation_include_dep/subpkg/module1.py
new file mode 100644
index 0000000..01566a0
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/subpkg/module1.py
@@ -0,0 +1,3 @@
+def hello():
+    # gazelle:include_dep //you_from_module1,//me_from_module1
+    pass
diff --git a/gazelle/python/testdata/annotation_include_dep/subpkg/module1_test.py b/gazelle/python/testdata/annotation_include_dep/subpkg/module1_test.py
new file mode 100644
index 0000000..087763a
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/subpkg/module1_test.py
@@ -0,0 +1,5 @@
+# gazelle:include_dep //:bagel_from_include_dep_in_module1_test
+
+import module1
+
+del module1
diff --git a/gazelle/python/testdata/annotation_include_dep/subpkg/module2.py b/gazelle/python/testdata/annotation_include_dep/subpkg/module2.py
new file mode 100644
index 0000000..dabeb67
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/subpkg/module2.py
@@ -0,0 +1,4 @@
+# gazelle:include_dep //other/thing:from_include_dep_in_module2
+import module1
+
+del module1
diff --git a/gazelle/python/testdata/annotation_include_dep/subpkg/module3.py b/gazelle/python/testdata/annotation_include_dep/subpkg/module3.py
new file mode 100644
index 0000000..899a7c4
--- /dev/null
+++ b/gazelle/python/testdata/annotation_include_dep/subpkg/module3.py
@@ -0,0 +1,3 @@
+def goodbye():
+    # gazelle:include_dep :nonexistant_target_from_include_dep_in_module3
+    pass
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/annotation_include_dep/test.yaml
similarity index 94%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/annotation_include_dep/test.yaml
index bbdfb4c..2410223 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/annotation_include_dep/test.yaml
@@ -12,3 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
+expect:
+  exit_code: 0
diff --git a/gazelle/python/testdata/binary_without_entrypoint/collided_main.py b/gazelle/python/testdata/binary_without_entrypoint/collided_main.py
index 3bf59c7..ba73251 100644
--- a/gazelle/python/testdata/binary_without_entrypoint/collided_main.py
+++ b/gazelle/python/testdata/binary_without_entrypoint/collided_main.py
@@ -1,4 +1,4 @@
 import numpy
 
 if __name__ == "__main__":
-    run()
\ No newline at end of file
+    run()
diff --git a/gazelle/python/testdata/binary_without_entrypoint/main.py b/gazelle/python/testdata/binary_without_entrypoint/main.py
index f7b3170..49f1049 100644
--- a/gazelle/python/testdata/binary_without_entrypoint/main.py
+++ b/gazelle/python/testdata/binary_without_entrypoint/main.py
@@ -2,4 +2,4 @@
 import pandas
 
 if __name__ == "__main__":
-    run()
\ No newline at end of file
+    run()
diff --git a/gazelle/python/testdata/binary_without_entrypoint/main_test.py b/gazelle/python/testdata/binary_without_entrypoint/main_test.py
index 505a766..a010fe7 100644
--- a/gazelle/python/testdata/binary_without_entrypoint/main_test.py
+++ b/gazelle/python/testdata/binary_without_entrypoint/main_test.py
@@ -1,7 +1,9 @@
 import unittest
 
+
 class TestMain(unittest.unittest):
     pass
 
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/BUILD.in b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/BUILD.in
new file mode 100644
index 0000000..b24a823
--- /dev/null
+++ b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/BUILD.in
@@ -0,0 +1,4 @@
+# gazelle:python_generation_mode file
+
+# gazelle:resolve py numpy @pip//:numpy
+# gazelle:resolve py pandas @pip//:pandas
diff --git a/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/BUILD.out b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/BUILD.out
new file mode 100644
index 0000000..bffedb1
--- /dev/null
+++ b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/BUILD.out
@@ -0,0 +1,46 @@
+load("@rules_python//python:defs.bzl", "py_binary", "py_library")
+
+# gazelle:python_generation_mode file
+
+# gazelle:resolve py numpy @pip//:numpy
+# gazelle:resolve py pandas @pip//:pandas
+
+py_library(
+    name = "lib",
+    srcs = ["lib.py"],
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "@pip//:numpy",
+        "@pip//:pandas",
+    ],
+)
+
+py_library(
+    name = "lib2",
+    srcs = ["lib2.py"],
+    visibility = ["//:__subpackages__"],
+    deps = [
+        ":lib",
+        ":lib_and_main",
+    ],
+)
+
+py_binary(
+    name = "lib_and_main",
+    srcs = ["lib_and_main.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_binary(
+    name = "main",
+    srcs = ["main.py"],
+    visibility = ["//:__subpackages__"],
+    deps = ["@pip//:pandas"],
+)
+
+py_binary(
+    name = "main2",
+    srcs = ["main2.py"],
+    visibility = ["//:__subpackages__"],
+    deps = [":lib2"],
+)
diff --git a/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/README.md b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/README.md
new file mode 100644
index 0000000..9cbe3e9
--- /dev/null
+++ b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/README.md
@@ -0,0 +1,4 @@
+# Binary without entrypoint
+
+This test case asserts that when there is no __main__.py, a py_binary is generated for each main module file, and that
+this py_binary is generated instead of (not in addition to) any py_library target.
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/binary_without_entrypoint_per_file_generation/WORKSPACE
diff --git a/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/lib.py b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/lib.py
new file mode 100644
index 0000000..3e1e6b8
--- /dev/null
+++ b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/lib.py
@@ -0,0 +1,2 @@
+import numpy
+import pandas
diff --git a/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/lib2.py b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/lib2.py
new file mode 100644
index 0000000..592a2da
--- /dev/null
+++ b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/lib2.py
@@ -0,0 +1,2 @@
+import lib
+import lib_and_main
diff --git a/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/lib_and_main.py b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/lib_and_main.py
new file mode 100644
index 0000000..c6e2d49
--- /dev/null
+++ b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/lib_and_main.py
@@ -0,0 +1,6 @@
+def library_func():
+    print("library_func")
+
+
+if __name__ == "__main__":
+    library_func()
diff --git a/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/main.py b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/main.py
new file mode 100644
index 0000000..a068203
--- /dev/null
+++ b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/main.py
@@ -0,0 +1,4 @@
+import pandas
+
+if __name__ == "__main__":
+    run()
diff --git a/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/main2.py b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/main2.py
new file mode 100644
index 0000000..6f923b8
--- /dev/null
+++ b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/main2.py
@@ -0,0 +1,4 @@
+import lib2
+
+if __name__ == "__main__":
+    lib2.lib_and_main.library_func()
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/test.yaml
similarity index 94%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/binary_without_entrypoint_per_file_generation/test.yaml
index bbdfb4c..2410223 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/binary_without_entrypoint_per_file_generation/test.yaml
@@ -12,3 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
+expect:
+  exit_code: 0
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/different_packages_in_same_namespace/BUILD.in
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/different_packages_in_same_namespace/BUILD.in
diff --git a/gazelle/python/testdata/different_packages_in_same_namespace/BUILD.out b/gazelle/python/testdata/different_packages_in_same_namespace/BUILD.out
new file mode 100644
index 0000000..2ee0b53
--- /dev/null
+++ b/gazelle/python/testdata/different_packages_in_same_namespace/BUILD.out
@@ -0,0 +1,18 @@
+load("@rules_python//python:defs.bzl", "py_binary", "py_library")
+
+py_library(
+    name = "different_packages_in_same_namespace",
+    srcs = ["__init__.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_binary(
+    name = "different_packages_in_same_namespace_bin",
+    srcs = ["__main__.py"],
+    main = "__main__.py",
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "@gazelle_python_test//arrow",
+        "@gazelle_python_test//arrow_plugin",
+    ],
+)
diff --git a/gazelle/python/testdata/different_packages_in_same_namespace/README.md b/gazelle/python/testdata/different_packages_in_same_namespace/README.md
new file mode 100644
index 0000000..dcaebb7
--- /dev/null
+++ b/gazelle/python/testdata/different_packages_in_same_namespace/README.md
@@ -0,0 +1,4 @@
+# Different Packages in Same Namespace
+
+This test case asserts that
+importing `arrow.plugin` correctly adds arrow_plugin to the deps.
\ No newline at end of file
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/different_packages_in_same_namespace/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/different_packages_in_same_namespace/WORKSPACE
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/different_packages_in_same_namespace/__init__.py
similarity index 95%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/different_packages_in_same_namespace/__init__.py
index bbdfb4c..7307559 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/different_packages_in_same_namespace/__init__.py
@@ -12,3 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# For test purposes only.
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/different_packages_in_same_namespace/__main__.py
similarity index 94%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/different_packages_in_same_namespace/__main__.py
index bbdfb4c..e378628 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/different_packages_in_same_namespace/__main__.py
@@ -12,3 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import arrow
+import arrow.plugin
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/different_packages_in_same_namespace/gazelle_python.yaml
similarity index 82%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/different_packages_in_same_namespace/gazelle_python.yaml
index bbdfb4c..a2ef070 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/different_packages_in_same_namespace/gazelle_python.yaml
@@ -12,3 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+manifest:
+  modules_mapping:
+    arrow: arrow
+    arrow.plugin: arrow_plugin
+  pip_deps_repository_name: gazelle_python_test
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/different_packages_in_same_namespace/test.yaml
similarity index 98%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/different_packages_in_same_namespace/test.yaml
index bbdfb4c..fcea777 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/different_packages_in_same_namespace/test.yaml
@@ -12,3 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
diff --git a/gazelle/python/testdata/directive_python_default_visibility/README.md b/gazelle/python/testdata/directive_python_default_visibility/README.md
new file mode 100644
index 0000000..be42792
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/README.md
@@ -0,0 +1,21 @@
+# Directive: `python_default_visibility`
+
+This test case asserts that the `# gazelle:python_default_visibility` directive
+correctly:
+
+1.  Uses the default value when `python_default_visibility` is not set.
+2.  Uses the correct default value when `python_root` is set and
+    `python_default_visibility` is not set.
+3.  Supports injecting `python_root`
+4.  Supports multiple labels
+5.  Setting the label to "NONE" removes all visibility attributes.
+6.  Setting the label to "DEFAULT" reverts to using the default.
+7.  Adding `python_visibility` directive with `python_default_visibility NONE`
+    only adds the items listed by `python_visibility`.
+8.  Multiple `python_root` dirs [GH #1682][gh-1682] uses correct value when
+    injecting `python_root`.
+9.  Setting both `python_default_visibility` and `python_visibility` and how
+    they interact with sub-packages.
+
+
+[gh-1682]: https://github.com/bazelbuild/rules_python/issues/1682
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/directive_python_default_visibility/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/directive_python_default_visibility/WORKSPACE
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/directive_python_default_visibility/test.yaml
similarity index 94%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/directive_python_default_visibility/test.yaml
index bbdfb4c..2410223 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/directive_python_default_visibility/test.yaml
@@ -12,3 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
+expect:
+  exit_code: 0
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test1_default/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test1_default/BUILD.in
new file mode 100644
index 0000000..690a651
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test1_default/BUILD.in
@@ -0,0 +1 @@
+# python_default_visibility is not set.
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test1_default/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test1_default/BUILD.out
new file mode 100644
index 0000000..47fd2d8
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test1_default/BUILD.out
@@ -0,0 +1,9 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# python_default_visibility is not set.
+
+py_library(
+    name = "test1_default",
+    srcs = ["test1.py"],
+    visibility = ["//:__subpackages__"],
+)
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test1_default/test1.py b/gazelle/python/testdata/directive_python_default_visibility/test1_default/test1.py
new file mode 100644
index 0000000..98907eb
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test1_default/test1.py
@@ -0,0 +1,2 @@
+def func():
+    print("library_func")
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/BUILD.in
new file mode 100644
index 0000000..6948b47
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_root
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/BUILD.out
new file mode 100644
index 0000000..c3b51bd
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/BUILD.out
@@ -0,0 +1,12 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_root
+
+py_library(
+    name = "test2_default_with_python_root",
+    srcs = [
+        "__init__.py",
+        "test2.py",
+    ],
+    visibility = ["//test2_default_with_python_root:__subpackages__"],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/__init__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/__init__.py
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/test2.py b/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/test2.py
new file mode 100644
index 0000000..98907eb
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test2_default_with_python_root/test2.py
@@ -0,0 +1,2 @@
+def func():
+    print("library_func")
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test3_injection/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test3_injection/BUILD.in
new file mode 100644
index 0000000..588f0c7
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test3_injection/BUILD.in
@@ -0,0 +1,2 @@
+# gazelle:python_root
+# gazelle:python_default_visibility //foo/$python_root$/bar:__pkg__
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test3_injection/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test3_injection/BUILD.out
new file mode 100644
index 0000000..d4140e8
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test3_injection/BUILD.out
@@ -0,0 +1,13 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_root
+# gazelle:python_default_visibility //foo/$python_root$/bar:__pkg__
+
+py_library(
+    name = "test3_injection",
+    srcs = [
+        "__init__.py",
+        "test3.py",
+    ],
+    visibility = ["//foo/test3_injection/bar:__pkg__"],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test3_injection/__init__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_default_visibility/test3_injection/__init__.py
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test3_injection/test3.py b/gazelle/python/testdata/directive_python_default_visibility/test3_injection/test3.py
new file mode 100644
index 0000000..98907eb
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test3_injection/test3.py
@@ -0,0 +1,2 @@
+def func():
+    print("library_func")
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test4_multiple_labels/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test4_multiple_labels/BUILD.in
new file mode 100644
index 0000000..53eb8a3
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test4_multiple_labels/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_default_visibility //foo/bar:__pkg__,//tests:__subpackages__,//a:b
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test4_multiple_labels/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test4_multiple_labels/BUILD.out
new file mode 100644
index 0000000..2c3a433
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test4_multiple_labels/BUILD.out
@@ -0,0 +1,13 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_default_visibility //foo/bar:__pkg__,//tests:__subpackages__,//a:b
+
+py_library(
+    name = "test4_multiple_labels",
+    srcs = ["test4.py"],
+    visibility = [
+        "//a:b",
+        "//foo/bar:__pkg__",
+        "//tests:__subpackages__",
+    ],
+)
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test4_multiple_labels/test4.py b/gazelle/python/testdata/directive_python_default_visibility/test4_multiple_labels/test4.py
new file mode 100644
index 0000000..98907eb
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test4_multiple_labels/test4.py
@@ -0,0 +1,2 @@
+def func():
+    print("library_func")
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test5_none_label/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test5_none_label/BUILD.in
new file mode 100644
index 0000000..7810eea
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test5_none_label/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_default_visibility NONE
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test5_none_label/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test5_none_label/BUILD.out
new file mode 100644
index 0000000..fc410f6
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test5_none_label/BUILD.out
@@ -0,0 +1,8 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_default_visibility NONE
+
+py_library(
+    name = "test5_none_label",
+    srcs = ["test5.py"],
+)
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test5_none_label/test5.py b/gazelle/python/testdata/directive_python_default_visibility/test5_none_label/test5.py
new file mode 100644
index 0000000..98907eb
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test5_none_label/test5.py
@@ -0,0 +1,2 @@
+def func():
+    print("library_func")
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/BUILD.in
new file mode 100644
index 0000000..65b51e3
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_default_visibility //foo:bar
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/BUILD.out
new file mode 100644
index 0000000..3df11b4
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/BUILD.out
@@ -0,0 +1,9 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_default_visibility //foo:bar
+
+py_library(
+    name = "test6_default_label",
+    srcs = ["test6.py"],
+    visibility = ["//foo:bar"],
+)
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/subpkg/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/subpkg/BUILD.in
new file mode 100644
index 0000000..2a54cfd
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/subpkg/BUILD.in
@@ -0,0 +1,2 @@
+# Reset the default visibility to the default for all child packages.
+# gazelle:python_default_visibility DEFAULT
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/subpkg/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/subpkg/BUILD.out
new file mode 100644
index 0000000..6169367
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/subpkg/BUILD.out
@@ -0,0 +1,10 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# Reset the default visibility to the default for all child packages.
+# gazelle:python_default_visibility DEFAULT
+
+py_library(
+    name = "subpkg",
+    srcs = ["test6_sub.py"],
+    visibility = ["//:__subpackages__"],
+)
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/subpkg/test6_sub.py b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/subpkg/test6_sub.py
new file mode 100644
index 0000000..98907eb
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/subpkg/test6_sub.py
@@ -0,0 +1,2 @@
+def func():
+    print("library_func")
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/test6.py b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/test6.py
new file mode 100644
index 0000000..98907eb
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test6_default_label/test6.py
@@ -0,0 +1,2 @@
+def func():
+    print("library_func")
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test7_none_label_with_extra_vis/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test7_none_label_with_extra_vis/BUILD.in
new file mode 100644
index 0000000..d64169f
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test7_none_label_with_extra_vis/BUILD.in
@@ -0,0 +1,5 @@
+# python_visibility directives that happen either before _or_ after the
+# NONE reset both get applied.
+# gazelle:python_visibility //foo:bar
+# gazelle:python_default_visibility NONE
+# gazelle:python_visibility //bar:baz
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test7_none_label_with_extra_vis/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test7_none_label_with_extra_vis/BUILD.out
new file mode 100644
index 0000000..f912ac6
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test7_none_label_with_extra_vis/BUILD.out
@@ -0,0 +1,16 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# python_visibility directives that happen either before _or_ after the
+# NONE reset both get applied.
+# gazelle:python_visibility //foo:bar
+# gazelle:python_default_visibility NONE
+# gazelle:python_visibility //bar:baz
+
+py_library(
+    name = "test7_none_label_with_extra_vis",
+    srcs = ["test7.py"],
+    visibility = [
+        "//bar:baz",
+        "//foo:bar",
+    ],
+)
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test7_none_label_with_extra_vis/test7.py b/gazelle/python/testdata/directive_python_default_visibility/test7_none_label_with_extra_vis/test7.py
new file mode 100644
index 0000000..98907eb
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test7_none_label_with_extra_vis/test7.py
@@ -0,0 +1,2 @@
+def func():
+    print("library_func")
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/BUILD.in
new file mode 100644
index 0000000..4e90bdc
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/BUILD.in
@@ -0,0 +1,2 @@
+# For funzies, also throw in some additional visibility.
+# gazelle:python_visibility //tests:__pkg__
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/BUILD.out
new file mode 100644
index 0000000..4e90bdc
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/BUILD.out
@@ -0,0 +1,2 @@
+# For funzies, also throw in some additional visibility.
+# gazelle:python_visibility //tests:__pkg__
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/BUILD.in
new file mode 100644
index 0000000..6948b47
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_root
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/BUILD.out
new file mode 100644
index 0000000..6948b47
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/BUILD.out
@@ -0,0 +1 @@
+# gazelle:python_root
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/pkg1/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/pkg1/BUILD.in
new file mode 100644
index 0000000..0151a68
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/pkg1/BUILD.in
@@ -0,0 +1,2 @@
+# proj1 depends on proj2
+# We can leave the default visibility.
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/pkg1/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/pkg1/BUILD.out
new file mode 100644
index 0000000..a473ba5
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/pkg1/BUILD.out
@@ -0,0 +1,14 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# proj1 depends on proj2
+# We can leave the default visibility.
+
+py_library(
+    name = "pkg1",
+    srcs = ["file1.py"],
+    imports = [".."],
+    visibility = [
+        "//test8_multiple_python_root_dirs/proj1/src:__subpackages__",
+        "//tests:__pkg__",
+    ],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/pkg1/file1.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj1/src/pkg1/file1.py
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/BUILD.in
new file mode 100644
index 0000000..6948b47
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_root
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/BUILD.out
new file mode 100644
index 0000000..6948b47
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/BUILD.out
@@ -0,0 +1 @@
+# gazelle:python_root
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/pkg2/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/pkg2/BUILD.in
new file mode 100644
index 0000000..ebaccfd
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/pkg2/BUILD.in
@@ -0,0 +1,3 @@
+# proj1 depends on proj2
+# So we have to make sure that proj2 is visible by proj1
+# gazelle:python_default_visibility //$python_root$:__subpackages__,//test8_multiple_python_root_dirs/proj1/src:__subpackages__
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/pkg2/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/pkg2/BUILD.out
new file mode 100644
index 0000000..8b30e97
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/pkg2/BUILD.out
@@ -0,0 +1,16 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# proj1 depends on proj2
+# So we have to make sure that proj2 is visible by proj1
+# gazelle:python_default_visibility //$python_root$:__subpackages__,//test8_multiple_python_root_dirs/proj1/src:__subpackages__
+
+py_library(
+    name = "pkg2",
+    srcs = ["file2.py"],
+    imports = [".."],
+    visibility = [
+        "//test8_multiple_python_root_dirs/proj1/src:__subpackages__",
+        "//test8_multiple_python_root_dirs/proj2/src:__subpackages__",
+        "//tests:__pkg__",
+    ],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/pkg2/file2.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_default_visibility/test8_multiple_python_root_dirs/proj2/src/pkg2/file2.py
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/BUILD.in
new file mode 100644
index 0000000..44e23ed
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_default_visibility //tests:__pkg__
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/BUILD.out
new file mode 100644
index 0000000..69587b1
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/BUILD.out
@@ -0,0 +1,9 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_default_visibility //tests:__pkg__
+
+py_library(
+    name = "test9_default_vis_with_python_vis",
+    srcs = ["test9.py"],
+    visibility = ["//tests:__pkg__"],
+)
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg1/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg1/BUILD.in
new file mode 100644
index 0000000..6e484ff
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg1/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_visibility //some/new:target
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg1/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg1/BUILD.out
new file mode 100644
index 0000000..6b7f7c3
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg1/BUILD.out
@@ -0,0 +1,12 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_visibility //some/new:target
+
+py_library(
+    name = "subpkg1",
+    srcs = ["foo.py"],
+    visibility = [
+        "//some/new:target",
+        "//tests:__pkg__",
+    ],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg1/foo.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg1/foo.py
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg2/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg2/BUILD.in
new file mode 100644
index 0000000..912134a
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg2/BUILD.in
@@ -0,0 +1,3 @@
+# gazelle:python_default_visibility //a:b,//a:c
+# gazelle:python_visibility //c:d
+# gazelle:python_visibility //e:f
diff --git a/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg2/BUILD.out b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg2/BUILD.out
new file mode 100644
index 0000000..a43fc0c
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg2/BUILD.out
@@ -0,0 +1,16 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_default_visibility //a:b,//a:c
+# gazelle:python_visibility //c:d
+# gazelle:python_visibility //e:f
+
+py_library(
+    name = "subpkg2",
+    srcs = ["foo.py"],
+    visibility = [
+        "//a:b",
+        "//a:c",
+        "//c:d",
+        "//e:f",
+    ],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg2/foo.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/subpkg2/foo.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/test9.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_default_visibility/test9_default_vis_with_python_vis/test9.py
diff --git a/gazelle/python/testdata/directive_python_label_convention/README.md b/gazelle/python/testdata/directive_python_label_convention/README.md
new file mode 100644
index 0000000..8ce0155
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_convention/README.md
@@ -0,0 +1,4 @@
+# Directive: `python_label_convention`
+
+This test case asserts that the `# gazelle:python_label_convention` directive
+works as intended when set.
\ No newline at end of file
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_label_convention/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_label_convention/WORKSPACE
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_label_convention/test.yaml
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_label_convention/test.yaml
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_label_convention/test1_unset/BUILD.in
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_label_convention/test1_unset/BUILD.in
diff --git a/gazelle/python/testdata/directive_python_label_convention/test1_unset/BUILD.out b/gazelle/python/testdata/directive_python_label_convention/test1_unset/BUILD.out
new file mode 100644
index 0000000..697a202
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_convention/test1_unset/BUILD.out
@@ -0,0 +1,11 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+py_library(
+    name = "test1_unset",
+    srcs = ["bar.py"],
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "@gazelle_python_test//google_cloud_aiplatform",
+        "@gazelle_python_test//google_cloud_storage",
+    ],
+)
diff --git a/gazelle/python/testdata/directive_python_label_convention/test1_unset/bar.py b/gazelle/python/testdata/directive_python_label_convention/test1_unset/bar.py
new file mode 100644
index 0000000..99a4b1c
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_convention/test1_unset/bar.py
@@ -0,0 +1,6 @@
+from google.cloud import aiplatform, storage
+
+
+def main():
+    a = dir(aiplatform)
+    b = dir(storage)
diff --git a/gazelle/python/testdata/directive_python_label_convention/test1_unset/gazelle_python.yaml b/gazelle/python/testdata/directive_python_label_convention/test1_unset/gazelle_python.yaml
new file mode 100644
index 0000000..bd5efab
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_convention/test1_unset/gazelle_python.yaml
@@ -0,0 +1,6 @@
+manifest:
+  modules_mapping:
+    google.cloud.aiplatform: google_cloud_aiplatform
+    google.cloud.storage: google_cloud_storage
+  pip_repository:
+    name: gazelle_python_test
diff --git a/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/BUILD.in b/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/BUILD.in
new file mode 100644
index 0000000..83ce6af
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_label_convention :$distribution_name$
\ No newline at end of file
diff --git a/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/BUILD.out b/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/BUILD.out
new file mode 100644
index 0000000..061c8e5
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/BUILD.out
@@ -0,0 +1,13 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_label_convention :$distribution_name$
+
+py_library(
+    name = "test2_custom_prefix_colon",
+    srcs = ["bar.py"],
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "@gazelle_python_test//:google_cloud_aiplatform",
+        "@gazelle_python_test//:google_cloud_storage",
+    ],
+)
diff --git a/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/bar.py b/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/bar.py
new file mode 100644
index 0000000..99a4b1c
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/bar.py
@@ -0,0 +1,6 @@
+from google.cloud import aiplatform, storage
+
+
+def main():
+    a = dir(aiplatform)
+    b = dir(storage)
diff --git a/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/gazelle_python.yaml b/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/gazelle_python.yaml
new file mode 100644
index 0000000..bd5efab
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_convention/test2_custom_prefix_colon/gazelle_python.yaml
@@ -0,0 +1,6 @@
+manifest:
+  modules_mapping:
+    google.cloud.aiplatform: google_cloud_aiplatform
+    google.cloud.storage: google_cloud_storage
+  pip_repository:
+    name: gazelle_python_test
diff --git a/gazelle/python/testdata/directive_python_label_normalization/README.md b/gazelle/python/testdata/directive_python_label_normalization/README.md
new file mode 100644
index 0000000..a2e1801
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/README.md
@@ -0,0 +1,4 @@
+# Directive: `python_label_normalization`
+
+This test case asserts that the `# gazelle:python_label_normalization` directive
+works as intended when set.
\ No newline at end of file
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_label_normalization/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_label_normalization/WORKSPACE
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_label_normalization/test.yaml
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_label_normalization/test.yaml
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/BUILD.in b/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/BUILD.in
new file mode 100644
index 0000000..5f5620a
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_label_normalization none
\ No newline at end of file
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/BUILD.out b/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/BUILD.out
new file mode 100644
index 0000000..6e70778
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/BUILD.out
@@ -0,0 +1,10 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_label_normalization none
+
+py_library(
+    name = "test1_type_none",
+    srcs = ["bar.py"],
+    visibility = ["//:__subpackages__"],
+    deps = ["@gazelle_python_test//google.cloud.storage"],
+)
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/bar.py b/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/bar.py
new file mode 100644
index 0000000..8b3839e
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/bar.py
@@ -0,0 +1,5 @@
+from google.cloud import storage
+
+
+def main():
+    b = dir(storage)
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/gazelle_python.yaml b/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/gazelle_python.yaml
new file mode 100644
index 0000000..5bfada4
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test1_type_none/gazelle_python.yaml
@@ -0,0 +1,6 @@
+manifest:
+  modules_mapping:
+    # Weird google.cloud.storage here on purpose to make normalization apparent
+    google.cloud.storage: google.cloud.storage
+  pip_repository:
+    name: gazelle_python_test
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/BUILD.in b/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/BUILD.in
new file mode 100644
index 0000000..a2cca53
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_label_normalization pep503
\ No newline at end of file
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/BUILD.out b/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/BUILD.out
new file mode 100644
index 0000000..7a88c8b
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/BUILD.out
@@ -0,0 +1,10 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_label_normalization pep503
+
+py_library(
+    name = "test2_type_pep503",
+    srcs = ["bar.py"],
+    visibility = ["//:__subpackages__"],
+    deps = ["@gazelle_python_test//google-cloud-storage"],
+)
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/bar.py b/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/bar.py
new file mode 100644
index 0000000..8b3839e
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/bar.py
@@ -0,0 +1,5 @@
+from google.cloud import storage
+
+
+def main():
+    b = dir(storage)
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/gazelle_python.yaml b/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/gazelle_python.yaml
new file mode 100644
index 0000000..5bfada4
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test2_type_pep503/gazelle_python.yaml
@@ -0,0 +1,6 @@
+manifest:
+  modules_mapping:
+    # Weird google.cloud.storage here on purpose to make normalization apparent
+    google.cloud.storage: google.cloud.storage
+  pip_repository:
+    name: gazelle_python_test
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/BUILD.in b/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/BUILD.in
new file mode 100644
index 0000000..5d1a19a
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_label_normalization snake_case
\ No newline at end of file
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/BUILD.out b/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/BUILD.out
new file mode 100644
index 0000000..77f180c
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/BUILD.out
@@ -0,0 +1,10 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_label_normalization snake_case
+
+py_library(
+    name = "test3_type_snake_case",
+    srcs = ["bar.py"],
+    visibility = ["//:__subpackages__"],
+    deps = ["@gazelle_python_test//google_cloud_storage"],
+)
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/bar.py b/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/bar.py
new file mode 100644
index 0000000..8b3839e
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/bar.py
@@ -0,0 +1,5 @@
+from google.cloud import storage
+
+
+def main():
+    b = dir(storage)
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/gazelle_python.yaml b/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/gazelle_python.yaml
new file mode 100644
index 0000000..5bfada4
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test3_type_snake_case/gazelle_python.yaml
@@ -0,0 +1,6 @@
+manifest:
+  modules_mapping:
+    # Weird google.cloud.storage here on purpose to make normalization apparent
+    google.cloud.storage: google.cloud.storage
+  pip_repository:
+    name: gazelle_python_test
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/BUILD.in
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/BUILD.in
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/BUILD.out b/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/BUILD.out
new file mode 100644
index 0000000..2297193
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/BUILD.out
@@ -0,0 +1,8 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+py_library(
+    name = "test4_unset_defaults_to_snake_case",
+    srcs = ["bar.py"],
+    visibility = ["//:__subpackages__"],
+    deps = ["@gazelle_python_test//google_cloud_storage"],
+)
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/bar.py b/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/bar.py
new file mode 100644
index 0000000..8b3839e
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/bar.py
@@ -0,0 +1,5 @@
+from google.cloud import storage
+
+
+def main():
+    b = dir(storage)
diff --git a/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/gazelle_python.yaml b/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/gazelle_python.yaml
new file mode 100644
index 0000000..5bfada4
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_label_normalization/test4_unset_defaults_to_snake_case/gazelle_python.yaml
@@ -0,0 +1,6 @@
+manifest:
+  modules_mapping:
+    # Weird google.cloud.storage here on purpose to make normalization apparent
+    google.cloud.storage: google.cloud.storage
+  pip_repository:
+    name: gazelle_python_test
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/README.md b/gazelle/python/testdata/directive_python_test_file_pattern/README.md
new file mode 100644
index 0000000..99142f7
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/README.md
@@ -0,0 +1,19 @@
+# Directive: `python_test_file_pattern`
+
+This test case asserts that the `# gazelle:python_test_file_pattern` directive
+works as intended.
+
+It consists of 6 cases:
+
+1.  When not set, both `*_test.py` and `test_*.py` files are mapped to the `py_test`
+    rule.
+2.  When set to a single value `*_test.py`, `test_*.py` files are mapped to the
+    `py_library` rule.
+3.  When set to a single value `test_*.py`, `*_test.py` files are mapped to the
+    `py_library` rule (ie: the inverse of case 2, but also with "file" generation
+    mode).
+4.  Arbitrary `glob` patterns are supported.
+5.  Multiple `glob` patterns are supported and that patterns don't technically
+    need to end in `.py` if they end in a wildcard (eg: we won't make a `py_test`
+    target for the extensionless file `test_foo`).
+6.  Sub-packages can override the directive's value.
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/WORKSPACE
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test.yaml
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test.yaml
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/BUILD.in
new file mode 100644
index 0000000..af2c2ce
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_generation_mode file
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/BUILD.out b/gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/BUILD.out
new file mode 100644
index 0000000..724b913
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/BUILD.out
@@ -0,0 +1,18 @@
+load("@rules_python//python:defs.bzl", "py_test")
+
+# gazelle:python_generation_mode file
+
+py_test(
+    name = "hello_test",
+    srcs = ["hello_test.py"],
+)
+
+py_test(
+    name = "test_goodbye",
+    srcs = ["test_goodbye.py"],
+)
+
+py_test(
+    name = "test_hello",
+    srcs = ["test_hello.py"],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/hello_test.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/hello_test.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/test_goodbye.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/test_goodbye.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/test_hello.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test1_unset/test_hello.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/BUILD.in
new file mode 100644
index 0000000..57becc6
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_test_file_pattern *_test.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/BUILD.out b/gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/BUILD.out
new file mode 100644
index 0000000..be5917b
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/BUILD.out
@@ -0,0 +1,17 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_test_file_pattern *_test.py
+
+py_library(
+    name = "test2_star_test_py",
+    srcs = [
+        "test_goodbye.py",
+        "test_hello.py",
+    ],
+    visibility = ["//:__subpackages__"],
+)
+
+py_test(
+    name = "hello_test",
+    srcs = ["hello_test.py"],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/hello_test.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/hello_test.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/test_goodbye.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/test_goodbye.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/test_hello.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test2_star_test_py/test_hello.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/BUILD.in
new file mode 100644
index 0000000..cc91589
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/BUILD.in
@@ -0,0 +1,2 @@
+# gazelle:python_generation_mode file
+# gazelle:python_test_file_pattern test_*.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/BUILD.out b/gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/BUILD.out
new file mode 100644
index 0000000..7ff0d5d
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/BUILD.out
@@ -0,0 +1,20 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_generation_mode file
+# gazelle:python_test_file_pattern test_*.py
+
+py_library(
+    name = "hello_test",
+    srcs = ["hello_test.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_test(
+    name = "test_goodbye",
+    srcs = ["test_goodbye.py"],
+)
+
+py_test(
+    name = "test_hello",
+    srcs = ["test_hello.py"],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/hello_test.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/hello_test.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/test_goodbye.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/test_goodbye.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/test_hello.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test3_test_star_py/test_hello.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/BUILD.in
new file mode 100644
index 0000000..8bffaa1
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/BUILD.in
@@ -0,0 +1,2 @@
+# gazelle:python_generation_mode file
+# gazelle:python_test_file_pattern foo_*_[A-Z]_test?.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/BUILD.out b/gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/BUILD.out
new file mode 100644
index 0000000..ff0034c
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/BUILD.out
@@ -0,0 +1,20 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_generation_mode file
+# gazelle:python_test_file_pattern foo_*_[A-Z]_test?.py
+
+py_library(
+    name = "foo_nota_test0_Z1",
+    srcs = ["foo_nota_test0_Z1.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_test(
+    name = "foo_helloworld_A_testA",
+    srcs = ["foo_helloworld_A_testA.py"],
+)
+
+py_test(
+    name = "foo_my_filename_B_test1",
+    srcs = ["foo_my_filename_B_test1.py"],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/foo_helloworld_A_testA.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/foo_helloworld_A_testA.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/foo_my_filename_B_test1.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/foo_my_filename_B_test1.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/foo_nota_test0_Z1.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test4_glob/foo_nota_test0_Z1.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/BUILD.in
new file mode 100644
index 0000000..a0e25aa
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/BUILD.in
@@ -0,0 +1,3 @@
+# gazelle:python_test_file_pattern *_hello.py,hello_*,unittest_*,*_unittest.py
+
+# Note that "foo_unittest.pyc" and "test_bar" files are ignored.
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/BUILD.out b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/BUILD.out
new file mode 100644
index 0000000..1dcf9a4
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/BUILD.out
@@ -0,0 +1,34 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_test_file_pattern *_hello.py,hello_*,unittest_*,*_unittest.py
+
+# Note that "foo_unittest.pyc" and "test_bar" files are ignored.
+
+py_library(
+    name = "test5_multiple_patterns",
+    srcs = [
+        "mylib.py",
+        "mylib2.py",
+    ],
+    visibility = ["//:__subpackages__"],
+)
+
+py_test(
+    name = "foo_hello",
+    srcs = ["foo_hello.py"],
+)
+
+py_test(
+    name = "foo_unittest",
+    srcs = ["foo_unittest.py"],
+)
+
+py_test(
+    name = "hello_foo",
+    srcs = ["hello_foo.py"],
+)
+
+py_test(
+    name = "unittest_foo",
+    srcs = ["unittest_foo.py"],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/foo_hello.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/foo_hello.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/foo_unittest.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/foo_unittest.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/foo_unittest.pyc
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/foo_unittest.pyc
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/hello_foo.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/hello_foo.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/mylib.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/mylib.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/mylib2.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/mylib2.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/test_bar
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/test_bar
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/unittest_foo.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test5_multiple_patterns/unittest_foo.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/BUILD.in
new file mode 100644
index 0000000..2acff9b
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/BUILD.in
@@ -0,0 +1,2 @@
+# gazelle:python_generation_mode file
+# gazelle:python_test_file_pattern *_unittest.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/BUILD.out b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/BUILD.out
new file mode 100644
index 0000000..7b9f557
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/BUILD.out
@@ -0,0 +1,15 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_generation_mode file
+# gazelle:python_test_file_pattern *_unittest.py
+
+py_library(
+    name = "not_a_test",
+    srcs = ["not_a_test.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_test(
+    name = "hello_unittest",
+    srcs = ["hello_unittest.py"],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/hello_unittest.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/hello_unittest.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/not_a_test.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/not_a_test.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/BUILD.in
new file mode 100644
index 0000000..cc91589
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/BUILD.in
@@ -0,0 +1,2 @@
+# gazelle:python_generation_mode file
+# gazelle:python_test_file_pattern test_*.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/BUILD.out b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/BUILD.out
new file mode 100644
index 0000000..49107ee
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/BUILD.out
@@ -0,0 +1,21 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_generation_mode file
+# gazelle:python_test_file_pattern test_*.py
+
+py_library(
+    name = "not_a_test",
+    srcs = ["not_a_test.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_library(
+    name = "not_a_unittest",
+    srcs = ["not_a_unittest.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_test(
+    name = "test_bar",
+    srcs = ["test_bar.py"],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/not_a_test.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/not_a_test.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/not_a_unittest.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/not_a_unittest.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/test_bar.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern/test6_nesting/subpkg/test_bar.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/BUILD.in
new file mode 100644
index 0000000..19ed002
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_test_file_pattern foo_*_[A-Z_test?.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/BUILD.out b/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/BUILD.out
new file mode 100644
index 0000000..19ed002
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/BUILD.out
@@ -0,0 +1 @@
+# gazelle:python_test_file_pattern foo_*_[A-Z_test?.py
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/README.md b/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/README.md
new file mode 100644
index 0000000..42ff635
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/README.md
@@ -0,0 +1,4 @@
+# Directive: `python_test_file_pattern`
+
+This test case asserts that the `# gazelle:python_test_file_pattern` directive
+fails with a nice message (rather than panicking) if the glob pattern is invalid.
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/WORKSPACE
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/test.yaml
similarity index 85%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/test.yaml
index bbdfb4c..6bae723 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/directive_python_test_file_pattern_bad_glob/test.yaml
@@ -12,3 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
+expect:
+  exit_code: 1
+  stderr: |
+    gazelle: invalid glob pattern 'foo_*_[A-Z_test?.py'
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern_no_value/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/BUILD.in
new file mode 100644
index 0000000..4e2b4cc
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_test_file_pattern
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern_no_value/BUILD.out b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/BUILD.out
new file mode 100644
index 0000000..4e2b4cc
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/BUILD.out
@@ -0,0 +1 @@
+# gazelle:python_test_file_pattern
diff --git a/gazelle/python/testdata/directive_python_test_file_pattern_no_value/README.md b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/README.md
new file mode 100644
index 0000000..2c38eb7
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/README.md
@@ -0,0 +1,8 @@
+# Directive: `python_test_file_pattern`
+
+This test case asserts that the `# gazelle:python_test_file_pattern` directive
+fails with a nice message if the directive has no value.
+
+See discussion in [PR #1819 (comment)][comment].
+
+[comment]: https://github.com/bazelbuild/rules_python/pull/1819#discussion_r1536906287
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern_no_value/WORKSPACE
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/foo_test.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_test_file_pattern_no_value/foo_test.py
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/test.yaml
similarity index 84%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/directive_python_test_file_pattern_no_value/test.yaml
index bbdfb4c..8eaa659 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/directive_python_test_file_pattern_no_value/test.yaml
@@ -12,3 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
+expect:
+  exit_code: 1
+  stderr: |
+    gazelle: directive 'python_test_file_pattern' requires a value
diff --git a/gazelle/python/testdata/directive_python_visibility/BUILD.in b/gazelle/python/testdata/directive_python_visibility/BUILD.in
new file mode 100644
index 0000000..c1ba9e4
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/BUILD.in
@@ -0,0 +1,4 @@
+# Directives can be added in any order. They will be ordered alphabetically
+# when added.
+# gazelle:python_visibility //tests:__pkg__
+# gazelle:python_visibility //bar:baz
diff --git a/gazelle/python/testdata/directive_python_visibility/BUILD.out b/gazelle/python/testdata/directive_python_visibility/BUILD.out
new file mode 100644
index 0000000..70715e8
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/BUILD.out
@@ -0,0 +1,16 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# Directives can be added in any order. They will be ordered alphabetically
+# when added.
+# gazelle:python_visibility //tests:__pkg__
+# gazelle:python_visibility //bar:baz
+
+py_library(
+    name = "directive_python_visibility",
+    srcs = ["foo.py"],
+    visibility = [
+        "//:__subpackages__",
+        "//bar:baz",
+        "//tests:__pkg__",
+    ],
+)
diff --git a/gazelle/python/testdata/directive_python_visibility/README.md b/gazelle/python/testdata/directive_python_visibility/README.md
new file mode 100644
index 0000000..51ab7ae
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/README.md
@@ -0,0 +1,4 @@
+# Directive: `python_visibility`
+
+This test case asserts that the `# gazelle:python_visibility` directive correctly
+appends multiple labels to the target's `visibility` parameter.
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/directive_python_visibility/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/directive_python_visibility/WORKSPACE
diff --git a/gazelle/python/testdata/directive_python_visibility/foo.py b/gazelle/python/testdata/directive_python_visibility/foo.py
new file mode 100644
index 0000000..98907eb
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/foo.py
@@ -0,0 +1,2 @@
+def func():
+    print("library_func")
diff --git a/gazelle/python/testdata/directive_python_visibility/subdir/BUILD.in b/gazelle/python/testdata/directive_python_visibility/subdir/BUILD.in
new file mode 100644
index 0000000..5193e69
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/subdir/BUILD.in
@@ -0,0 +1,4 @@
+# python_visibilty directive applies to all child bazel packages.
+# Thus, the generated file for this package will also have vis for
+# //tests:__pkg__ and //bar:baz in addition to the default.
+# gazelle:python_visibility //tests:__subpackages__
diff --git a/gazelle/python/testdata/directive_python_visibility/subdir/BUILD.out b/gazelle/python/testdata/directive_python_visibility/subdir/BUILD.out
new file mode 100644
index 0000000..722c840
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/subdir/BUILD.out
@@ -0,0 +1,20 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# python_visibilty directive applies to all child bazel packages.
+# Thus, the generated file for this package will also have vis for
+# //tests:__pkg__ and //bar:baz in addition to the default.
+# gazelle:python_visibility //tests:__subpackages__
+
+py_library(
+    name = "subdir",
+    srcs = [
+        "__init__.py",
+        "bar.py",
+    ],
+    visibility = [
+        "//:__subpackages__",
+        "//bar:baz",
+        "//tests:__pkg__",
+        "//tests:__subpackages__",
+    ],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_visibility/subdir/__init__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_visibility/subdir/__init__.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_visibility/subdir/bar.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_visibility/subdir/bar.py
diff --git a/gazelle/python/testdata/directive_python_visibility/subdir_python_root/BUILD.in b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/BUILD.in
new file mode 100644
index 0000000..6948b47
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/BUILD.in
@@ -0,0 +1 @@
+# gazelle:python_root
diff --git a/gazelle/python/testdata/directive_python_visibility/subdir_python_root/BUILD.out b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/BUILD.out
new file mode 100644
index 0000000..6948b47
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/BUILD.out
@@ -0,0 +1 @@
+# gazelle:python_root
diff --git a/gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/BUILD.in b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/BUILD.in
new file mode 100644
index 0000000..41ff631
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/BUILD.in
@@ -0,0 +1,6 @@
+# The default visibility is "//$python_root$:__subpackages" so the generated
+# target will also have "//subdir_python_root:__subpackages__" in the visibility
+# attribute.
+#
+# gazelle:python_visibility //$python_root$/anywhere:__pkg__
+# gazelle:python_visibility //$python_root$/and/also:here
diff --git a/gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/BUILD.out b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/BUILD.out
new file mode 100644
index 0000000..25ec8de
--- /dev/null
+++ b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/BUILD.out
@@ -0,0 +1,24 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# The default visibility is "//$python_root$:__subpackages" so the generated
+# target will also have "//subdir_python_root:__subpackages__" in the visibility
+# attribute.
+#
+# gazelle:python_visibility //$python_root$/anywhere:__pkg__
+# gazelle:python_visibility //$python_root$/and/also:here
+
+py_library(
+    name = "subdir",
+    srcs = [
+        "__init__.py",
+        "baz.py",
+    ],
+    imports = [".."],
+    visibility = [
+        "//bar:baz",
+        "//subdir_python_root:__subpackages__",
+        "//subdir_python_root/and/also:here",
+        "//subdir_python_root/anywhere:__pkg__",
+        "//tests:__pkg__",
+    ],
+)
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/__init__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/__init__.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/baz.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/directive_python_visibility/subdir_python_root/subdir/baz.py
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/directive_python_visibility/test.yaml
similarity index 94%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/directive_python_visibility/test.yaml
index bbdfb4c..2410223 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/directive_python_visibility/test.yaml
@@ -12,3 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
+expect:
+  exit_code: 0
diff --git a/gazelle/python/testdata/dont_rename_target/__init__.py b/gazelle/python/testdata/dont_rename_target/__init__.py
index bbdfb4c..4101095 100644
--- a/gazelle/python/testdata/dont_rename_target/__init__.py
+++ b/gazelle/python/testdata/dont_rename_target/__init__.py
@@ -11,4 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
diff --git a/gazelle/python/testdata/first_party_file_and_directory_modules/baz.py b/gazelle/python/testdata/first_party_file_and_directory_modules/baz.py
index e03a9ec..8f8820d 100644
--- a/gazelle/python/testdata/first_party_file_and_directory_modules/baz.py
+++ b/gazelle/python/testdata/first_party_file_and_directory_modules/baz.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def baz():
     return "baz from baz.py"
diff --git a/gazelle/python/testdata/first_party_file_and_directory_modules/foo.py b/gazelle/python/testdata/first_party_file_and_directory_modules/foo.py
index 04474d8..be6d7dd 100644
--- a/gazelle/python/testdata/first_party_file_and_directory_modules/foo.py
+++ b/gazelle/python/testdata/first_party_file_and_directory_modules/foo.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def foo():
     print("foo")
diff --git a/gazelle/python/testdata/first_party_file_and_directory_modules/one/two.py b/gazelle/python/testdata/first_party_file_and_directory_modules/one/two.py
index 94cca3d..d1909b1 100644
--- a/gazelle/python/testdata/first_party_file_and_directory_modules/one/two.py
+++ b/gazelle/python/testdata/first_party_file_and_directory_modules/one/two.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def two():
     return "two"
diff --git a/gazelle/python/testdata/first_party_file_and_directory_modules/undiscoverable/package1/subpackage1/module1.py b/gazelle/python/testdata/first_party_file_and_directory_modules/undiscoverable/package1/subpackage1/module1.py
index 76c7227..c5ccb87 100644
--- a/gazelle/python/testdata/first_party_file_and_directory_modules/undiscoverable/package1/subpackage1/module1.py
+++ b/gazelle/python/testdata/first_party_file_and_directory_modules/undiscoverable/package1/subpackage1/module1.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def find_me():
     return "found"
diff --git a/gazelle/python/testdata/generated_test_entrypoint/foo.py b/gazelle/python/testdata/generated_test_entrypoint/foo.py
index 932de45..3f049df 100644
--- a/gazelle/python/testdata/generated_test_entrypoint/foo.py
+++ b/gazelle/python/testdata/generated_test_entrypoint/foo.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def foo():
     return "foo"
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/invalid_annotation_exclude/BUILD.in
similarity index 100%
rename from gazelle/python/testdata/invalid_annotation/BUILD.in
rename to gazelle/python/testdata/invalid_annotation_exclude/BUILD.in
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.out b/gazelle/python/testdata/invalid_annotation_exclude/BUILD.out
similarity index 100%
rename from gazelle/python/testdata/invalid_annotation/BUILD.out
rename to gazelle/python/testdata/invalid_annotation_exclude/BUILD.out
diff --git a/gazelle/python/testdata/invalid_annotation/README.md b/gazelle/python/testdata/invalid_annotation_exclude/README.md
similarity index 100%
rename from gazelle/python/testdata/invalid_annotation/README.md
rename to gazelle/python/testdata/invalid_annotation_exclude/README.md
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/invalid_annotation_exclude/WORKSPACE
similarity index 100%
rename from gazelle/python/testdata/invalid_annotation/WORKSPACE
rename to gazelle/python/testdata/invalid_annotation_exclude/WORKSPACE
diff --git a/gazelle/python/testdata/invalid_annotation/__init__.py b/gazelle/python/testdata/invalid_annotation_exclude/__init__.py
similarity index 100%
rename from gazelle/python/testdata/invalid_annotation/__init__.py
rename to gazelle/python/testdata/invalid_annotation_exclude/__init__.py
diff --git a/gazelle/python/testdata/invalid_annotation/test.yaml b/gazelle/python/testdata/invalid_annotation_exclude/test.yaml
similarity index 100%
rename from gazelle/python/testdata/invalid_annotation/test.yaml
rename to gazelle/python/testdata/invalid_annotation_exclude/test.yaml
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/invalid_annotation_include_dep/BUILD.in
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/invalid_annotation_include_dep/BUILD.in
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.out b/gazelle/python/testdata/invalid_annotation_include_dep/BUILD.out
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.out
copy to gazelle/python/testdata/invalid_annotation_include_dep/BUILD.out
diff --git a/gazelle/python/testdata/invalid_annotation_include_dep/README.md b/gazelle/python/testdata/invalid_annotation_include_dep/README.md
new file mode 100644
index 0000000..2f8e024
--- /dev/null
+++ b/gazelle/python/testdata/invalid_annotation_include_dep/README.md
@@ -0,0 +1,3 @@
+# Invalid  annotation
+This test case asserts that the parse step fails as expected due to invalid annotation format of
+the `include_dep` annotation.
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/invalid_annotation_include_dep/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/invalid_annotation_include_dep/WORKSPACE
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/invalid_annotation_include_dep/__init__.py
similarity index 87%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/invalid_annotation_include_dep/__init__.py
index bbdfb4c..61f4c76 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/invalid_annotation_include_dep/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# gazelle:include_dep
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/invalid_annotation_include_dep/test.yaml
similarity index 73%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/invalid_annotation_include_dep/test.yaml
index bbdfb4c..f2159a6 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/invalid_annotation_include_dep/test.yaml
@@ -1,4 +1,4 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
+expect:
+  exit_code: 1
+  stderr: |
+    gazelle: ERROR: failed to parse annotations: `# gazelle:include_dep` requires a value
diff --git a/gazelle/python/testdata/monorepo/coarse_grained/BUILD.out b/gazelle/python/testdata/monorepo/coarse_grained/BUILD.out
index 3a33111..af01460 100644
--- a/gazelle/python/testdata/monorepo/coarse_grained/BUILD.out
+++ b/gazelle/python/testdata/monorepo/coarse_grained/BUILD.out
@@ -25,5 +25,4 @@
         "bar/bar_test.py",
         "foo/bar/bar_test.py",
     ],
-    main = "__test__.py",
 )
diff --git a/gazelle/python/testdata/monorepo/coarse_grained/_boundary/__init__.py b/gazelle/python/testdata/monorepo/coarse_grained/_boundary/__init__.py
index bbdfb4c..4101095 100644
--- a/gazelle/python/testdata/monorepo/coarse_grained/_boundary/__init__.py
+++ b/gazelle/python/testdata/monorepo/coarse_grained/_boundary/__init__.py
@@ -11,4 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
diff --git a/gazelle/python/testdata/naming_convention/__main__.py b/gazelle/python/testdata/naming_convention/__main__.py
index a3afc79..9795589 100644
--- a/gazelle/python/testdata/naming_convention/__main__.py
+++ b/gazelle/python/testdata/naming_convention/__main__.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # For test purposes only.
-import __init__
\ No newline at end of file
+import __init__
diff --git a/gazelle/python/testdata/naming_convention/__test__.py b/gazelle/python/testdata/naming_convention/__test__.py
index a3afc79..9795589 100644
--- a/gazelle/python/testdata/naming_convention/__test__.py
+++ b/gazelle/python/testdata/naming_convention/__test__.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # For test purposes only.
-import __init__
\ No newline at end of file
+import __init__
diff --git a/gazelle/python/testdata/naming_convention/dont_rename/__main__.py b/gazelle/python/testdata/naming_convention/dont_rename/__main__.py
index a3afc79..9795589 100644
--- a/gazelle/python/testdata/naming_convention/dont_rename/__main__.py
+++ b/gazelle/python/testdata/naming_convention/dont_rename/__main__.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # For test purposes only.
-import __init__
\ No newline at end of file
+import __init__
diff --git a/gazelle/python/testdata/naming_convention/dont_rename/__test__.py b/gazelle/python/testdata/naming_convention/dont_rename/__test__.py
index a3afc79..9795589 100644
--- a/gazelle/python/testdata/naming_convention/dont_rename/__test__.py
+++ b/gazelle/python/testdata/naming_convention/dont_rename/__test__.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # For test purposes only.
-import __init__
\ No newline at end of file
+import __init__
diff --git a/gazelle/python/testdata/naming_convention/resolve_conflict/__main__.py b/gazelle/python/testdata/naming_convention/resolve_conflict/__main__.py
index a3afc79..9795589 100644
--- a/gazelle/python/testdata/naming_convention/resolve_conflict/__main__.py
+++ b/gazelle/python/testdata/naming_convention/resolve_conflict/__main__.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # For test purposes only.
-import __init__
\ No newline at end of file
+import __init__
diff --git a/gazelle/python/testdata/naming_convention/resolve_conflict/__test__.py b/gazelle/python/testdata/naming_convention/resolve_conflict/__test__.py
index a3afc79..9795589 100644
--- a/gazelle/python/testdata/naming_convention/resolve_conflict/__test__.py
+++ b/gazelle/python/testdata/naming_convention/resolve_conflict/__test__.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # For test purposes only.
-import __init__
\ No newline at end of file
+import __init__
diff --git a/gazelle/python/testdata/per_file_subdirs/bar/foo.py b/gazelle/python/testdata/per_file_subdirs/bar/foo.py
index 59eb08c..506f028 100644
--- a/gazelle/python/testdata/per_file_subdirs/bar/foo.py
+++ b/gazelle/python/testdata/per_file_subdirs/bar/foo.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def func():
     pass
diff --git a/gazelle/python/testdata/per_package_test_target_without_entry_point/BUILD.in b/gazelle/python/testdata/per_package_test_target_without_entry_point/BUILD.in
new file mode 100644
index 0000000..27120f3
--- /dev/null
+++ b/gazelle/python/testdata/per_package_test_target_without_entry_point/BUILD.in
@@ -0,0 +1,2 @@
+# gazelle:python_generation_mode package
+# gazelle:python_generation_mode_per_package_require_test_entry_point false
\ No newline at end of file
diff --git a/gazelle/python/testdata/per_package_test_target_without_entry_point/BUILD.out b/gazelle/python/testdata/per_package_test_target_without_entry_point/BUILD.out
new file mode 100644
index 0000000..c4ec331
--- /dev/null
+++ b/gazelle/python/testdata/per_package_test_target_without_entry_point/BUILD.out
@@ -0,0 +1,18 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_generation_mode package
+# gazelle:python_generation_mode_per_package_require_test_entry_point false
+
+py_library(
+    name = "per_package_test_target_without_entry_point",
+    srcs = ["__init__.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_test(
+    name = "per_package_test_target_without_entry_point_test",
+    srcs = [
+        "bar_test.py",
+        "foo_test.py",
+    ],
+)
diff --git a/gazelle/python/testdata/per_package_test_target_without_entry_point/README.md b/gazelle/python/testdata/per_package_test_target_without_entry_point/README.md
new file mode 100644
index 0000000..8decb00
--- /dev/null
+++ b/gazelle/python/testdata/per_package_test_target_without_entry_point/README.md
@@ -0,0 +1,3 @@
+# One test target per package without entry point
+
+This test case asserts that one test target is generated per package without entry point when `gazelle:python_generation_mode_per_package_require_test_entry_point false`
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/per_package_test_target_without_entry_point/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/per_package_test_target_without_entry_point/WORKSPACE
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/per_package_test_target_without_entry_point/__init__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/per_package_test_target_without_entry_point/__init__.py
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/per_package_test_target_without_entry_point/bar_test.py
similarity index 80%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/per_package_test_target_without_entry_point/bar_test.py
index bbdfb4c..9948f1c 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/per_package_test_target_without_entry_point/bar_test.py
@@ -12,3 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import unittest
+
+
+class BarTest(unittest.TestCase):
+    def test_foo(self):
+        pass
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/per_package_test_target_without_entry_point/foo_test.py
similarity index 80%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/per_package_test_target_without_entry_point/foo_test.py
index bbdfb4c..a128adf 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/per_package_test_target_without_entry_point/foo_test.py
@@ -12,3 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import unittest
+
+
+class FooTest(unittest.TestCase):
+    def test_foo(self):
+        pass
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/per_package_test_target_without_entry_point/test.yaml
similarity index 94%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/per_package_test_target_without_entry_point/test.yaml
index bbdfb4c..2410223 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/per_package_test_target_without_entry_point/test.yaml
@@ -12,3 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
+expect:
+  exit_code: 0
diff --git a/gazelle/python/testdata/project_generation_mode/BUILD.in b/gazelle/python/testdata/project_generation_mode/BUILD.in
new file mode 100644
index 0000000..130a625
--- /dev/null
+++ b/gazelle/python/testdata/project_generation_mode/BUILD.in
@@ -0,0 +1,2 @@
+# gazelle:python_extension enabled
+# gazelle:python_generation_mode project
diff --git a/gazelle/python/testdata/project_generation_mode/BUILD.out b/gazelle/python/testdata/project_generation_mode/BUILD.out
new file mode 100644
index 0000000..1f30b6d
--- /dev/null
+++ b/gazelle/python/testdata/project_generation_mode/BUILD.out
@@ -0,0 +1,14 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+# gazelle:python_extension enabled
+# gazelle:python_generation_mode project
+
+py_library(
+    name = "project_generation_mode",
+    srcs = [
+        "__init__.py",
+        "bar/bar.py",
+        "foo/foo.py",
+    ],
+    visibility = ["//:__subpackages__"],
+)
diff --git a/gazelle/python/testdata/project_generation_mode/README.md b/gazelle/python/testdata/project_generation_mode/README.md
new file mode 100644
index 0000000..6d8f138
--- /dev/null
+++ b/gazelle/python/testdata/project_generation_mode/README.md
@@ -0,0 +1,3 @@
+# Project generation mode
+
+Simple example using `gazelle:python_generation_mode project` in a project with no tests.
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/project_generation_mode/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/project_generation_mode/WORKSPACE
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/project_generation_mode/__init__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/project_generation_mode/__init__.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/project_generation_mode/bar/bar.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/project_generation_mode/bar/bar.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/project_generation_mode/foo/foo.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/project_generation_mode/foo/foo.py
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/project_generation_mode/test.yaml
similarity index 98%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/project_generation_mode/test.yaml
index bbdfb4c..fcea777 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/project_generation_mode/test.yaml
@@ -12,3 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
diff --git a/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/BUILD.in b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/BUILD.in
new file mode 100644
index 0000000..130a625
--- /dev/null
+++ b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/BUILD.in
@@ -0,0 +1,2 @@
+# gazelle:python_extension enabled
+# gazelle:python_generation_mode project
diff --git a/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/BUILD.out b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/BUILD.out
new file mode 100644
index 0000000..05cf353
--- /dev/null
+++ b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/BUILD.out
@@ -0,0 +1,19 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_extension enabled
+# gazelle:python_generation_mode project
+
+py_library(
+    name = "project_generation_mode_with_test_entrypoint",
+    srcs = ["__init__.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_test(
+    name = "project_generation_mode_with_test_entrypoint_test",
+    srcs = [
+        "__test__.py",
+        "foo/foo_test.py",
+    ],
+    main = "__test__.py",
+)
diff --git a/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/README.md b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/README.md
new file mode 100644
index 0000000..8db5728
--- /dev/null
+++ b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/README.md
@@ -0,0 +1,3 @@
+# Project generation mode with test entrypoint
+
+Example using `gazelle:python_generation_mode project` in a project with tests that use an explicit `__test__.py` entrypoint.
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/project_generation_mode_with_test_entrypoint/WORKSPACE
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/__init__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/project_generation_mode_with_test_entrypoint/__init__.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/__test__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/project_generation_mode_with_test_entrypoint/__test__.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/foo/foo_test.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/project_generation_mode_with_test_entrypoint/foo/foo_test.py
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/test.yaml
similarity index 98%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/project_generation_mode_with_test_entrypoint/test.yaml
index bbdfb4c..fcea777 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/project_generation_mode_with_test_entrypoint/test.yaml
@@ -12,3 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
diff --git a/gazelle/python/testdata/project_generation_mode_with_tests/BUILD.in b/gazelle/python/testdata/project_generation_mode_with_tests/BUILD.in
new file mode 100644
index 0000000..130a625
--- /dev/null
+++ b/gazelle/python/testdata/project_generation_mode_with_tests/BUILD.in
@@ -0,0 +1,2 @@
+# gazelle:python_extension enabled
+# gazelle:python_generation_mode project
diff --git a/gazelle/python/testdata/project_generation_mode_with_tests/BUILD.out b/gazelle/python/testdata/project_generation_mode_with_tests/BUILD.out
new file mode 100644
index 0000000..8756978
--- /dev/null
+++ b/gazelle/python/testdata/project_generation_mode_with_tests/BUILD.out
@@ -0,0 +1,15 @@
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
+
+# gazelle:python_extension enabled
+# gazelle:python_generation_mode project
+
+py_library(
+    name = "project_generation_mode_with_tests",
+    srcs = ["__init__.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_test(
+    name = "project_generation_mode_with_tests_test",
+    srcs = ["foo/foo_test.py"],
+)
diff --git a/gazelle/python/testdata/project_generation_mode_with_tests/README.md b/gazelle/python/testdata/project_generation_mode_with_tests/README.md
new file mode 100644
index 0000000..4a5f012
--- /dev/null
+++ b/gazelle/python/testdata/project_generation_mode_with_tests/README.md
@@ -0,0 +1,7 @@
+# Project generation mode with tests
+
+Example using `gazelle:python_generation_mode project` in a project with tests, but no `__test__.py` entrypoint.
+
+Note that, in this mode, the `py_test` rule will have no `main` set, which will fail to run with the standard
+`py_test` rule. However, this can be used in conjunction with `gazelle:map_kind` to use some other implementation
+of `py_test` that is able to handle this sitation (such as `rules_python_pytest`).
\ No newline at end of file
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/project_generation_mode_with_tests/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/project_generation_mode_with_tests/WORKSPACE
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/project_generation_mode_with_tests/__init__.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/project_generation_mode_with_tests/__init__.py
diff --git a/gazelle/python/testdata/invalid_annotation/BUILD.in b/gazelle/python/testdata/project_generation_mode_with_tests/foo/foo_test.py
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/BUILD.in
copy to gazelle/python/testdata/project_generation_mode_with_tests/foo/foo_test.py
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/project_generation_mode_with_tests/test.yaml
similarity index 98%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/project_generation_mode_with_tests/test.yaml
index bbdfb4c..fcea777 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/project_generation_mode_with_tests/test.yaml
@@ -12,3 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
diff --git a/gazelle/python/testdata/python_target_with_test_in_name/real_test.py b/gazelle/python/testdata/python_target_with_test_in_name/real_test.py
index e390866..b25d5bd 100644
--- a/gazelle/python/testdata/python_target_with_test_in_name/real_test.py
+++ b/gazelle/python/testdata/python_target_with_test_in_name/real_test.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import boto3
 import __init__
+import boto3
 
 _ = boto3
diff --git a/gazelle/python/testdata/python_target_with_test_in_name/test_reality.py b/gazelle/python/testdata/python_target_with_test_in_name/test_reality.py
index a3afc79..9795589 100644
--- a/gazelle/python/testdata/python_target_with_test_in_name/test_reality.py
+++ b/gazelle/python/testdata/python_target_with_test_in_name/test_reality.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # For test purposes only.
-import __init__
\ No newline at end of file
+import __init__
diff --git a/gazelle/python/testdata/relative_imports/package1/module2.py b/gazelle/python/testdata/relative_imports/package1/module2.py
index f8893b2..0cbc5f0 100644
--- a/gazelle/python/testdata/relative_imports/package1/module2.py
+++ b/gazelle/python/testdata/relative_imports/package1/module2.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def function2():
     return "function2"
diff --git a/gazelle/python/testdata/relative_imports/package2/__init__.py b/gazelle/python/testdata/relative_imports/package2/__init__.py
index 0f59568..fcaa330 100644
--- a/gazelle/python/testdata/relative_imports/package2/__init__.py
+++ b/gazelle/python/testdata/relative_imports/package2/__init__.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 class Class1:
     def method1(self):
         return "method1"
diff --git a/gazelle/python/testdata/relative_imports/package2/module3.py b/gazelle/python/testdata/relative_imports/package2/module3.py
index 478dea9..29bb571 100644
--- a/gazelle/python/testdata/relative_imports/package2/module3.py
+++ b/gazelle/python/testdata/relative_imports/package2/module3.py
@@ -12,10 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import resolved_package
+
 from . import Class1
 from .subpackage1.module5 import function5
 
-import resolved_package
 
 def function3():
     c1 = Class1()
diff --git a/gazelle/python/testdata/relative_imports/package2/module4.py b/gazelle/python/testdata/relative_imports/package2/module4.py
index b7509dc..28cdc13 100644
--- a/gazelle/python/testdata/relative_imports/package2/module4.py
+++ b/gazelle/python/testdata/relative_imports/package2/module4.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def function4():
     return "function4"
diff --git a/gazelle/python/testdata/remove_invalid_library/BUILD.in b/gazelle/python/testdata/remove_invalid_library/BUILD.in
new file mode 100644
index 0000000..3f24c8d
--- /dev/null
+++ b/gazelle/python/testdata/remove_invalid_library/BUILD.in
@@ -0,0 +1,16 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+py_library(
+    name = "remove_invalid_library",
+    srcs = ["__init__.py"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_library(
+    name = "deps_with_no_srcs_library",
+    deps = [
+        "//:remove_invalid_library",
+        "@pypi//bar",
+        "@pypi//foo",
+    ],
+)
diff --git a/gazelle/python/testdata/remove_invalid_library/BUILD.out b/gazelle/python/testdata/remove_invalid_library/BUILD.out
new file mode 100644
index 0000000..4a6fffa
--- /dev/null
+++ b/gazelle/python/testdata/remove_invalid_library/BUILD.out
@@ -0,0 +1,10 @@
+load("@rules_python//python:defs.bzl", "py_library")
+
+py_library(
+    name = "deps_with_no_srcs_library",
+    deps = [
+        "//:remove_invalid_library",
+        "@pypi//bar",
+        "@pypi//foo",
+    ],
+)
diff --git a/gazelle/python/testdata/remove_invalid_library/README.md b/gazelle/python/testdata/remove_invalid_library/README.md
new file mode 100644
index 0000000..7a35167
--- /dev/null
+++ b/gazelle/python/testdata/remove_invalid_library/README.md
@@ -0,0 +1,3 @@
+# Remove invalid
+
+This test case asserts that `py_library` should be deleted if invalid.
diff --git a/gazelle/python/testdata/invalid_annotation/WORKSPACE b/gazelle/python/testdata/remove_invalid_library/WORKSPACE
similarity index 100%
copy from gazelle/python/testdata/invalid_annotation/WORKSPACE
copy to gazelle/python/testdata/remove_invalid_library/WORKSPACE
diff --git a/gazelle/python/testdata/remove_invalid_library/others/BUILD.in b/gazelle/python/testdata/remove_invalid_library/others/BUILD.in
new file mode 100644
index 0000000..5578327
--- /dev/null
+++ b/gazelle/python/testdata/remove_invalid_library/others/BUILD.in
@@ -0,0 +1,5 @@
+genrule(
+    name = "others",  # same to directory name
+    outs = ["data.txt"],
+    cmd = "echo foo bar baz > $@",
+)
\ No newline at end of file
diff --git a/gazelle/python/testdata/remove_invalid_library/others/BUILD.out b/gazelle/python/testdata/remove_invalid_library/others/BUILD.out
new file mode 100644
index 0000000..5578327
--- /dev/null
+++ b/gazelle/python/testdata/remove_invalid_library/others/BUILD.out
@@ -0,0 +1,5 @@
+genrule(
+    name = "others",  # same to directory name
+    outs = ["data.txt"],
+    cmd = "echo foo bar baz > $@",
+)
\ No newline at end of file
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/gazelle/python/testdata/remove_invalid_library/test.yaml
similarity index 98%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to gazelle/python/testdata/remove_invalid_library/test.yaml
index bbdfb4c..fcea777 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/gazelle/python/testdata/remove_invalid_library/test.yaml
@@ -12,3 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+---
diff --git a/gazelle/python/testdata/respect_kind_mapping/foo.py b/gazelle/python/testdata/respect_kind_mapping/foo.py
index 932de45..3f049df 100644
--- a/gazelle/python/testdata/respect_kind_mapping/foo.py
+++ b/gazelle/python/testdata/respect_kind_mapping/foo.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def foo():
     return "foo"
diff --git a/gazelle/python/testdata/sibling_imports/pkg/b.py b/gazelle/python/testdata/sibling_imports/pkg/b.py
index 7095bdc..d04d423 100644
--- a/gazelle/python/testdata/sibling_imports/pkg/b.py
+++ b/gazelle/python/testdata/sibling_imports/pkg/b.py
@@ -1,2 +1,2 @@
 def run():
-    pass
\ No newline at end of file
+    pass
diff --git a/gazelle/python/testdata/sibling_imports/pkg/unit_test.py b/gazelle/python/testdata/sibling_imports/pkg/unit_test.py
index a3218e2..f42878a 100644
--- a/gazelle/python/testdata/sibling_imports/pkg/unit_test.py
+++ b/gazelle/python/testdata/sibling_imports/pkg/unit_test.py
@@ -1,3 +1,3 @@
 import a
+import test_util
 from b import run
-import test_util
\ No newline at end of file
diff --git a/gazelle/python/testdata/simple_test/foo.py b/gazelle/python/testdata/simple_test/foo.py
index 932de45..3f049df 100644
--- a/gazelle/python/testdata/simple_test/foo.py
+++ b/gazelle/python/testdata/simple_test/foo.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def foo():
     return "foo"
diff --git a/gazelle/python/testdata/simple_test_with_conftest/conftest.py b/gazelle/python/testdata/simple_test_with_conftest/conftest.py
index bbdfb4c..4101095 100644
--- a/gazelle/python/testdata/simple_test_with_conftest/conftest.py
+++ b/gazelle/python/testdata/simple_test_with_conftest/conftest.py
@@ -11,4 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
diff --git a/gazelle/python/testdata/simple_test_with_conftest/foo.py b/gazelle/python/testdata/simple_test_with_conftest/foo.py
index 932de45..3f049df 100644
--- a/gazelle/python/testdata/simple_test_with_conftest/foo.py
+++ b/gazelle/python/testdata/simple_test_with_conftest/foo.py
@@ -12,5 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+
 def foo():
     return "foo"
diff --git a/gazelle/python/testdata/subdir_sources/foo/has_main/__main__.py b/gazelle/python/testdata/subdir_sources/foo/has_main/__main__.py
index bd0fe61..78d2348 100644
--- a/gazelle/python/testdata/subdir_sources/foo/has_main/__main__.py
+++ b/gazelle/python/testdata/subdir_sources/foo/has_main/__main__.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # For test purposes only.
-import foo.has_main.python.my_module
\ No newline at end of file
+import foo.has_main.python.my_module
diff --git a/gazelle/python/testdata/subdir_sources/foo/has_test/__test__.py b/gazelle/python/testdata/subdir_sources/foo/has_test/__test__.py
index 3c9ed1a..ad77cb7 100644
--- a/gazelle/python/testdata/subdir_sources/foo/has_test/__test__.py
+++ b/gazelle/python/testdata/subdir_sources/foo/has_test/__test__.py
@@ -13,4 +13,4 @@
 # limitations under the License.
 
 # For test purposes only.
-import foo.has_test.python.my_module
\ No newline at end of file
+import foo.has_test.python.my_module
diff --git a/gazelle/python/testdata/with_third_party_requirements_from_imports/gazelle_python.yaml b/gazelle/python/testdata/with_third_party_requirements_from_imports/gazelle_python.yaml
index 8b5694b..d3ce6ee 100644
--- a/gazelle/python/testdata/with_third_party_requirements_from_imports/gazelle_python.yaml
+++ b/gazelle/python/testdata/with_third_party_requirements_from_imports/gazelle_python.yaml
@@ -15,1511 +15,49 @@
 manifest:
   modules_mapping:
     cachetools: cachetools
-    cachetools.__init__: cachetools
-    cachetools.func: cachetools
-    cachetools.keys: cachetools
     certifi: certifi
-    certifi.__init__: certifi
-    certifi.__main__: certifi
-    certifi.core: certifi
     charset_normalizer: charset_normalizer
-    charset_normalizer.__init__: charset_normalizer
-    charset_normalizer.api: charset_normalizer
-    charset_normalizer.assets: charset_normalizer
-    charset_normalizer.assets.__init__: charset_normalizer
-    charset_normalizer.cd: charset_normalizer
-    charset_normalizer.cli: charset_normalizer
-    charset_normalizer.cli.__init__: charset_normalizer
-    charset_normalizer.cli.normalizer: charset_normalizer
-    charset_normalizer.constant: charset_normalizer
-    charset_normalizer.legacy: charset_normalizer
-    charset_normalizer.md: charset_normalizer
-    charset_normalizer.models: charset_normalizer
-    charset_normalizer.utils: charset_normalizer
-    charset_normalizer.version: charset_normalizer
     dateutil: python_dateutil
-    dateutil.__init__: python_dateutil
-    dateutil._common: python_dateutil
-    dateutil._version: python_dateutil
-    dateutil.easter: python_dateutil
-    dateutil.parser: python_dateutil
-    dateutil.parser.__init__: python_dateutil
-    dateutil.parser._parser: python_dateutil
-    dateutil.parser.isoparser: python_dateutil
-    dateutil.relativedelta: python_dateutil
-    dateutil.rrule: python_dateutil
-    dateutil.tz: python_dateutil
-    dateutil.tz.__init__: python_dateutil
-    dateutil.tz._common: python_dateutil
-    dateutil.tz._factories: python_dateutil
-    dateutil.tz.tz: python_dateutil
-    dateutil.tz.win: python_dateutil
-    dateutil.tzwin: python_dateutil
-    dateutil.utils: python_dateutil
-    dateutil.zoneinfo: python_dateutil
-    dateutil.zoneinfo.__init__: python_dateutil
-    dateutil.zoneinfo.rebuild: python_dateutil
     docs.conf: google_cloud_resource_manager
-    google._async_resumable_media: google_resumable_media
-    google._async_resumable_media.__init__: google_resumable_media
-    google._async_resumable_media._download: google_resumable_media
-    google._async_resumable_media._helpers: google_resumable_media
-    google._async_resumable_media._upload: google_resumable_media
-    google._async_resumable_media.requests: google_resumable_media
-    google._async_resumable_media.requests.__init__: google_resumable_media
-    google._async_resumable_media.requests._request_helpers: google_resumable_media
-    google._async_resumable_media.requests.download: google_resumable_media
-    google._async_resumable_media.requests.upload: google_resumable_media
     google.api: googleapis_common_protos
-    google.api.__init__: googleapis_common_protos
-    google.api.annotations_pb2: googleapis_common_protos
-    google.api.auth_pb2: googleapis_common_protos
-    google.api.backend_pb2: googleapis_common_protos
-    google.api.billing_pb2: googleapis_common_protos
-    google.api.client_pb2: googleapis_common_protos
-    google.api.config_change_pb2: googleapis_common_protos
-    google.api.consumer_pb2: googleapis_common_protos
-    google.api.context_pb2: googleapis_common_protos
-    google.api.control_pb2: googleapis_common_protos
-    google.api.distribution_pb2: googleapis_common_protos
-    google.api.documentation_pb2: googleapis_common_protos
-    google.api.endpoint_pb2: googleapis_common_protos
-    google.api.error_reason_pb2: googleapis_common_protos
-    google.api.field_behavior_pb2: googleapis_common_protos
-    google.api.http_pb2: googleapis_common_protos
-    google.api.httpbody_pb2: googleapis_common_protos
-    google.api.label_pb2: googleapis_common_protos
-    google.api.launch_stage_pb2: googleapis_common_protos
-    google.api.log_pb2: googleapis_common_protos
-    google.api.logging_pb2: googleapis_common_protos
-    google.api.metric_pb2: googleapis_common_protos
-    google.api.monitored_resource_pb2: googleapis_common_protos
-    google.api.monitoring_pb2: googleapis_common_protos
-    google.api.quota_pb2: googleapis_common_protos
-    google.api.resource_pb2: googleapis_common_protos
-    google.api.routing_pb2: googleapis_common_protos
-    google.api.service_pb2: googleapis_common_protos
-    google.api.source_info_pb2: googleapis_common_protos
-    google.api.system_parameter_pb2: googleapis_common_protos
-    google.api.usage_pb2: googleapis_common_protos
-    google.api.visibility_pb2: googleapis_common_protos
     google.api_core: google_api_core
-    google.api_core.__init__: google_api_core
-    google.api_core.bidi: google_api_core
-    google.api_core.client_info: google_api_core
-    google.api_core.client_options: google_api_core
-    google.api_core.datetime_helpers: google_api_core
-    google.api_core.exceptions: google_api_core
-    google.api_core.extended_operation: google_api_core
-    google.api_core.future: google_api_core
-    google.api_core.future.__init__: google_api_core
-    google.api_core.future._helpers: google_api_core
-    google.api_core.future.async_future: google_api_core
-    google.api_core.future.base: google_api_core
-    google.api_core.future.polling: google_api_core
-    google.api_core.gapic_v1: google_api_core
-    google.api_core.gapic_v1.__init__: google_api_core
-    google.api_core.gapic_v1.client_info: google_api_core
-    google.api_core.gapic_v1.config: google_api_core
-    google.api_core.gapic_v1.config_async: google_api_core
-    google.api_core.gapic_v1.method: google_api_core
-    google.api_core.gapic_v1.method_async: google_api_core
-    google.api_core.gapic_v1.routing_header: google_api_core
-    google.api_core.general_helpers: google_api_core
-    google.api_core.grpc_helpers: google_api_core
-    google.api_core.grpc_helpers_async: google_api_core
-    google.api_core.iam: google_api_core
-    google.api_core.operation: google_api_core
-    google.api_core.operation_async: google_api_core
-    google.api_core.operations_v1: google_api_core
-    google.api_core.operations_v1.__init__: google_api_core
-    google.api_core.operations_v1.abstract_operations_client: google_api_core
-    google.api_core.operations_v1.operations_async_client: google_api_core
-    google.api_core.operations_v1.operations_client: google_api_core
-    google.api_core.operations_v1.operations_client_config: google_api_core
-    google.api_core.operations_v1.pagers: google_api_core
-    google.api_core.operations_v1.transports: google_api_core
-    google.api_core.operations_v1.transports.__init__: google_api_core
-    google.api_core.operations_v1.transports.base: google_api_core
-    google.api_core.operations_v1.transports.rest: google_api_core
-    google.api_core.page_iterator: google_api_core
-    google.api_core.page_iterator_async: google_api_core
-    google.api_core.path_template: google_api_core
-    google.api_core.protobuf_helpers: google_api_core
-    google.api_core.rest_helpers: google_api_core
-    google.api_core.rest_streaming: google_api_core
-    google.api_core.retry: google_api_core
-    google.api_core.retry_async: google_api_core
-    google.api_core.timeout: google_api_core
-    google.api_core.version: google_api_core
     google.auth: google_auth
-    google.auth.__init__: google_auth
-    google.auth._cloud_sdk: google_auth
-    google.auth._credentials_async: google_auth
-    google.auth._default: google_auth
-    google.auth._default_async: google_auth
-    google.auth._helpers: google_auth
-    google.auth._jwt_async: google_auth
-    google.auth._oauth2client: google_auth
-    google.auth._service_account_info: google_auth
-    google.auth.app_engine: google_auth
-    google.auth.aws: google_auth
-    google.auth.compute_engine: google_auth
-    google.auth.compute_engine.__init__: google_auth
-    google.auth.compute_engine._metadata: google_auth
-    google.auth.compute_engine.credentials: google_auth
-    google.auth.credentials: google_auth
-    google.auth.crypt: google_auth
-    google.auth.crypt.__init__: google_auth
-    google.auth.crypt._cryptography_rsa: google_auth
-    google.auth.crypt._helpers: google_auth
-    google.auth.crypt._python_rsa: google_auth
-    google.auth.crypt.base: google_auth
-    google.auth.crypt.es256: google_auth
-    google.auth.crypt.rsa: google_auth
-    google.auth.downscoped: google_auth
-    google.auth.environment_vars: google_auth
-    google.auth.exceptions: google_auth
-    google.auth.external_account: google_auth
-    google.auth.iam: google_auth
-    google.auth.identity_pool: google_auth
-    google.auth.impersonated_credentials: google_auth
-    google.auth.jwt: google_auth
-    google.auth.transport: google_auth
-    google.auth.transport.__init__: google_auth
-    google.auth.transport._aiohttp_requests: google_auth
-    google.auth.transport._http_client: google_auth
-    google.auth.transport._mtls_helper: google_auth
-    google.auth.transport.grpc: google_auth
-    google.auth.transport.mtls: google_auth
-    google.auth.transport.requests: google_auth
-    google.auth.transport.urllib3: google_auth
-    google.auth.version: google_auth
-    google.cloud._helpers: google_cloud_core
-    google.cloud._helpers.__init__: google_cloud_core
-    google.cloud._http: google_cloud_core
-    google.cloud._http.__init__: google_cloud_core
-    google.cloud._testing: google_cloud_core
-    google.cloud._testing.__init__: google_cloud_core
     google.cloud.aiplatform: google_cloud_aiplatform
-    google.cloud.aiplatform.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform._matching_engine: google_cloud_aiplatform
-    google.cloud.aiplatform._matching_engine.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform._matching_engine.match_service_pb2: google_cloud_aiplatform
-    google.cloud.aiplatform._matching_engine.match_service_pb2_grpc: google_cloud_aiplatform
-    google.cloud.aiplatform._matching_engine.matching_engine_index: google_cloud_aiplatform
-    google.cloud.aiplatform._matching_engine.matching_engine_index_config: google_cloud_aiplatform
-    google.cloud.aiplatform._matching_engine.matching_engine_index_endpoint: google_cloud_aiplatform
-    google.cloud.aiplatform.base: google_cloud_aiplatform
-    google.cloud.aiplatform.compat: google_cloud_aiplatform
-    google.cloud.aiplatform.compat.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.compat.services: google_cloud_aiplatform
-    google.cloud.aiplatform.compat.services.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.compat.types: google_cloud_aiplatform
-    google.cloud.aiplatform.compat.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.constants: google_cloud_aiplatform
-    google.cloud.aiplatform.constants.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.constants.base: google_cloud_aiplatform
-    google.cloud.aiplatform.constants.prediction: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets._datasources: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets.column_names_dataset: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets.dataset: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets.image_dataset: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets.tabular_dataset: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets.text_dataset: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets.time_series_dataset: google_cloud_aiplatform
-    google.cloud.aiplatform.datasets.video_dataset: google_cloud_aiplatform
-    google.cloud.aiplatform.explain: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.lit: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.metadata_builder: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.tf: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.tf.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.tf.v1: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.tf.v1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.tf.v1.saved_model_metadata_builder: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.tf.v2: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.tf.v2.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.explain.metadata.tf.v2.saved_model_metadata_builder: google_cloud_aiplatform
-    google.cloud.aiplatform.featurestore: google_cloud_aiplatform
-    google.cloud.aiplatform.featurestore.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.featurestore.entity_type: google_cloud_aiplatform
-    google.cloud.aiplatform.featurestore.feature: google_cloud_aiplatform
-    google.cloud.aiplatform.featurestore.featurestore: google_cloud_aiplatform
-    google.cloud.aiplatform.gapic: google_cloud_aiplatform
-    google.cloud.aiplatform.gapic.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.gapic.schema: google_cloud_aiplatform
-    google.cloud.aiplatform.gapic.schema.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.helpers: google_cloud_aiplatform
-    google.cloud.aiplatform.helpers.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.helpers.container_uri_builders: google_cloud_aiplatform
-    google.cloud.aiplatform.hyperparameter_tuning: google_cloud_aiplatform
-    google.cloud.aiplatform.initializer: google_cloud_aiplatform
-    google.cloud.aiplatform.jobs: google_cloud_aiplatform
-    google.cloud.aiplatform.metadata: google_cloud_aiplatform
-    google.cloud.aiplatform.metadata.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.metadata.artifact: google_cloud_aiplatform
-    google.cloud.aiplatform.metadata.constants: google_cloud_aiplatform
-    google.cloud.aiplatform.metadata.context: google_cloud_aiplatform
-    google.cloud.aiplatform.metadata.execution: google_cloud_aiplatform
-    google.cloud.aiplatform.metadata.metadata: google_cloud_aiplatform
-    google.cloud.aiplatform.metadata.metadata_store: google_cloud_aiplatform
-    google.cloud.aiplatform.metadata.resource: google_cloud_aiplatform
-    google.cloud.aiplatform.model_evaluation: google_cloud_aiplatform
-    google.cloud.aiplatform.model_evaluation.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.model_evaluation.model_evaluation: google_cloud_aiplatform
-    google.cloud.aiplatform.models: google_cloud_aiplatform
-    google.cloud.aiplatform.pipeline_jobs: google_cloud_aiplatform
-    google.cloud.aiplatform.schema: google_cloud_aiplatform
-    google.cloud.aiplatform.tensorboard: google_cloud_aiplatform
-    google.cloud.aiplatform.tensorboard.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.tensorboard.plugins.tf_profiler.profile_uploader: google_cloud_aiplatform
-    google.cloud.aiplatform.tensorboard.tensorboard_resource: google_cloud_aiplatform
-    google.cloud.aiplatform.tensorboard.uploader: google_cloud_aiplatform
-    google.cloud.aiplatform.tensorboard.uploader_main: google_cloud_aiplatform
-    google.cloud.aiplatform.tensorboard.uploader_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.training_jobs: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.cloud_profiler: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.cloud_profiler.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.cloud_profiler.cloud_profiler_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.cloud_profiler.initializer: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.cloud_profiler.plugins.base_plugin: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.cloud_profiler.plugins.tensorflow.tensorboard_api: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.cloud_profiler.plugins.tensorflow.tf_profiler: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.cloud_profiler.webserver: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.cloud_profiler.wsgi_types: google_cloud_aiplatform
-    google.cloud.aiplatform.training_utils.environment_variables: google_cloud_aiplatform
-    google.cloud.aiplatform.utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.column_transformations_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.console_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.enhanced_library: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.enhanced_library.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.enhanced_library._decorators: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.enhanced_library.value_converter: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.featurestore_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.gcs_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.pipeline_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.resource_manager_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.source_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.tensorboard_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.worker_spec_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.utils.yaml_utils: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.image_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.image_object_detection: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.image_segmentation: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.text_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.text_extraction: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.text_sentiment: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.video_action_recognition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.video_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.instance_v1.types.video_object_tracking: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1.types: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1.types.image_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1.types.image_object_detection: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1.types.image_segmentation: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1.types.video_action_recognition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1.types.video_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.params_v1.types.video_object_tracking: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.image_object_detection: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.image_segmentation: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.tabular_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.tabular_regression: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.text_extraction: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.text_sentiment: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.video_action_recognition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.video_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.predict.prediction_v1.types.video_object_tracking: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_image_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_image_object_detection: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_image_segmentation: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_tables: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_text_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_text_extraction: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_text_sentiment: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_video_action_recognition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_video_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.automl_video_object_tracking: google_cloud_aiplatform
-    google.cloud.aiplatform.v1.schema.trainingjob.definition_v1.types.export_evaluated_data_items_config: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.image_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.image_object_detection: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.image_segmentation: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.text_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.text_extraction: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.text_sentiment: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.video_action_recognition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.video_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.instance_v1beta1.types.video_object_tracking: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1.types: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1.types.image_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1.types.image_object_detection: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1.types.image_segmentation: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1.types.video_action_recognition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1.types.video_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.params_v1beta1.types.video_object_tracking: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.image_object_detection: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.image_segmentation: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.tabular_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.tabular_regression: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.text_extraction: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.text_sentiment: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.time_series_forecasting: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.video_action_recognition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.video_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.predict.prediction_v1beta1.types.video_object_tracking: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_forecasting: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_image_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_image_object_detection: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_image_segmentation: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_tables: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_text_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_text_extraction: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_text_sentiment: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_time_series_forecasting: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_video_action_recognition: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_video_classification: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.automl_video_object_tracking: google_cloud_aiplatform
-    google.cloud.aiplatform.v1beta1.schema.trainingjob.definition_v1beta1.types.export_evaluated_data_items_config: google_cloud_aiplatform
-    google.cloud.aiplatform.version: google_cloud_aiplatform
     google.cloud.aiplatform_v1: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.dataset_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.endpoint_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_online_serving_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_online_serving_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_online_serving_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_online_serving_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_online_serving_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_online_serving_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_online_serving_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_online_serving_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_online_serving_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.featurestore_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_endpoint_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.index_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.job_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.metadata_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.migration_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.model_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.pipeline_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.prediction_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.prediction_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.prediction_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.prediction_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.prediction_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.prediction_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.prediction_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.prediction_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.prediction_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.specialist_pool_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.tensorboard_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.services.vizier_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.accelerator_type: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.annotation: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.annotation_spec: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.artifact: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.batch_prediction_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.completion_stats: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.context: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.custom_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.data_item: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.data_labeling_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.dataset: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.dataset_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.deployed_index_ref: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.deployed_model_ref: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.encryption_spec: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.endpoint: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.endpoint_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.entity_type: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.env_var: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.event: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.execution: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.explanation: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.explanation_metadata: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.feature: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.feature_monitoring_stats: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.feature_selector: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.featurestore: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.featurestore_monitoring: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.featurestore_online_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.featurestore_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.hyperparameter_tuning_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.index: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.index_endpoint: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.index_endpoint_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.index_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.io: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.job_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.job_state: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.lineage_subgraph: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.machine_resources: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.manual_batch_tuning_parameters: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.metadata_schema: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.metadata_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.metadata_store: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.migratable_resource: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.migration_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.model: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.model_deployment_monitoring_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.model_evaluation: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.model_evaluation_slice: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.model_monitoring: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.model_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.operation: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.pipeline_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.pipeline_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.pipeline_state: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.prediction_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.specialist_pool: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.specialist_pool_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.study: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.tensorboard: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.tensorboard_data: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.tensorboard_experiment: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.tensorboard_run: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.tensorboard_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.tensorboard_time_series: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.training_pipeline: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.types: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.unmanaged_container_model: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.user_action_reference: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.value: google_cloud_aiplatform
-    google.cloud.aiplatform_v1.types.vizier_service: google_cloud_aiplatform
     google.cloud.aiplatform_v1beta1: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.dataset_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.endpoint_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_online_serving_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_online_serving_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_online_serving_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_online_serving_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_online_serving_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_online_serving_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_online_serving_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_online_serving_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_online_serving_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.featurestore_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_endpoint_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.index_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.job_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.metadata_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.migration_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.model_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.pipeline_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.prediction_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.prediction_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.prediction_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.prediction_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.prediction_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.prediction_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.prediction_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.prediction_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.prediction_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.specialist_pool_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.tensorboard_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service.async_client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service.client: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service.pagers: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service.transports: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service.transports.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service.transports.base: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service.transports.grpc: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.services.vizier_service.transports.grpc_asyncio: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.__init__: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.accelerator_type: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.annotation: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.annotation_spec: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.artifact: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.batch_prediction_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.completion_stats: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.context: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.custom_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.data_item: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.data_labeling_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.dataset: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.dataset_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.deployed_index_ref: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.deployed_model_ref: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.encryption_spec: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.endpoint: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.endpoint_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.entity_type: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.env_var: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.event: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.execution: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.explanation: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.explanation_metadata: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.feature: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.feature_monitoring_stats: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.feature_selector: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.featurestore: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.featurestore_monitoring: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.featurestore_online_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.featurestore_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.hyperparameter_tuning_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.index: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.index_endpoint: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.index_endpoint_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.index_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.io: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.job_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.job_state: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.lineage_subgraph: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.machine_resources: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.manual_batch_tuning_parameters: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.metadata_schema: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.metadata_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.metadata_store: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.migratable_resource: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.migration_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.model: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.model_deployment_monitoring_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.model_evaluation: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.model_evaluation_slice: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.model_monitoring: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.model_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.operation: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.pipeline_job: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.pipeline_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.pipeline_state: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.prediction_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.specialist_pool: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.specialist_pool_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.study: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.tensorboard: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.tensorboard_data: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.tensorboard_experiment: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.tensorboard_run: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.tensorboard_service: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.tensorboard_time_series: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.training_pipeline: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.types: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.unmanaged_container_model: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.user_action_reference: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.value: google_cloud_aiplatform
-    google.cloud.aiplatform_v1beta1.types.vizier_service: google_cloud_aiplatform
     google.cloud.bigquery: google_cloud_bigquery
-    google.cloud.bigquery.__init__: google_cloud_bigquery
-    google.cloud.bigquery._helpers: google_cloud_bigquery
-    google.cloud.bigquery._http: google_cloud_bigquery
-    google.cloud.bigquery._pandas_helpers: google_cloud_bigquery
-    google.cloud.bigquery._tqdm_helpers: google_cloud_bigquery
-    google.cloud.bigquery.client: google_cloud_bigquery
-    google.cloud.bigquery.dataset: google_cloud_bigquery
-    google.cloud.bigquery.dbapi: google_cloud_bigquery
-    google.cloud.bigquery.dbapi.__init__: google_cloud_bigquery
-    google.cloud.bigquery.dbapi._helpers: google_cloud_bigquery
-    google.cloud.bigquery.dbapi.connection: google_cloud_bigquery
-    google.cloud.bigquery.dbapi.cursor: google_cloud_bigquery
-    google.cloud.bigquery.dbapi.exceptions: google_cloud_bigquery
-    google.cloud.bigquery.dbapi.types: google_cloud_bigquery
-    google.cloud.bigquery.encryption_configuration: google_cloud_bigquery
-    google.cloud.bigquery.enums: google_cloud_bigquery
-    google.cloud.bigquery.exceptions: google_cloud_bigquery
-    google.cloud.bigquery.external_config: google_cloud_bigquery
-    google.cloud.bigquery.format_options: google_cloud_bigquery
-    google.cloud.bigquery.iam: google_cloud_bigquery
-    google.cloud.bigquery.job: google_cloud_bigquery
-    google.cloud.bigquery.job.__init__: google_cloud_bigquery
-    google.cloud.bigquery.job.base: google_cloud_bigquery
-    google.cloud.bigquery.job.copy_: google_cloud_bigquery
-    google.cloud.bigquery.job.extract: google_cloud_bigquery
-    google.cloud.bigquery.job.load: google_cloud_bigquery
-    google.cloud.bigquery.job.query: google_cloud_bigquery
-    google.cloud.bigquery.magics: google_cloud_bigquery
-    google.cloud.bigquery.magics.__init__: google_cloud_bigquery
-    google.cloud.bigquery.magics.line_arg_parser: google_cloud_bigquery
-    google.cloud.bigquery.magics.line_arg_parser.__init__: google_cloud_bigquery
-    google.cloud.bigquery.magics.line_arg_parser.exceptions: google_cloud_bigquery
-    google.cloud.bigquery.magics.line_arg_parser.lexer: google_cloud_bigquery
-    google.cloud.bigquery.magics.line_arg_parser.parser: google_cloud_bigquery
-    google.cloud.bigquery.magics.line_arg_parser.visitors: google_cloud_bigquery
-    google.cloud.bigquery.magics.magics: google_cloud_bigquery
-    google.cloud.bigquery.model: google_cloud_bigquery
-    google.cloud.bigquery.opentelemetry_tracing: google_cloud_bigquery
-    google.cloud.bigquery.query: google_cloud_bigquery
-    google.cloud.bigquery.retry: google_cloud_bigquery
-    google.cloud.bigquery.routine: google_cloud_bigquery
-    google.cloud.bigquery.routine.__init__: google_cloud_bigquery
-    google.cloud.bigquery.routine.routine: google_cloud_bigquery
-    google.cloud.bigquery.schema: google_cloud_bigquery
-    google.cloud.bigquery.table: google_cloud_bigquery
-    google.cloud.bigquery.version: google_cloud_bigquery
     google.cloud.bigquery_v2: google_cloud_bigquery
-    google.cloud.bigquery_v2.__init__: google_cloud_bigquery
-    google.cloud.bigquery_v2.types: google_cloud_bigquery
-    google.cloud.bigquery_v2.types.__init__: google_cloud_bigquery
-    google.cloud.bigquery_v2.types.encryption_config: google_cloud_bigquery
-    google.cloud.bigquery_v2.types.model: google_cloud_bigquery
-    google.cloud.bigquery_v2.types.model_reference: google_cloud_bigquery
-    google.cloud.bigquery_v2.types.standard_sql: google_cloud_bigquery
-    google.cloud.bigquery_v2.types.table_reference: google_cloud_bigquery
     google.cloud.client: google_cloud_core
-    google.cloud.client.__init__: google_cloud_core
     google.cloud.environment_vars: google_cloud_core
-    google.cloud.environment_vars.__init__: google_cloud_core
     google.cloud.exceptions: google_cloud_core
-    google.cloud.exceptions.__init__: google_cloud_core
     google.cloud.extended_operations_pb2: googleapis_common_protos
     google.cloud.location.locations_pb2: googleapis_common_protos
     google.cloud.obsolete: google_cloud_core
-    google.cloud.obsolete.__init__: google_cloud_core
     google.cloud.operation: google_cloud_core
-    google.cloud.operation.__init__: google_cloud_core
     google.cloud.resourcemanager: google_cloud_resource_manager
-    google.cloud.resourcemanager.__init__: google_cloud_resource_manager
     google.cloud.resourcemanager_v3: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders.async_client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders.client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders.pagers: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders.transports: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders.transports.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders.transports.base: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders.transports.grpc: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.folders.transports.grpc_asyncio: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations.async_client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations.client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations.pagers: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations.transports: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations.transports.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations.transports.base: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations.transports.grpc: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.organizations.transports.grpc_asyncio: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects.async_client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects.client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects.pagers: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects.transports: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects.transports.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects.transports.base: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects.transports.grpc: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.projects.transports.grpc_asyncio: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings.async_client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings.client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings.pagers: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings.transports: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings.transports.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings.transports.base: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings.transports.grpc: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_bindings.transports.grpc_asyncio: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys.async_client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys.client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys.pagers: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys.transports: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys.transports.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys.transports.base: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys.transports.grpc: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_keys.transports.grpc_asyncio: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values.async_client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values.client: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values.pagers: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values.transports: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values.transports.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values.transports.base: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values.transports.grpc: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.services.tag_values.transports.grpc_asyncio: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.types: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.types.__init__: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.types.folders: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.types.organizations: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.types.projects: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.types.tag_bindings: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.types.tag_keys: google_cloud_resource_manager
-    google.cloud.resourcemanager_v3.types.tag_values: google_cloud_resource_manager
     google.cloud.storage: google_cloud_storage
-    google.cloud.storage.__init__: google_cloud_storage
-    google.cloud.storage._helpers: google_cloud_storage
-    google.cloud.storage._http: google_cloud_storage
-    google.cloud.storage._signing: google_cloud_storage
-    google.cloud.storage.acl: google_cloud_storage
-    google.cloud.storage.batch: google_cloud_storage
-    google.cloud.storage.blob: google_cloud_storage
-    google.cloud.storage.bucket: google_cloud_storage
-    google.cloud.storage.client: google_cloud_storage
-    google.cloud.storage.constants: google_cloud_storage
-    google.cloud.storage.fileio: google_cloud_storage
-    google.cloud.storage.hmac_key: google_cloud_storage
-    google.cloud.storage.iam: google_cloud_storage
-    google.cloud.storage.notification: google_cloud_storage
-    google.cloud.storage.retry: google_cloud_storage
-    google.cloud.storage.version: google_cloud_storage
     google.cloud.version: google_cloud_core
     google.gapic.metadata: googleapis_common_protos
-    google.gapic.metadata.__init__: googleapis_common_protos
-    google.gapic.metadata.gapic_metadata_pb2: googleapis_common_protos
     google.iam.v1: grpc_google_iam_v1
-    google.iam.v1.__init__: grpc_google_iam_v1
-    google.iam.v1.iam_policy_pb2: grpc_google_iam_v1
-    google.iam.v1.iam_policy_pb2_grpc: grpc_google_iam_v1
-    google.iam.v1.logging: grpc_google_iam_v1
-    google.iam.v1.logging.__init__: grpc_google_iam_v1
-    google.iam.v1.logging.audit_data_pb2: grpc_google_iam_v1
-    google.iam.v1.options_pb2: grpc_google_iam_v1
-    google.iam.v1.options_pb2_grpc: grpc_google_iam_v1
-    google.iam.v1.policy_pb2: grpc_google_iam_v1
-    google.iam.v1.policy_pb2_grpc: grpc_google_iam_v1
     google.logging.type: googleapis_common_protos
-    google.logging.type.__init__: googleapis_common_protos
-    google.logging.type.http_request_pb2: googleapis_common_protos
-    google.logging.type.log_severity_pb2: googleapis_common_protos
     google.longrunning: googleapis_common_protos
-    google.longrunning.__init__: googleapis_common_protos
-    google.longrunning.operations_grpc: googleapis_common_protos
-    google.longrunning.operations_grpc_pb2: googleapis_common_protos
-    google.longrunning.operations_pb2: googleapis_common_protos
-    google.longrunning.operations_pb2_grpc: googleapis_common_protos
-    google.longrunning.operations_proto: googleapis_common_protos
-    google.longrunning.operations_proto_pb2: googleapis_common_protos
     google.oauth2: google_auth
-    google.oauth2.__init__: google_auth
-    google.oauth2._client: google_auth
-    google.oauth2._client_async: google_auth
-    google.oauth2._credentials_async: google_auth
-    google.oauth2._id_token_async: google_auth
-    google.oauth2._reauth_async: google_auth
-    google.oauth2._service_account_async: google_auth
-    google.oauth2.challenges: google_auth
-    google.oauth2.credentials: google_auth
-    google.oauth2.id_token: google_auth
-    google.oauth2.reauth: google_auth
-    google.oauth2.service_account: google_auth
-    google.oauth2.sts: google_auth
-    google.oauth2.utils: google_auth
     google.protobuf: protobuf
-    google.protobuf.__init__: protobuf
-    google.protobuf.any_pb2: protobuf
-    google.protobuf.api_pb2: protobuf
-    google.protobuf.compiler: protobuf
-    google.protobuf.compiler.__init__: protobuf
-    google.protobuf.compiler.plugin_pb2: protobuf
-    google.protobuf.descriptor: protobuf
-    google.protobuf.descriptor_database: protobuf
-    google.protobuf.descriptor_pb2: protobuf
-    google.protobuf.descriptor_pool: protobuf
-    google.protobuf.duration_pb2: protobuf
-    google.protobuf.empty_pb2: protobuf
-    google.protobuf.field_mask_pb2: protobuf
-    google.protobuf.internal: protobuf
-    google.protobuf.internal.__init__: protobuf
-    google.protobuf.internal._api_implementation: protobuf
-    google.protobuf.internal.api_implementation: protobuf
-    google.protobuf.internal.builder: protobuf
-    google.protobuf.internal.containers: protobuf
-    google.protobuf.internal.decoder: protobuf
-    google.protobuf.internal.encoder: protobuf
-    google.protobuf.internal.enum_type_wrapper: protobuf
-    google.protobuf.internal.extension_dict: protobuf
-    google.protobuf.internal.message_listener: protobuf
-    google.protobuf.internal.python_message: protobuf
-    google.protobuf.internal.type_checkers: protobuf
-    google.protobuf.internal.well_known_types: protobuf
-    google.protobuf.internal.wire_format: protobuf
-    google.protobuf.json_format: protobuf
-    google.protobuf.message: protobuf
-    google.protobuf.message_factory: protobuf
-    google.protobuf.proto_builder: protobuf
-    google.protobuf.pyext: protobuf
-    google.protobuf.pyext.__init__: protobuf
-    google.protobuf.pyext._message: protobuf
-    google.protobuf.pyext.cpp_message: protobuf
-    google.protobuf.reflection: protobuf
-    google.protobuf.service: protobuf
-    google.protobuf.service_reflection: protobuf
-    google.protobuf.source_context_pb2: protobuf
-    google.protobuf.struct_pb2: protobuf
-    google.protobuf.symbol_database: protobuf
-    google.protobuf.text_encoding: protobuf
-    google.protobuf.text_format: protobuf
-    google.protobuf.timestamp_pb2: protobuf
-    google.protobuf.type_pb2: protobuf
-    google.protobuf.util: protobuf
-    google.protobuf.util.__init__: protobuf
-    google.protobuf.util.json_format_pb2: protobuf
-    google.protobuf.util.json_format_proto3_pb2: protobuf
-    google.protobuf.wrappers_pb2: protobuf
     google.resumable_media: google_resumable_media
-    google.resumable_media.__init__: google_resumable_media
-    google.resumable_media._download: google_resumable_media
-    google.resumable_media._helpers: google_resumable_media
-    google.resumable_media._upload: google_resumable_media
-    google.resumable_media.common: google_resumable_media
-    google.resumable_media.requests: google_resumable_media
-    google.resumable_media.requests.__init__: google_resumable_media
-    google.resumable_media.requests._request_helpers: google_resumable_media
-    google.resumable_media.requests.download: google_resumable_media
-    google.resumable_media.requests.upload: google_resumable_media
     google.rpc: googleapis_common_protos
-    google.rpc.__init__: googleapis_common_protos
-    google.rpc.code_pb2: googleapis_common_protos
-    google.rpc.context: googleapis_common_protos
-    google.rpc.context.__init__: googleapis_common_protos
-    google.rpc.context.attribute_context_pb2: googleapis_common_protos
-    google.rpc.error_details_pb2: googleapis_common_protos
-    google.rpc.status_pb2: googleapis_common_protos
     google.type: googleapis_common_protos
-    google.type.__init__: googleapis_common_protos
-    google.type.calendar_period_pb2: googleapis_common_protos
-    google.type.color_pb2: googleapis_common_protos
-    google.type.date_pb2: googleapis_common_protos
-    google.type.datetime_pb2: googleapis_common_protos
-    google.type.dayofweek_pb2: googleapis_common_protos
-    google.type.decimal_pb2: googleapis_common_protos
-    google.type.expr_pb2: googleapis_common_protos
-    google.type.fraction_pb2: googleapis_common_protos
-    google.type.interval_pb2: googleapis_common_protos
-    google.type.latlng_pb2: googleapis_common_protos
-    google.type.localized_text_pb2: googleapis_common_protos
-    google.type.money_pb2: googleapis_common_protos
-    google.type.month_pb2: googleapis_common_protos
-    google.type.phone_number_pb2: googleapis_common_protos
-    google.type.postal_address_pb2: googleapis_common_protos
-    google.type.quaternion_pb2: googleapis_common_protos
-    google.type.timeofday_pb2: googleapis_common_protos
     google_crc32c: google_crc32c
-    google_crc32c.__config__: google_crc32c
-    google_crc32c.__init__: google_crc32c
-    google_crc32c._checksum: google_crc32c
-    google_crc32c._crc32c: google_crc32c
-    google_crc32c.cext: google_crc32c
-    google_crc32c.libs.libcrc32c-672e1704: google_crc32c
-    google_crc32c.python: google_crc32c
     grpc: grpcio
-    grpc.__init__: grpcio
-    grpc._auth: grpcio
-    grpc._channel: grpcio
-    grpc._common: grpcio
-    grpc._compression: grpcio
-    grpc._cython: grpcio
-    grpc._cython.__init__: grpcio
-    grpc._cython._cygrpc: grpcio
-    grpc._cython._cygrpc.__init__: grpcio
-    grpc._cython.cygrpc: grpcio
-    grpc._grpcio_metadata: grpcio
-    grpc._interceptor: grpcio
-    grpc._plugin_wrapping: grpcio
-    grpc._runtime_protos: grpcio
-    grpc._server: grpcio
-    grpc._simple_stubs: grpcio
-    grpc._utilities: grpcio
-    grpc.aio: grpcio
-    grpc.aio.__init__: grpcio
-    grpc.aio._base_call: grpcio
-    grpc.aio._base_channel: grpcio
-    grpc.aio._base_server: grpcio
-    grpc.aio._call: grpcio
-    grpc.aio._channel: grpcio
-    grpc.aio._interceptor: grpcio
-    grpc.aio._metadata: grpcio
-    grpc.aio._server: grpcio
-    grpc.aio._typing: grpcio
-    grpc.aio._utils: grpcio
-    grpc.beta: grpcio
-    grpc.beta.__init__: grpcio
-    grpc.beta._client_adaptations: grpcio
-    grpc.beta._metadata: grpcio
-    grpc.beta._server_adaptations: grpcio
-    grpc.beta.implementations: grpcio
-    grpc.beta.interfaces: grpcio
-    grpc.beta.utilities: grpcio
-    grpc.experimental: grpcio
-    grpc.experimental.__init__: grpcio
-    grpc.experimental.aio: grpcio
-    grpc.experimental.aio.__init__: grpcio
-    grpc.experimental.gevent: grpcio
-    grpc.experimental.session_cache: grpcio
-    grpc.framework: grpcio
-    grpc.framework.__init__: grpcio
-    grpc.framework.common: grpcio
-    grpc.framework.common.__init__: grpcio
-    grpc.framework.common.cardinality: grpcio
-    grpc.framework.common.style: grpcio
-    grpc.framework.foundation: grpcio
-    grpc.framework.foundation.__init__: grpcio
-    grpc.framework.foundation.abandonment: grpcio
-    grpc.framework.foundation.callable_util: grpcio
-    grpc.framework.foundation.future: grpcio
-    grpc.framework.foundation.logging_pool: grpcio
-    grpc.framework.foundation.stream: grpcio
-    grpc.framework.foundation.stream_util: grpcio
-    grpc.framework.interfaces: grpcio
-    grpc.framework.interfaces.__init__: grpcio
-    grpc.framework.interfaces.base: grpcio
-    grpc.framework.interfaces.base.__init__: grpcio
-    grpc.framework.interfaces.base.base: grpcio
-    grpc.framework.interfaces.base.utilities: grpcio
-    grpc.framework.interfaces.face: grpcio
-    grpc.framework.interfaces.face.__init__: grpcio
-    grpc.framework.interfaces.face.face: grpcio
-    grpc.framework.interfaces.face.utilities: grpcio
     grpc_status: grpcio_status
-    grpc_status.__init__: grpcio_status
-    grpc_status._async: grpcio_status
-    grpc_status._common: grpcio_status
-    grpc_status.rpc_status: grpcio_status
     idna: idna
-    idna.__init__: idna
-    idna.codec: idna
-    idna.compat: idna
-    idna.core: idna
-    idna.idnadata: idna
-    idna.intranges: idna
-    idna.package_data: idna
-    idna.uts46data: idna
     packaging: packaging
-    packaging.__about__: packaging
-    packaging.__init__: packaging
-    packaging._manylinux: packaging
-    packaging._musllinux: packaging
-    packaging._structures: packaging
-    packaging.markers: packaging
-    packaging.requirements: packaging
-    packaging.specifiers: packaging
-    packaging.tags: packaging
-    packaging.utils: packaging
-    packaging.version: packaging
     proto: proto_plus
-    proto.__init__: proto_plus
-    proto._file_info: proto_plus
-    proto._package_info: proto_plus
-    proto.datetime_helpers: proto_plus
-    proto.enums: proto_plus
-    proto.fields: proto_plus
-    proto.marshal: proto_plus
-    proto.marshal.__init__: proto_plus
-    proto.marshal.collections: proto_plus
-    proto.marshal.collections.__init__: proto_plus
-    proto.marshal.collections.maps: proto_plus
-    proto.marshal.collections.repeated: proto_plus
-    proto.marshal.compat: proto_plus
-    proto.marshal.marshal: proto_plus
-    proto.marshal.rules: proto_plus
-    proto.marshal.rules.__init__: proto_plus
-    proto.marshal.rules.bytes: proto_plus
-    proto.marshal.rules.dates: proto_plus
-    proto.marshal.rules.enums: proto_plus
-    proto.marshal.rules.message: proto_plus
-    proto.marshal.rules.stringy_numbers: proto_plus
-    proto.marshal.rules.struct: proto_plus
-    proto.marshal.rules.wrappers: proto_plus
-    proto.message: proto_plus
-    proto.modules: proto_plus
-    proto.primitives: proto_plus
-    proto.utils: proto_plus
     pyasn1: pyasn1
-    pyasn1.__init__: pyasn1
-    pyasn1.codec: pyasn1
-    pyasn1.codec.__init__: pyasn1
-    pyasn1.codec.ber: pyasn1
-    pyasn1.codec.ber.__init__: pyasn1
-    pyasn1.codec.ber.decoder: pyasn1
-    pyasn1.codec.ber.encoder: pyasn1
-    pyasn1.codec.ber.eoo: pyasn1
-    pyasn1.codec.cer: pyasn1
-    pyasn1.codec.cer.__init__: pyasn1
-    pyasn1.codec.cer.decoder: pyasn1
-    pyasn1.codec.cer.encoder: pyasn1
-    pyasn1.codec.der: pyasn1
-    pyasn1.codec.der.__init__: pyasn1
-    pyasn1.codec.der.decoder: pyasn1
-    pyasn1.codec.der.encoder: pyasn1
-    pyasn1.codec.native: pyasn1
-    pyasn1.codec.native.__init__: pyasn1
-    pyasn1.codec.native.decoder: pyasn1
-    pyasn1.codec.native.encoder: pyasn1
-    pyasn1.compat: pyasn1
-    pyasn1.compat.__init__: pyasn1
-    pyasn1.compat.binary: pyasn1
-    pyasn1.compat.calling: pyasn1
-    pyasn1.compat.dateandtime: pyasn1
-    pyasn1.compat.integer: pyasn1
-    pyasn1.compat.octets: pyasn1
-    pyasn1.compat.string: pyasn1
-    pyasn1.debug: pyasn1
-    pyasn1.error: pyasn1
-    pyasn1.type: pyasn1
-    pyasn1.type.__init__: pyasn1
-    pyasn1.type.base: pyasn1
-    pyasn1.type.char: pyasn1
-    pyasn1.type.constraint: pyasn1
-    pyasn1.type.error: pyasn1
-    pyasn1.type.namedtype: pyasn1
-    pyasn1.type.namedval: pyasn1
-    pyasn1.type.opentype: pyasn1
-    pyasn1.type.tag: pyasn1
-    pyasn1.type.tagmap: pyasn1
-    pyasn1.type.univ: pyasn1
-    pyasn1.type.useful: pyasn1
     pyasn1_modules: pyasn1_modules
-    pyasn1_modules.__init__: pyasn1_modules
-    pyasn1_modules.pem: pyasn1_modules
-    pyasn1_modules.rfc1155: pyasn1_modules
-    pyasn1_modules.rfc1157: pyasn1_modules
-    pyasn1_modules.rfc1901: pyasn1_modules
-    pyasn1_modules.rfc1902: pyasn1_modules
-    pyasn1_modules.rfc1905: pyasn1_modules
-    pyasn1_modules.rfc2251: pyasn1_modules
-    pyasn1_modules.rfc2314: pyasn1_modules
-    pyasn1_modules.rfc2315: pyasn1_modules
-    pyasn1_modules.rfc2437: pyasn1_modules
-    pyasn1_modules.rfc2459: pyasn1_modules
-    pyasn1_modules.rfc2511: pyasn1_modules
-    pyasn1_modules.rfc2560: pyasn1_modules
-    pyasn1_modules.rfc2631: pyasn1_modules
-    pyasn1_modules.rfc2634: pyasn1_modules
-    pyasn1_modules.rfc2985: pyasn1_modules
-    pyasn1_modules.rfc2986: pyasn1_modules
-    pyasn1_modules.rfc3114: pyasn1_modules
-    pyasn1_modules.rfc3161: pyasn1_modules
-    pyasn1_modules.rfc3274: pyasn1_modules
-    pyasn1_modules.rfc3279: pyasn1_modules
-    pyasn1_modules.rfc3280: pyasn1_modules
-    pyasn1_modules.rfc3281: pyasn1_modules
-    pyasn1_modules.rfc3412: pyasn1_modules
-    pyasn1_modules.rfc3414: pyasn1_modules
-    pyasn1_modules.rfc3447: pyasn1_modules
-    pyasn1_modules.rfc3560: pyasn1_modules
-    pyasn1_modules.rfc3565: pyasn1_modules
-    pyasn1_modules.rfc3709: pyasn1_modules
-    pyasn1_modules.rfc3770: pyasn1_modules
-    pyasn1_modules.rfc3779: pyasn1_modules
-    pyasn1_modules.rfc3852: pyasn1_modules
-    pyasn1_modules.rfc4043: pyasn1_modules
-    pyasn1_modules.rfc4055: pyasn1_modules
-    pyasn1_modules.rfc4073: pyasn1_modules
-    pyasn1_modules.rfc4108: pyasn1_modules
-    pyasn1_modules.rfc4210: pyasn1_modules
-    pyasn1_modules.rfc4211: pyasn1_modules
-    pyasn1_modules.rfc4334: pyasn1_modules
-    pyasn1_modules.rfc4985: pyasn1_modules
-    pyasn1_modules.rfc5035: pyasn1_modules
-    pyasn1_modules.rfc5083: pyasn1_modules
-    pyasn1_modules.rfc5084: pyasn1_modules
-    pyasn1_modules.rfc5208: pyasn1_modules
-    pyasn1_modules.rfc5280: pyasn1_modules
-    pyasn1_modules.rfc5480: pyasn1_modules
-    pyasn1_modules.rfc5649: pyasn1_modules
-    pyasn1_modules.rfc5652: pyasn1_modules
-    pyasn1_modules.rfc5751: pyasn1_modules
-    pyasn1_modules.rfc5755: pyasn1_modules
-    pyasn1_modules.rfc5913: pyasn1_modules
-    pyasn1_modules.rfc5914: pyasn1_modules
-    pyasn1_modules.rfc5915: pyasn1_modules
-    pyasn1_modules.rfc5916: pyasn1_modules
-    pyasn1_modules.rfc5917: pyasn1_modules
-    pyasn1_modules.rfc5924: pyasn1_modules
-    pyasn1_modules.rfc5934: pyasn1_modules
-    pyasn1_modules.rfc5940: pyasn1_modules
-    pyasn1_modules.rfc5958: pyasn1_modules
-    pyasn1_modules.rfc5990: pyasn1_modules
-    pyasn1_modules.rfc6010: pyasn1_modules
-    pyasn1_modules.rfc6019: pyasn1_modules
-    pyasn1_modules.rfc6031: pyasn1_modules
-    pyasn1_modules.rfc6032: pyasn1_modules
-    pyasn1_modules.rfc6120: pyasn1_modules
-    pyasn1_modules.rfc6170: pyasn1_modules
-    pyasn1_modules.rfc6187: pyasn1_modules
-    pyasn1_modules.rfc6210: pyasn1_modules
-    pyasn1_modules.rfc6211: pyasn1_modules
-    pyasn1_modules.rfc6402: pyasn1_modules
-    pyasn1_modules.rfc6402-1: pyasn1_modules
-    pyasn1_modules.rfc6482: pyasn1_modules
-    pyasn1_modules.rfc6486: pyasn1_modules
-    pyasn1_modules.rfc6487: pyasn1_modules
-    pyasn1_modules.rfc6664: pyasn1_modules
-    pyasn1_modules.rfc6955: pyasn1_modules
-    pyasn1_modules.rfc6960: pyasn1_modules
-    pyasn1_modules.rfc7030: pyasn1_modules
-    pyasn1_modules.rfc7191: pyasn1_modules
-    pyasn1_modules.rfc7229: pyasn1_modules
-    pyasn1_modules.rfc7292: pyasn1_modules
-    pyasn1_modules.rfc7296: pyasn1_modules
-    pyasn1_modules.rfc7508: pyasn1_modules
-    pyasn1_modules.rfc7585: pyasn1_modules
-    pyasn1_modules.rfc7633: pyasn1_modules
-    pyasn1_modules.rfc7773: pyasn1_modules
-    pyasn1_modules.rfc7894: pyasn1_modules
-    pyasn1_modules.rfc7894-1: pyasn1_modules
-    pyasn1_modules.rfc7906: pyasn1_modules
-    pyasn1_modules.rfc7914: pyasn1_modules
-    pyasn1_modules.rfc8017: pyasn1_modules
-    pyasn1_modules.rfc8018: pyasn1_modules
-    pyasn1_modules.rfc8103: pyasn1_modules
-    pyasn1_modules.rfc8209: pyasn1_modules
-    pyasn1_modules.rfc8226: pyasn1_modules
-    pyasn1_modules.rfc8358: pyasn1_modules
-    pyasn1_modules.rfc8360: pyasn1_modules
-    pyasn1_modules.rfc8398: pyasn1_modules
-    pyasn1_modules.rfc8410: pyasn1_modules
-    pyasn1_modules.rfc8418: pyasn1_modules
-    pyasn1_modules.rfc8419: pyasn1_modules
-    pyasn1_modules.rfc8479: pyasn1_modules
-    pyasn1_modules.rfc8494: pyasn1_modules
-    pyasn1_modules.rfc8520: pyasn1_modules
-    pyasn1_modules.rfc8619: pyasn1_modules
-    pyasn1_modules.rfc8649: pyasn1_modules
     pyparsing: pyparsing
-    pyparsing.__init__: pyparsing
-    pyparsing.actions: pyparsing
-    pyparsing.common: pyparsing
-    pyparsing.core: pyparsing
-    pyparsing.diagram: pyparsing
-    pyparsing.diagram.__init__: pyparsing
-    pyparsing.exceptions: pyparsing
-    pyparsing.helpers: pyparsing
-    pyparsing.results: pyparsing
-    pyparsing.testing: pyparsing
-    pyparsing.unicode: pyparsing
-    pyparsing.util: pyparsing
     requests: requests
-    requests.__init__: requests
-    requests.__version__: requests
-    requests._internal_utils: requests
-    requests.adapters: requests
-    requests.api: requests
-    requests.auth: requests
-    requests.certs: requests
-    requests.compat: requests
-    requests.cookies: requests
-    requests.exceptions: requests
-    requests.help: requests
-    requests.hooks: requests
-    requests.models: requests
-    requests.packages: requests
-    requests.sessions: requests
-    requests.status_codes: requests
-    requests.structures: requests
-    requests.utils: requests
     rsa: rsa
-    rsa.__init__: rsa
-    rsa._compat: rsa
-    rsa.asn1: rsa
-    rsa.cli: rsa
-    rsa.common: rsa
-    rsa.core: rsa
-    rsa.key: rsa
-    rsa.parallel: rsa
-    rsa.pem: rsa
-    rsa.pkcs1: rsa
-    rsa.pkcs1_v2: rsa
-    rsa.prime: rsa
-    rsa.randnum: rsa
-    rsa.transform: rsa
-    rsa.util: rsa
     samples.generated_samples.cloudresourcemanager_v3_generated_folders_create_folder_async: google_cloud_resource_manager
     samples.generated_samples.cloudresourcemanager_v3_generated_folders_create_folder_sync: google_cloud_resource_manager
     samples.generated_samples.cloudresourcemanager_v3_generated_folders_delete_folder_async: google_cloud_resource_manager
@@ -1616,63 +154,7 @@
     scripts.readme-gen.readme_gen: google_cloud_resource_manager
     six: six
     tests: google_cloud_resource_manager
-    tests.__init__: google_cloud_resource_manager
-    tests.unit: google_cloud_resource_manager
-    tests.unit.__init__: google_cloud_resource_manager
-    tests.unit.gapic: google_cloud_resource_manager
-    tests.unit.gapic.__init__: google_cloud_resource_manager
-    tests.unit.gapic.resourcemanager_v3: google_cloud_resource_manager
-    tests.unit.gapic.resourcemanager_v3.__init__: google_cloud_resource_manager
-    tests.unit.gapic.resourcemanager_v3.test_folders: google_cloud_resource_manager
-    tests.unit.gapic.resourcemanager_v3.test_organizations: google_cloud_resource_manager
-    tests.unit.gapic.resourcemanager_v3.test_projects: google_cloud_resource_manager
-    tests.unit.gapic.resourcemanager_v3.test_tag_bindings: google_cloud_resource_manager
-    tests.unit.gapic.resourcemanager_v3.test_tag_keys: google_cloud_resource_manager
-    tests.unit.gapic.resourcemanager_v3.test_tag_values: google_cloud_resource_manager
     urllib3: urllib3
-    urllib3.__init__: urllib3
-    urllib3._collections: urllib3
-    urllib3._version: urllib3
-    urllib3.connection: urllib3
-    urllib3.connectionpool: urllib3
-    urllib3.contrib: urllib3
-    urllib3.contrib.__init__: urllib3
-    urllib3.contrib._appengine_environ: urllib3
-    urllib3.contrib._securetransport: urllib3
-    urllib3.contrib._securetransport.__init__: urllib3
-    urllib3.contrib._securetransport.bindings: urllib3
-    urllib3.contrib._securetransport.low_level: urllib3
-    urllib3.contrib.appengine: urllib3
-    urllib3.contrib.ntlmpool: urllib3
-    urllib3.contrib.pyopenssl: urllib3
-    urllib3.contrib.securetransport: urllib3
-    urllib3.contrib.socks: urllib3
-    urllib3.exceptions: urllib3
-    urllib3.fields: urllib3
-    urllib3.filepost: urllib3
-    urllib3.packages: urllib3
-    urllib3.packages.__init__: urllib3
-    urllib3.packages.backports: urllib3
-    urllib3.packages.backports.__init__: urllib3
-    urllib3.packages.backports.makefile: urllib3
-    urllib3.packages.six: urllib3
-    urllib3.poolmanager: urllib3
-    urllib3.request: urllib3
-    urllib3.response: urllib3
-    urllib3.util: urllib3
-    urllib3.util.__init__: urllib3
-    urllib3.util.connection: urllib3
-    urllib3.util.proxy: urllib3
-    urllib3.util.queue: urllib3
-    urllib3.util.request: urllib3
-    urllib3.util.response: urllib3
-    urllib3.util.retry: urllib3
-    urllib3.util.ssl_: urllib3
-    urllib3.util.ssl_match_hostname: urllib3
-    urllib3.util.ssltransport: urllib3
-    urllib3.util.timeout: urllib3
-    urllib3.util.url: urllib3
-    urllib3.util.wait: urllib3
   pip_repository:
     name: gazelle_python_test
-integrity: 32e38932043eca090a64ca741758d8e4a5817c2cd7dc821fc927914c32fb3114
+integrity: 366852b36882c766f23173b8673e934a1f84685f529dc06aabab837f697ba9f8
diff --git a/gazelle/pythonconfig/pythonconfig.go b/gazelle/pythonconfig/pythonconfig.go
index 09d308a..166b575 100644
--- a/gazelle/pythonconfig/pythonconfig.go
+++ b/gazelle/pythonconfig/pythonconfig.go
@@ -16,7 +16,8 @@
 
 import (
 	"fmt"
-	"path/filepath"
+	"path"
+	"regexp"
 	"strings"
 
 	"github.com/emirpasic/gods/lists/singlylinkedlist"
@@ -54,6 +55,10 @@
 	// the "per_file" GenerationMode by including the package's __init__.py file.
 	// This is a boolean directive.
 	GenerationModePerFileIncludeInit = "python_generation_mode_per_file_include_init"
+	// GenerationModePerPackageRequireTestEntryPoint represents the directive that
+	// requires a test entry point to generate test targets in "package" GenerationMode.
+	// This is a boolean directive.
+	GenerationModePerPackageRequireTestEntryPoint = "python_generation_mode_per_package_require_test_entry_point"
 	// LibraryNamingConvention represents the directive that controls the
 	// py_library naming convention. It interpolates $package_name$ with the
 	// Bazel package name. E.g. if the Bazel package name is `foo`, setting this
@@ -67,6 +72,23 @@
 	// naming convention. See python_library_naming_convention for more info on
 	// the package name interpolation.
 	TestNamingConvention = "python_test_naming_convention"
+	// DefaultVisibilty represents the directive that controls what visibility
+	// labels are added to generated python targets.
+	DefaultVisibilty = "python_default_visibility"
+	// Visibility represents the directive that controls what additional
+	// visibility labels are added to generated targets. It mimics the behavior
+	// of the `go_visibility` directive.
+	Visibility = "python_visibility"
+	// TestFilePattern represents the directive that controls which python
+	// files are mapped to `py_test` targets.
+	TestFilePattern = "python_test_file_pattern"
+	// LabelConvention represents the directive that defines the format of the
+	// labels to third-party dependencies.
+	LabelConvention = "python_label_convention"
+	// LabelNormalization represents the directive that controls how distribution
+	// names of labels to third-party dependencies are normalized. Supported values
+	// are 'none', 'pep503' and 'snake_case' (default). See LabelNormalizationType.
+	LabelNormalization = "python_label_normalization"
 )
 
 // GenerationModeType represents one of the generation modes for the Python
@@ -86,7 +108,19 @@
 )
 
 const (
-	packageNameNamingConventionSubstitution = "$package_name$"
+	packageNameNamingConventionSubstitution     = "$package_name$"
+	distributionNameLabelConventionSubstitution = "$distribution_name$"
+)
+
+const (
+	// The default visibility label, including a format placeholder for `python_root`.
+	DefaultVisibilityFmtString = "//%s:__subpackages__"
+	// The default globs used to determine py_test targets.
+	DefaultTestFilePatternString = "*_test.py,test_*.py"
+	// The default convention of label of third-party dependencies.
+	DefaultLabelConvention = "$distribution_name$"
+	// The default normalization applied to distribution names of third-party dependency labels.
+	DefaultLabelNormalizationType = SnakeCaseLabelNormalizationType
 )
 
 // defaultIgnoreFiles is the list of default values used in the
@@ -95,21 +129,13 @@
 	"setup.py": {},
 }
 
-func SanitizeDistribution(distributionName string) string {
-	sanitizedDistribution := strings.ToLower(distributionName)
-	sanitizedDistribution = strings.ReplaceAll(sanitizedDistribution, "-", "_")
-	sanitizedDistribution = strings.ReplaceAll(sanitizedDistribution, ".", "_")
-
-	return sanitizedDistribution
-}
-
 // Configs is an extension of map[string]*Config. It provides finding methods
 // on top of the mapping.
 type Configs map[string]*Config
 
 // ParentForPackage returns the parent Config for the given Bazel package.
 func (c *Configs) ParentForPackage(pkg string) *Config {
-	dir := filepath.Dir(pkg)
+	dir := path.Dir(pkg)
 	if dir == "." {
 		dir = ""
 	}
@@ -126,37 +152,57 @@
 	pythonProjectRoot string
 	gazelleManifest   *manifest.Manifest
 
-	excludedPatterns             *singlylinkedlist.List
-	ignoreFiles                  map[string]struct{}
-	ignoreDependencies           map[string]struct{}
-	validateImportStatements     bool
-	coarseGrainedGeneration      bool
-	perFileGeneration            bool
-	perFileGenerationIncludeInit bool
-	libraryNamingConvention      string
-	binaryNamingConvention       string
-	testNamingConvention         string
+	excludedPatterns                          *singlylinkedlist.List
+	ignoreFiles                               map[string]struct{}
+	ignoreDependencies                        map[string]struct{}
+	validateImportStatements                  bool
+	coarseGrainedGeneration                   bool
+	perFileGeneration                         bool
+	perFileGenerationIncludeInit              bool
+	perPackageGenerationRequireTestEntryPoint bool
+	libraryNamingConvention                   string
+	binaryNamingConvention                    string
+	testNamingConvention                      string
+	defaultVisibility                         []string
+	visibility                                []string
+	testFilePattern                           []string
+	labelConvention                           string
+	labelNormalization                        LabelNormalizationType
 }
 
+type LabelNormalizationType int
+
+const (
+	NoLabelNormalizationType LabelNormalizationType = iota
+	Pep503LabelNormalizationType
+	SnakeCaseLabelNormalizationType
+)
+
 // New creates a new Config.
 func New(
 	repoRoot string,
 	pythonProjectRoot string,
 ) *Config {
 	return &Config{
-		extensionEnabled:             true,
-		repoRoot:                     repoRoot,
-		pythonProjectRoot:            pythonProjectRoot,
-		excludedPatterns:             singlylinkedlist.New(),
-		ignoreFiles:                  make(map[string]struct{}),
-		ignoreDependencies:           make(map[string]struct{}),
-		validateImportStatements:     true,
-		coarseGrainedGeneration:      false,
-		perFileGeneration:            false,
-		perFileGenerationIncludeInit: false,
-		libraryNamingConvention:      packageNameNamingConventionSubstitution,
-		binaryNamingConvention:       fmt.Sprintf("%s_bin", packageNameNamingConventionSubstitution),
-		testNamingConvention:         fmt.Sprintf("%s_test", packageNameNamingConventionSubstitution),
+		extensionEnabled:                          true,
+		repoRoot:                                  repoRoot,
+		pythonProjectRoot:                         pythonProjectRoot,
+		excludedPatterns:                          singlylinkedlist.New(),
+		ignoreFiles:                               make(map[string]struct{}),
+		ignoreDependencies:                        make(map[string]struct{}),
+		validateImportStatements:                  true,
+		coarseGrainedGeneration:                   false,
+		perFileGeneration:                         false,
+		perFileGenerationIncludeInit:              false,
+		perPackageGenerationRequireTestEntryPoint: true,
+		libraryNamingConvention:                   packageNameNamingConventionSubstitution,
+		binaryNamingConvention:                    fmt.Sprintf("%s_bin", packageNameNamingConventionSubstitution),
+		testNamingConvention:                      fmt.Sprintf("%s_test", packageNameNamingConventionSubstitution),
+		defaultVisibility:                         []string{fmt.Sprintf(DefaultVisibilityFmtString, "")},
+		visibility:                                []string{},
+		testFilePattern:                           strings.Split(DefaultTestFilePatternString, ","),
+		labelConvention:                           DefaultLabelConvention,
+		labelNormalization:                        DefaultLabelNormalizationType,
 	}
 }
 
@@ -180,9 +226,15 @@
 		coarseGrainedGeneration:      c.coarseGrainedGeneration,
 		perFileGeneration:            c.perFileGeneration,
 		perFileGenerationIncludeInit: c.perFileGenerationIncludeInit,
-		libraryNamingConvention:      c.libraryNamingConvention,
-		binaryNamingConvention:       c.binaryNamingConvention,
-		testNamingConvention:         c.testNamingConvention,
+		perPackageGenerationRequireTestEntryPoint: c.perPackageGenerationRequireTestEntryPoint,
+		libraryNamingConvention:                   c.libraryNamingConvention,
+		binaryNamingConvention:                    c.binaryNamingConvention,
+		testNamingConvention:                      c.testNamingConvention,
+		defaultVisibility:                         c.defaultVisibility,
+		visibility:                                c.visibility,
+		testFilePattern:                           c.testFilePattern,
+		labelConvention:                           c.labelConvention,
+		labelNormalization:                        c.labelNormalization,
 	}
 }
 
@@ -230,26 +282,23 @@
 	for currentCfg := c; currentCfg != nil; currentCfg = currentCfg.parent {
 		if currentCfg.gazelleManifest != nil {
 			gazelleManifest := currentCfg.gazelleManifest
-			if distributionName, ok := gazelleManifest.ModulesMapping[modName]; ok {
-				var distributionRepositoryName string
-				if gazelleManifest.PipDepsRepositoryName != "" {
-					distributionRepositoryName = gazelleManifest.PipDepsRepositoryName
-				} else if gazelleManifest.PipRepository != nil {
-					distributionRepositoryName = gazelleManifest.PipRepository.Name
-				}
-				sanitizedDistribution := SanitizeDistribution(distributionName)
+			for {
+				if distributionName, ok := gazelleManifest.ModulesMapping[modName]; ok {
+					var distributionRepositoryName string
+					if gazelleManifest.PipDepsRepositoryName != "" {
+						distributionRepositoryName = gazelleManifest.PipDepsRepositoryName
+					} else if gazelleManifest.PipRepository != nil {
+						distributionRepositoryName = gazelleManifest.PipRepository.Name
+					}
 
-				if repo := gazelleManifest.PipRepository; repo != nil && (repo.UsePipRepositoryAliases != nil && *repo.UsePipRepositoryAliases == false) {
-					// TODO @aignas 2023-10-31: to be removed later.
-					// @<repository_name>_<distribution_name>//:pkg
-					distributionRepositoryName = distributionRepositoryName + "_" + sanitizedDistribution
-					lbl := label.New(distributionRepositoryName, "", "pkg")
+					lbl := currentCfg.FormatThirdPartyDependency(distributionRepositoryName, distributionName)
 					return lbl.String(), true
 				}
-
-				// @<repository_name>//<distribution_name>
-				lbl := label.New(distributionRepositoryName, sanitizedDistribution, sanitizedDistribution)
-				return lbl.String(), true
+				i := strings.LastIndex(modName, ".")
+				if i == -1 {
+					break
+				}
+				modName = modName[:i]
 			}
 		}
 	}
@@ -363,6 +412,14 @@
 	return c.perFileGenerationIncludeInit
 }
 
+func (c *Config) SetPerPackageGenerationRequireTestEntryPoint(perPackageGenerationRequireTestEntryPoint bool) {
+	c.perPackageGenerationRequireTestEntryPoint = perPackageGenerationRequireTestEntryPoint
+}
+
+func (c *Config) PerPackageGenerationRequireTestEntryPoint() bool {
+	return c.perPackageGenerationRequireTestEntryPoint
+}
+
 // SetLibraryNamingConvention sets the py_library target naming convention.
 func (c *Config) SetLibraryNamingConvention(libraryNamingConvention string) {
 	c.libraryNamingConvention = libraryNamingConvention
@@ -395,3 +452,78 @@
 func (c *Config) RenderTestName(packageName string) string {
 	return strings.ReplaceAll(c.testNamingConvention, packageNameNamingConventionSubstitution, packageName)
 }
+
+// AppendVisibility adds additional items to the target's visibility.
+func (c *Config) AppendVisibility(visibility string) {
+	c.visibility = append(c.visibility, visibility)
+}
+
+// Visibility returns the target's visibility.
+func (c *Config) Visibility() []string {
+	return append(c.defaultVisibility, c.visibility...)
+}
+
+// SetDefaultVisibility sets the default visibility of the target.
+func (c *Config) SetDefaultVisibility(visibility []string) {
+	c.defaultVisibility = visibility
+}
+
+// DefaultVisibilty returns the target's default visibility.
+func (c *Config) DefaultVisibilty() []string {
+	return c.defaultVisibility
+}
+
+// SetTestFilePattern sets the file patterns that should be mapped to 'py_test' rules.
+func (c *Config) SetTestFilePattern(patterns []string) {
+	c.testFilePattern = patterns
+}
+
+// TestFilePattern returns the patterns that should be mapped to 'py_test' rules.
+func (c *Config) TestFilePattern() []string {
+	return c.testFilePattern
+}
+
+// SetLabelConvention sets the label convention used for third-party dependencies.
+func (c *Config) SetLabelConvention(convention string) {
+	c.labelConvention = convention
+}
+
+// LabelConvention returns the label convention used for third-party dependencies.
+func (c *Config) LabelConvention() string {
+	return c.labelConvention
+}
+
+// SetLabelNormalization sets the label normalization applied to distribution names of third-party dependencies.
+func (c *Config) SetLabelNormalization(normalizationType LabelNormalizationType) {
+	c.labelNormalization = normalizationType
+}
+
+// LabelNormalization returns the label normalization applied to distribution names of third-party dependencies.
+func (c *Config) LabelNormalization() LabelNormalizationType {
+	return c.labelNormalization
+}
+
+// FormatThirdPartyDependency returns a label to a third-party dependency performing all formatting and normalization.
+func (c *Config) FormatThirdPartyDependency(repositoryName string, distributionName string) label.Label {
+	conventionalDistributionName := strings.ReplaceAll(c.labelConvention, distributionNameLabelConventionSubstitution, distributionName)
+
+	var normConventionalDistributionName string
+	switch norm := c.LabelNormalization(); norm {
+	case SnakeCaseLabelNormalizationType:
+		// See /python/private/normalize_name.bzl
+		normConventionalDistributionName = strings.ToLower(conventionalDistributionName)
+		normConventionalDistributionName = regexp.MustCompile(`[-_.]+`).ReplaceAllString(normConventionalDistributionName, "_")
+		normConventionalDistributionName = strings.Trim(normConventionalDistributionName, "_")
+	case Pep503LabelNormalizationType:
+		// See https://packaging.python.org/en/latest/specifications/name-normalization/#name-format
+		normConventionalDistributionName = strings.ToLower(conventionalDistributionName)                                        // ... "should be lowercased"
+		normConventionalDistributionName = regexp.MustCompile(`[-_.]+`).ReplaceAllString(normConventionalDistributionName, "-") // ... "all runs of the characters ., -, or _ replaced with a single -"
+		normConventionalDistributionName = strings.Trim(normConventionalDistributionName, "-")                                  // ... "must start and end with a letter or number"
+	default:
+		fallthrough
+	case NoLabelNormalizationType:
+		normConventionalDistributionName = conventionalDistributionName
+	}
+
+	return label.New(repositoryName, normConventionalDistributionName, normConventionalDistributionName)
+}
diff --git a/gazelle/pythonconfig/pythonconfig_test.go b/gazelle/pythonconfig/pythonconfig_test.go
index bf31106..7cdb9af 100644
--- a/gazelle/pythonconfig/pythonconfig_test.go
+++ b/gazelle/pythonconfig/pythonconfig_test.go
@@ -4,20 +4,244 @@
 	"testing"
 )
 
-func TestDistributionSanitizing(t *testing.T) {
+func TestFormatThirdPartyDependency(t *testing.T) {
+	type testInput struct {
+		RepositoryName     string
+		DistributionName   string
+		LabelNormalization LabelNormalizationType
+		LabelConvention    string
+	}
+
 	tests := map[string]struct {
-		input string
+		input testInput
 		want  string
 	}{
-		"upper case": {input: "DistWithUpperCase", want: "distwithuppercase"},
-		"dashes":     {input: "dist-with-dashes", want: "dist_with_dashes"},
-		"dots":       {input: "dist.with.dots", want: "dist_with_dots"},
-		"mixed":      {input: "To-be.sanitized", want: "to_be_sanitized"},
+		"default / upper case": {
+			input: testInput{
+				DistributionName:   "DistWithUpperCase",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//distwithuppercase",
+		},
+		"default / dashes": {
+			input: testInput{
+				DistributionName:   "dist-with-dashes",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//dist_with_dashes",
+		},
+		"default / repeating dashes inside": {
+			input: testInput{
+				DistributionName:   "friendly--bard",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//friendly_bard",
+		},
+		"default / repeating underscores inside": {
+			input: testInput{
+				DistributionName:   "hello___something",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello_something",
+		},
+		"default / prefix repeating underscores": {
+			input: testInput{
+				DistributionName:   "__hello-something",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello_something",
+		},
+		"default / suffix repeating underscores": {
+			input: testInput{
+				DistributionName:   "hello-something___",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello_something",
+		},
+		"default / prefix repeating dashes": {
+			input: testInput{
+				DistributionName:   "---hello-something",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello_something",
+		},
+		"default / suffix repeating dashes": {
+			input: testInput{
+				DistributionName:   "hello-something----",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello_something",
+		},
+		"default / dots": {
+			input: testInput{
+				DistributionName:   "dist.with.dots",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//dist_with_dots",
+		},
+		"default / mixed": {
+			input: testInput{
+				DistributionName:   "FrIeNdLy-._.-bArD",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//friendly_bard",
+		},
+		"default / upper case / custom prefix & suffix": {
+			input: testInput{
+				DistributionName:   "DistWithUpperCase",
+				RepositoryName:     "pip",
+				LabelNormalization: DefaultLabelNormalizationType,
+				LabelConvention:    "pReFiX-$distribution_name$-sUfFiX",
+			},
+			want: "@pip//prefix_distwithuppercase_suffix",
+		},
+		"noop normalization / mixed": {
+			input: testInput{
+				DistributionName:   "not-TO-be.sanitized",
+				RepositoryName:     "pip",
+				LabelNormalization: NoLabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//not-TO-be.sanitized",
+		},
+		"noop normalization / mixed / custom prefix & suffix": {
+			input: testInput{
+				DistributionName:   "not-TO-be.sanitized",
+				RepositoryName:     "pip",
+				LabelNormalization: NoLabelNormalizationType,
+				LabelConvention:    "pre___$distribution_name$___fix",
+			},
+			want: "@pip//pre___not-TO-be.sanitized___fix",
+		},
+		"pep503 / upper case": {
+			input: testInput{
+				DistributionName:   "DistWithUpperCase",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//distwithuppercase",
+		},
+		"pep503 / underscores": {
+			input: testInput{
+				DistributionName:   "dist_with_underscores",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//dist-with-underscores",
+		},
+		"pep503 / repeating dashes inside": {
+			input: testInput{
+				DistributionName:   "friendly--bard",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//friendly-bard",
+		},
+		"pep503 / repeating underscores inside": {
+			input: testInput{
+				DistributionName:   "hello___something",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello-something",
+		},
+		"pep503 / prefix repeating underscores": {
+			input: testInput{
+				DistributionName:   "__hello-something",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello-something",
+		},
+		"pep503 / suffix repeating underscores": {
+			input: testInput{
+				DistributionName:   "hello-something___",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello-something",
+		},
+		"pep503 / prefix repeating dashes": {
+			input: testInput{
+				DistributionName:   "---hello-something",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello-something",
+		},
+		"pep503 / suffix repeating dashes": {
+			input: testInput{
+				DistributionName:   "hello-something----",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//hello-something",
+		},
+		"pep503 / dots": {
+			input: testInput{
+				DistributionName:   "dist.with.dots",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//dist-with-dots",
+		},
+		"pep503 / mixed": {
+			input: testInput{
+				DistributionName:   "To-be.sanitized",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    DefaultLabelConvention,
+			},
+			want: "@pip//to-be-sanitized",
+		},
+		"pep503 / underscores / custom prefix & suffix": {
+			input: testInput{
+				DistributionName:   "dist_with_underscores",
+				RepositoryName:     "pip",
+				LabelNormalization: Pep503LabelNormalizationType,
+				LabelConvention:    "pre___$distribution_name$___fix",
+			},
+			want: "@pip//pre-dist-with-underscores-fix",
+		},
 	}
 
 	for name, tc := range tests {
 		t.Run(name, func(t *testing.T) {
-			got := SanitizeDistribution(tc.input)
+			c := Config{
+				labelNormalization: tc.input.LabelNormalization,
+				labelConvention:    tc.input.LabelConvention,
+			}
+			gotLabel := c.FormatThirdPartyDependency(tc.input.RepositoryName, tc.input.DistributionName)
+			got := gotLabel.String()
 			if tc.want != got {
 				t.Fatalf("expected %q, got %q", tc.want, got)
 			}
diff --git a/internal_deps.bzl b/internal_deps.bzl
index 3835cd6..56962cb 100644
--- a/internal_deps.bzl
+++ b/internal_deps.bzl
@@ -57,18 +57,9 @@
 
     http_archive(
         name = "rules_testing",
-        sha256 = "8df0a8eb21739ea4b0a03f5dc79e68e245a45c076cfab404b940cc205cb62162",
-        strip_prefix = "rules_testing-0.4.0",
-        url = "https://github.com/bazelbuild/rules_testing/releases/download/v0.4.0/rules_testing-v0.4.0.tar.gz",
-    )
-
-    http_archive(
-        name = "rules_license",
-        urls = [
-            "https://mirror.bazel.build/github.com/bazelbuild/rules_license/releases/download/0.0.7/rules_license-0.0.7.tar.gz",
-            "https://github.com/bazelbuild/rules_license/releases/download/0.0.7/rules_license-0.0.7.tar.gz",
-        ],
-        sha256 = "4531deccb913639c30e5c7512a054d5d875698daeb75d8cf90f284375fe7c360",
+        sha256 = "02c62574631876a4e3b02a1820cb51167bb9cdcdea2381b2fa9d9b8b11c407c4",
+        strip_prefix = "rules_testing-0.6.0",
+        url = "https://github.com/bazelbuild/rules_testing/releases/download/v0.6.0/rules_testing-v0.6.0.tar.gz",
     )
 
     http_archive(
@@ -175,20 +166,17 @@
 
     http_archive(
         name = "rules_proto",
-        sha256 = "dc3fb206a2cb3441b485eb1e423165b231235a1ea9b031b4433cf7bc1fa460dd",
-        strip_prefix = "rules_proto-5.3.0-21.7",
-        urls = [
-            "https://github.com/bazelbuild/rules_proto/archive/refs/tags/5.3.0-21.7.tar.gz",
-        ],
+        sha256 = "904a8097fae42a690c8e08d805210e40cccb069f5f9a0f6727cf4faa7bed2c9c",
+        strip_prefix = "rules_proto-6.0.0-rc1",
+        url = "https://github.com/bazelbuild/rules_proto/releases/download/6.0.0-rc1/rules_proto-6.0.0-rc1.tar.gz",
     )
 
     http_archive(
         name = "com_google_protobuf",
-        sha256 = "75be42bd736f4df6d702a0e4e4d30de9ee40eac024c4b845d17ae4cc831fe4ae",
-        strip_prefix = "protobuf-21.7",
+        sha256 = "616bb3536ac1fff3fb1a141450fa28b875e985712170ea7f1bfe5e5fc41e2cd8",
+        strip_prefix = "protobuf-24.4",
         urls = [
-            "https://mirror.bazel.build/github.com/protocolbuffers/protobuf/archive/v21.7.tar.gz",
-            "https://github.com/protocolbuffers/protobuf/archive/v21.7.tar.gz",
+            "https://github.com/protocolbuffers/protobuf/releases/download/v24.4/protobuf-24.4.tar.gz",
         ],
     )
 
@@ -221,3 +209,17 @@
         ],
         sha256 = "4531deccb913639c30e5c7512a054d5d875698daeb75d8cf90f284375fe7c360",
     )
+
+    http_archive(
+        name = "bazel_features",
+        sha256 = "d7787da289a7fb497352211ad200ec9f698822a9e0757a4976fd9f713ff372b3",
+        strip_prefix = "bazel_features-1.9.1",
+        url = "https://github.com/bazel-contrib/bazel_features/releases/download/v1.9.1/bazel_features-v1.9.1.tar.gz",
+    )
+
+    http_archive(
+        name = "rules_cc",
+        sha256 = "2037875b9a4456dce4a79d112a8ae885bbc4aad968e6587dca6e64f3a0900cdf",
+        strip_prefix = "rules_cc-0.0.9",
+        urls = ["https://github.com/bazelbuild/rules_cc/releases/download/0.0.9/rules_cc-0.0.9.tar.gz"],
+    )
diff --git a/internal_setup.bzl b/internal_setup.bzl
index a80099f..1967c0e 100644
--- a/internal_setup.bzl
+++ b/internal_setup.bzl
@@ -14,6 +14,7 @@
 
 """Setup for rules_python tests and tools."""
 
+load("@bazel_features//:deps.bzl", "bazel_features_deps")
 load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
 load("@cgrindel_bazel_starlib//:deps.bzl", "bazel_starlib_dependencies")
 load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
@@ -21,8 +22,8 @@
 load("@rules_bazel_integration_test//bazel_integration_test:repo_defs.bzl", "bazel_binaries")
 load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies", "rules_proto_toolchains")
 load("//:version.bzl", "SUPPORTED_BAZEL_VERSIONS")
-load("//python/pip_install:repositories.bzl", "pip_install_dependencies")
 load("//python/private:internal_config_repo.bzl", "internal_config_repo")  # buildifier: disable=bzl-visibility
+load("//python/private/pypi:deps.bzl", "pypi_deps")  # buildifier: disable=bzl-visibility
 
 def rules_python_internal_setup():
     """Setup for rules_python tests and tools."""
@@ -30,7 +31,7 @@
     internal_config_repo(name = "rules_python_internal")
 
     # Because we don't use the pip_install rule, we have to call this to fetch its deps
-    pip_install_dependencies()
+    pypi_deps()
 
     bazel_skylib_workspace()
 
@@ -42,3 +43,4 @@
     bazel_integration_test_rules_dependencies()
     bazel_starlib_dependencies()
     bazel_binaries(versions = SUPPORTED_BAZEL_VERSIONS)
+    bazel_features_deps()
diff --git a/proposals/2018-10-25-selecting-between-python-2-and-3.md b/proposals/2018-10-25-selecting-between-python-2-and-3.md
deleted file mode 100644
index e731f97..0000000
--- a/proposals/2018-10-25-selecting-between-python-2-and-3.md
+++ /dev/null
@@ -1,136 +0,0 @@
----
-title: Selecting Between Python 2 and 3
-status: Accepted
-created: 2018-10-25
-updated: 2019-01-11
-authors:
-  - [brandjon@](https://github.com/brandjon)
-reviewers:
-  - [mrovner@](https://github.com/mrovner)
-discussion thread: [bazel #6583](https://github.com/bazelbuild/bazel/issues/6583)
----
-
-# Selecting Between Python 2 and 3
-
-## Abstract
-
-The "Python mode" configuration value controls whether Python 2 or Python 3 is used to run Python targets built by Bazel. This design document reviews the existing mechanisms for setting the Python mode (the "tri-state model") and describes a simplified mechanism that should replace it (the "boolean model").
-
-Links to Github issues are given where applicable. See also [bazel #6444](https://github.com/bazelbuild/bazel/issues/6444) for a tracking list of Python mode issues.
-
-Throughout, when we say `py_binary`, we also mean to include `py_test`.
-
-## Background
-
-The Python mode controls whether a Python 2 or 3 interpreter is used to run a `py_binary` that is built by Bazel.
-
-* When no `py_runtime` is supplied (via `--python_top`), the mode should control whether the command `python2` or `python3` is embedded into the generated wrapper script ([bazel #4815](https://github.com/bazelbuild/bazel/issues/4815)).
-
-* In a future design for a "`py_toolchain`"-type rule, a pair of interpreter targets will be bundled together as a toolchain, and the mode will control which one gets their full path embedded into this script.
-
-The Python mode is also used to help validate that Python source code annotated with `srcs_version` is used appropriately: If a Python target has the `srcs_version` attribute set to `PY2` or `PY3` rather than to `PY2AND3` (the default), it can only be depended on by targets built in Python 2 or Python 3 mode respectively.
-
-Whenever the same Bazel target can be built in multiple configurations within a single build, it is necessary to write the output artifacts of different versions of the target to different paths. Otherwise the build fails with an "action conflict" error -- Bazel's way of avoiding a correctness bug. For Python targets, and more broadly for targets that may transitively depend on Python targets, this means that different output path roots must be used for different Python modes.
-
-## Out-of-scope generalizations
-
-It is possible to imagine extending the Python mode and `srcs_version` so that it can check for compatibility with minor releases (ex: "Python 3.7"), patch releases ("Python 3.7.1"), alternative interpreters ("CPython" or "PyPy"), and exclude known bad releases. We decline to do so because this treads into generalized constraint checking, which may be better handled in the future by the [platforms and toolchain framework](https://docs.bazel.build/versions/master/toolchains.html).
-
-Compared to these other kinds of version checks, Python 2 vs. 3 is a more compelling use case to support with dedicated machinery. The incompatibilities between these versions are more severe. In many code bases there is an ongoing effort to migrate from 2 to 3, while in others there exists Python 2 code that will never be migrated and must be supported indefinitely.
-
-## Tri-state model
-
-Under the existing tri-state model, the Python mode can take on three values: `PY2`, `PY3`, and `null`. The first two modes can be triggered by the `--force_python` flag on the command line or by the `default_python_version` attribute on `py_binary` rules. The `null` mode is the default state when neither the flag nor `default_python_version` is specified. `select()` expressions can distinguish between these states by using `config_setting`s that test the value of `force_python` (where `null` is matched by `//conditions:default`).
-
-The Python mode is "sticky"; once it is set to `PY2` or `PY3`, it stays that way for all subsequent targets. For a `py_binary` target, this means that all transitive dependencies of the target are built with the same mode as the target itself. For the `--force_python` flag, this means that if the flag is given, it applies universally to the entire build invocation, regardless of the `default_python_version` attributes of any Python targets (hence the "default" in the attribute's name).
-
-### Data dependencies
-
-In principle the Python mode needs to propagate to any `py_library` targets that are transitively in the `deps` attribute. Conceptually, this corresponds to enforcing that a Python binary cannot `import` a module written for a different version of Python than the currently running interpreter. But there is no need to propagate the mode across the `data` attribute, which often corresponds to one Python binary calling another as a separate process.
-
-In order to facilitate `PY3` binaries that depend on `PY2` ones and vice versa, the tri-state model needs to be modified so that the mode is reset to `null` for `data` attributes ([bazel #6441](https://github.com/bazelbuild/bazel/issues/6441)). But it's not clear exactly which attributes should trigger a reset. For example, suppose a Python source file is generated by a `genrule`: Then the `genrule` shouldn't propagate any Python mode to any of its attributes, even though it appears in the transitive closure of a `py_binary`'s `deps`. One could imagine resetting the mode across every attribute except those in a small whitelist (`deps` of `py_binary`, `py_test`, and `py_library`), but this would require new functionality in Bazel and possibly interact poorly with Starlark-defined rules.
-
-### Output roots
-
-Since targets that are built for Python 3 produce different results than those built for Python 2, the outputs for these two configurations must be kept separate in order to avoid action conflicts. Therefore, targets built in `PY3` mode get placed under an output root that includes the string "`-py3`".
-
-Currently, targets that are built in the `null` mode default to using Python 2. Counterintuitively, there is a subtle distinction between building a target in `null` mode and `PY2` mode: Even though the same interpreter is used for the top-level target, the target's transitive dependencies may behave differently, for instance if a `select()` on `force_python` is used. This means that using both `PY2` and `null` for the same target can result in action conflicts ([bazel #6501](https://github.com/bazelbuild/bazel/issues/6501)). However, due to a bug it is not yet possible to have both `PY2` and `null` modes within the same build invocation.
-
-Under the tri-state model, the most straightforward solution for these action conflicts is to use a separate "`-py2`" root for `PY2` mode. This would mean that the same target could be built in not two but three different configurations, corresponding to the three different modes, even though there are only two distinct Python versions. A more complicated alternative would be to prohibit `select()` from being able to distinguish `null` from `PY2`, in order to help ensure that building an arbitrary target in both of these modes does not succeed with different results.
-
-### Libraries at the top level
-
-Currently the mode is only changed by `--force_python` and by `py_binary`. This means that when you build a `py_library` at the top level (that is, specifying it directly on the build command line) without a `--force_python` flag, the library gets the `null` mode, which means Python 2 by default. This causes an error if the library has `srcs_python` set to `PY3`. This in turn means you cannot run a flagless build command on a wildcard pattern, such as `bazel build :all` or `bazel build ...`, if any of the targets in the package(s) contains a Python 3-only library target. Worse, if there are both a Python 2-only library and a Python 3-only library, even specifying `--force_python` can't make the wildcard build work.
-
-In the tri-state model, this can be addressed by allowing `py_library` to change the mode from `null` to either `PY2` or `PY3` based on whichever version is compatible with its `srcs_version` attribute. This was a proposed fix for [bazel #1446](https://github.com/bazelbuild/bazel/issues/1446).
-
-## Boolean model
-
-Under the boolean model, `null` is eliminated as a valid value for the Python mode. Instead, the mode will immediately default to either `PY2` or `PY3`. The mode is no longer sticky, but changes as needed whenever a new `py_binary` target is reached.
-
-Since there is no longer a third value corresponding to "uncommitted", a target can no longer tell whether it was set to `PY2` mode explicitly (by a flag or a `py_binary`), or if it was set by default because no mode was specified. The current version will be inspectable using `config_setting` to read a setting whose value is always one of `"PY2"` or `"PY3"`.
-
-### Data dependencies
-
-Since `py_binary` will now change the mode as needed, there is no need to explicitly reset the mode to a particular value (`null`) when crossing `data` attributes. Python 3 targets can freely depend on Python 2 targets and vice versa, so long as the dependency is not via the `deps` attribute in a way that violates `srcs_version` validation (see below).
-
-### Output roots
-
-Since there are only two modes, there need only be two output roots. This avoids action conflicts without resorting to creating a redundant third output root, or trying to coerce two similar-but-distinct modes to map onto the same output root.
-
-Since the mode is not being reset across data dependencies, it is possible that compared to the tri-state model, the boolean model causes some data dependencies to be built in two configurations instead of just one. This is considered to be an acceptable tradeoff of the boolean model. Note that there exist other cases where redundant rebuilding occurs regardless of which model we use.
-
-### Libraries at the top level
-
-We want to be able to build a `py_library` at the top level without having to specify the correct mode. At the same time, we still want `srcs_version` to validate that a `py_binary` only depends on `py_library`s that are compatible with its mode. The way to achieve this is to move validation from within the `py_library` rule up to the `py_binary` rule.
-
-We add two new boolean fields to a provider returned by `py_library`. This bools correspond to whether or not there are any Python 2-only and Python 3-only sources (respectively) in the library's transitive closure. It is easy to compute these bits as boolean ORs as the providers are merged. `py_binary` simply checks these bits against its own Python mode.
-
-It is important that when `py_binary` detects a version conflict, the user is given the label of one or more transitive dependencies that introduced the constraint. There are several ways to implement this, such as:
-
-- additional provider fields to propagate context to the error message
-- an aspect that traverses the dependencies of the `py_binary`
-- emitting warning messages at conflicting `py_library` targets
-
-The choice of which approach to use is outside the scope of this proposal.
-
-It is possible that a library is only ever used by Python 3 binaries, but when the library is built as part of a `bazel build :all` command it gets the Python 2 mode by default. This happens even if the library is annotated with `srcs_version` set to `PY3`. Generally this should cause no harm aside from some repeated build work. In the future we can add the same version attribute that `py_binary` has to `py_library`, so the target definition can be made unambiguous.
-
-Aside from failures due to validation, there is currently a bug whereby building a `PY2` library in `PY3` mode can invoke a stub wrapper that fails ([bazel #1393](https://github.com/bazelbuild/bazel/issues/1393)). We will remove the stub and the behavior that attempted to call it.
-
-## API changes
-
-The attribute `default_python_version` of `py_binary` is renamed to `python_version`. The flag `--force_python` is renamed to `--python_version`. (An alternative naming scheme would have been to use "python_major_version", but this is more verbose and inconsistent with `srcs_version`.)
-
-The Python mode becomes "non-sticky" and `srcs_version` validation becomes less strict. Building a `py_library` target directly will not trigger validation. Building a `py_binary` that depends on a `py_library` having an incompatible version will only fail if the dependency occurs via transitive `deps`, and not when it occurs via other paths such as a `data` dep or a `genrule` that produces a source file.
-
-The `"py"` provider of Python rules gains two new boolean fields, `has_py2_only_sources` and `has_py3_only_sources`. Existing Python rules are updated to set these fields. Dependencies of Python rules that do not have the `"py"` provider, or those fields on that provider, are treated as if the value of the fields is `False`.
-
-A new `select()`-able target is created at `@bazel_tools//tools/python:python_version` to return the current Python mode. It can be used in the `flag_values` attribute of `config_setting` and always equals either `"PY2"` or `"PY3"`. (In the future this flag may be moved out of `@bazel_tools` and into `bazelbuild/rules_python`. It may also be made into a `build_setting` so that it can replace the native `--python_version` flag.) It is disallowed to use `"python_version"` in a `config_setting`.
-
-The flag `--host_force_python` is unaffected by this doc, except that it becomes illegal to use it in a `config_setting`.
-
-## Migration and compatibility
-
-The rollout and migration of the new features are split into two groups, syntactic and semantic.
-
-For syntax, the new `--python_version` flag and `python_version` attribute are available immediately, and behave exactly the same as the old flag and attribute. When both the new and old flags are present on the command line, or both the new and old attributes are present on the same target, the new one takes precedence and the old is ignored. The `@bazel_tools//tools/python:python_version` target is also available unconditionally.
-
-A migration flag `--incompatible_remove_old_python_version_api` makes unavailable the `--force_python` flag and `default_python_version` attribute, and disallows `select()`-ing on `"force_python"` and `"host_force_python"`.
-
-For semantics, a flag `--incompatible_allow_python_version_transitions` makes Bazel use the new non-sticky version transitions and the deferred `srcs_version` validation. This applies regardless of whether the new or old API is used to specify the Python version. The new `"py"` provider fields are created regardless of which flags are given.
-
-Migrating for `--incompatible_remove_old_python_version_api` guarantees that the Python version only ever has two possible values. Migrating for `--incompatible_allow_python_version_transitions` enables data dependencies across different versions of Python. It is recommended to do the API migration first in order to avoid action conflicts.
-
-Strictly speaking, Python 3 support is currently marked "experimental" in documentation, so in theory we may be able to make these changes without introducing new incompatible and experimental flags. However these changes will likely affect many users of the Python rules, so flags would be more user-friendly. Bazel is also transitioning to a policy wherein all experimental APIs must be flag-guarded, regardless of any disclaimers in their documentation.
-
-## Changelog
-
-Date         | Change
------------- | ------
-2018-10-25   | Initial version
-2018-11-02   | Refine migration path
-2018-12-17   | Refine plan for `select()`
-2018-12-19   | Refine plan for `select()` again
-2019-01-10   | Refine migration path
-2019-01-11   | Formal approval and update provider fields
diff --git a/proposals/2018-11-08-customizing-the-python-stub-template.md b/proposals/2018-11-08-customizing-the-python-stub-template.md
deleted file mode 100644
index 5b9d878..0000000
--- a/proposals/2018-11-08-customizing-the-python-stub-template.md
+++ /dev/null
@@ -1,47 +0,0 @@
----
-title: Customizing the Python Stub Template
-status: Draft, not yet ready for review
-created: 2018-11-08
-updated: 2018-11-09
-authors:
-  - [brandjon@](https://github.com/brandjon)
-reviewers:
-  - [gpshead@](https://github.com/gpshead)
-discussion thread: [bazel #137](https://github.com/bazelbuild/bazel/issues/137)
----
-
-# Customizing the Python Stub Template
-
-## Abstract
-
-This design document proposes a way to use a different Python stub template, so that users can control how the Python interpreter gets invoked to run their targets.
-
-**Open questions:** It is not currently clear whether the use cases warrant this kind of expressivity, or whether users can get by with smaller, more narrowly focused ways of parameterizing the existing stub template. The exact stub API is also to be determined.
-
-## Background
-
-The usual executable artifact of a `py_binary` rule is a Python stub script. This script manipulates the Python environment to set up the module import path and make the runfiles available, before passing control to the underlying user Python program. The stub script is generated from a [stub template](https://github.com/bazelbuild/bazel/blob/ef0024b831a71521390dcb837b24b86485e5998d/src/main/java/com/google/devtools/build/lib/bazel/rules/python/python_stub_template.txt) by [instantiating some placeholders](https://github.com/bazelbuild/bazel/blob/ef0024b831a71521390dcb837b24b86485e5998d/src/main/java/com/google/devtools/build/lib/bazel/rules/python/BazelPythonSemantics.java#L152-L159).
-
-Generally the Python stub and user program is executed using the system Python interpreter of the target platform. Although this is non-hermetic, the details of the interpreter can be reified by a [`py_runtime`](https://docs.bazel.build/versions/master/be/python.html#py_runtime) target. In the future this will allow for platform-aware selection of an appropriate Python interpreter using the [toolchain](https://docs.bazel.build/versions/master/toolchains.html) framework.
-
-## Proposal
-
-A new `Label`-valued attribute, `stub_template`, is added to `py_runtime`. This label points to a file; by default it is `//tools/python:python_stub_template.txt`, which is the renamed location of the existing template. The `py_runtime` rule will resolve this label to an `Artifact` and propagate it in a new field of [`BazelPyRuntimeProvider`](https://github.com/bazelbuild/bazel/blob/1f684e1b87cd8881a0a4b33e86ba66743e32d674/src/main/java/com/google/devtools/build/lib/bazel/rules/python/BazelPyRuntimeProvider.java). [`BazelPythonSemantics#createExecutable`](https://github.com/bazelbuild/bazel/blob/ef0024b831a71521390dcb837b24b86485e5998d/src/main/java/com/google/devtools/build/lib/bazel/rules/python/BazelPythonSemantics.java#L130) will refer to this `Artifact` instead of retrieving the template as a Java resource file.
-
-It is not yet decided which template placeholders are specified, or whether the placeholders will remain an experimental API for the moment.
-
-## Original approach
-
-An earlier proposed approach (suggested on the discussion thread, and implemented by [fahhem@](https://github.com/fahhem)) was to add the `stub_template` attribute to `py_binary` rather than to `py_runtime`.
-
-This would make it trivial to customize the stub for an individual Python target without affecting the other targets in the build. This could be useful if there were a one-off target that had special requirements.
-
-However, the author believes that the stub is more naturally tied to the Python interpreter than to an individual target. Putting the attribute on `py_runtime` makes it easy to affect all Python targets that use the same interpreter. It also allows the same Python target to use different stubs depending on which interpreter it is built for -- for instance, the same target can have different stubs on different platforms.
-
-If it is necessary to use a custom stub for a particular target, that could still be achieved by making that one target use a different `py_runtime`. This isn't possible at the moment but will be when a `py_toolchain` rule is added.
-
-## Changelog
-
-Date         | Change
------------- | ------
-2018-11-08   | Initial version
diff --git a/proposals/2019-02-12-design-for-a-python-toolchain.md b/proposals/2019-02-12-design-for-a-python-toolchain.md
deleted file mode 100644
index 0d45866..0000000
--- a/proposals/2019-02-12-design-for-a-python-toolchain.md
+++ /dev/null
@@ -1,247 +0,0 @@
----
-title: Design for a Python Toolchain
-status: Accepted
-created: 2019-02-12
-updated: 2019-02-21
-authors:
-  - [brandjon@](https://github.com/brandjon)
-reviewers:
-  - [katre@](https://github.com/katre), [mrovner@](https://github.com/mrovner), [nlopezgi@](https://github.com/nlopezgi)
-discussion thread: [bazel #7375](https://github.com/bazelbuild/bazel/issues/7375)
----
-
-# Design for a Python Toolchain
-
-## Abstract
-
-This doc outlines the design of a Python toolchain rule and its associated machinery. Essentially a new `py_runtime_pair` toolchain rule is created to wrap two `py_runtime` targets (one for Python 2 and one for Python 3), thereby making runtimes discoverable via [toolchain resolution](https://docs.bazel.build/versions/master/toolchains.html). This replaces the previous mechanism of explicitly specifying a global runtime via `--python_top` or `--python_path`; those flags are now deprecated.
-
-The new toolchain-related definitions are implemented in Starlark. A byproduct of this is that the provider type for `py_runtime` is exposed to Starlark. We also add to `py_runtime` an attribute for declaring whether it represents a Python 2 or Python 3 runtime.
-
-## Motivation
-
-The goal is to make the native Python rules use the toolchain framework to resolve the Python runtime. Advantages include:
-
-* allowing each `py_binary` to use a runtime suitable for its target platform
-
-* allowing Python 2 and Python 3 targets to run in the same build without [hacks](https://github.com/bazelbuild/bazel/issues/4815#issuecomment-460777113)
-
-* making it easier to run Python-related builds under remote execution
-
-* adding support for autodetection of available system Python runtimes, without requiring ad hoc rule logic
-
-* removing `--python_top` and `--python_path`
-
-* bringing Python in line with other rule sets and Bazel's best practices
-
-**Non-goal:** This work does not allow individual `py_binary`s to directly name a Python runtime to use. Instead, this information should be worked into either the configuration or a future toolchain constraint system. See the FAQ, below.
-
-## Design
-
-### New definitions
-
-A new [toolchain type](https://docs.bazel.build/versions/master/toolchains.html#writing-rules-that-use-toolchains) is created at `@bazel_tools//tools/python:toolchain_type`. This is the type for toolchains that provide a way to run Python code.
-
-Toolchain rules of this type are expected to return a [`ToolchainInfo`](https://docs.bazel.build/versions/master/skylark/lib/ToolchainInfo.html) with two fields, `py2_runtime` and `py3_runtime`, each of type `PyRuntimeInfo`. They are used for `PY2` and `PY3` binaries respectively.
-
-```python
-def _some_python_toolchain_impl(ctx):
-    ...
-    return [platform_common.ToolchainInfo(
-        py2_runtime = PyRuntimeInfo(...),
-        py3_runtime = PyRuntimeInfo(...))]
-```
-
-If either Python 2 or Python 3 is not provided by the toolchain, the corresponding field may be set to `None`. This is strongly discouraged, as it will prevent any target relying on that toolchain from using that version of Python. Toolchains that do use `None` here should be registered with lower priority than other toolchains, so that they are chosen only as a fallback.
-
-`PyRuntimeInfo` is the newly-exposed Starlark name of the native provider returned by the [`py_runtime`](https://docs.bazel.build/versions/master/be/python.html#py_runtime) rule. Like `PyInfo`, it is a top-level built-in name. Also like `PyInfo` and the native Python rules, it will eventually be migrated to Starlark and moved out of the Bazel repository.
-
-A `PyRuntimeInfo` describes either a *platform runtime* or an *in-build runtime*. A platform runtime accesses a system-installed interpreter at a known path, whereas an in-build runtime points to a build target that acts as the interpreter. In both cases, an "interpreter" is really any executable binary or wrapper script that is capable of running a Python script passed on the command line, following the same conventions as the standard CPython interpreter. Note that any platform runtime imposes a requirement on the target platform. Therefore, any toolchain returning such a `PyRuntimeInfo` should include a corresponding target platform constraint, to ensure it cannot be selected for a platform that does not have the interpreter at that path. Even an in-build runtime can require platform constraints, for instance in the case of a wrapper script that invokes the system interpreter.
-
-We provide two [`constraint_setting`](https://docs.bazel.build/versions/master/be/platform.html#constraint_setting)s to act as a standardized namespace for this kind of platform constraint: `@bazel_tools//tools/python:py2_interpreter_path` and `@bazel_tools//tools/python:py3_interpreter_path`. This doc does not mandate any particular structure for the names of [`constraint_value`](https://docs.bazel.build/versions/master/be/platform.html#constraint_value)s associated with these settings. If a platform does not provide a Python 2 runtime, it should have no constraint value associated with `py2_interpreter_path`, and similarly for Python 3.
-
-`PyRuntimeInfo` has the following fields, each of which corresponds to an attribute on `py_runtime`. (The last one, `python_version`, is newly added in this doc.)
-
-* `interpreter_path`: If this is a platform runtime, this field is the absolute filesystem path to the interpreter on the target platform. Otherwise, this is `None`.
-
-* `interpreter`: If this is an in-build runtime, this field is a `File` representing the interpreter. Otherwise, this is `None`.
-
-* `files`: If this is an in-build runtime, this field is a depset of `File`s that need to be added to the runfiles of an executable target that uses this toolchain. The value of `interpreter` need not be included in this field. If this is a platform runtime then this field is `None`.
-
-* `python_version`: Either the string `"PY2"` or `"PY3"`, indicating which version of Python the interpreter referenced by `interpreter_path` or `interpreter` is.
-
-The constructor of `PyRuntimeInfo` takes each of these fields as keyword arguments. The constructor enforces the invariants about which combinations of fields may be `None`. Fields that are not meaningful may be omitted; e.g. when `interpreter_path` is given, `interpreter` and `files` may be omitted instead of passing `None`.
-
-It is not possible to directly specify a system command (e.g. `"python"`) in `interpreter_path`. However, this can be done indirectly by creating a wrapper script that invokes the system command, and referencing that script from the `interpreter` field.
-
-Finally, we define a standard Python toolchain rule implementing the new toolchain type. The rule's name is `py_runtime_pair` and it can be loaded from `@bazel_tools//tools/python:toolchain.bzl`. It has two label-valued attributes, `py2_runtime` and `py3_runtime`, that refer to `py_runtime` targets.
-
-### Changes to the native Python rules
-
-The executable Python rules [`py_binary`](https://docs.bazel.build/versions/master/be/python.html#py_binary) and [`py_test`](https://docs.bazel.build/versions/master/be/python.html#py_test) are modified to require the new toolchain type. The Python runtime information is obtained by retrieving a `PyRuntimeInfo` from either the `py2_runtime` or `py3_runtime` field of the toolchain, rather than from `--python_top`. The `python_version` field of the `PyRuntimeInfo` is also checked to ensure that a `py_runtime` didn't accidentally end up in the wrong place.
-
-Since `--python_top` is no longer read, it is deprecated. Since `--python_path` was only read when no runtime information is available, but the toolchain must always be present, it too is deprecated.
-
-Implementation wise, the native `PyRuntimeProvider` is turned into the user-visible `PyRuntimeInfo` by adding Starlark API annotations in the usual way (`@SkylarkCallable`, etc.). A previous version of this proposal suggested defining `PyRuntimeInfo` in Starlark underneath `@bazel_tools` and accessing it from the native rules, but this is technically difficult to implement.
-
-A `python_version` attribute is added to `py_runtime`. It is mandatory and accepts values `"PY2"` and `"PY3"` only.
-
-As a drive-by cleanup (and non-breaking change), the `files` attribute of `py_runtime` is made optional. For the non-hermetic case, specifying `files` is nonsensical and it is even an error to give it a non-empty value. For the hermetic case, `files` can be useful but is by no means necessary if the interpreter requires no additional in-repo inputs (such as when the "interpreter" is just a wrapper script that dispatches to the platform's system interpreter).
-
-### Default toolchain
-
-For convenience, we supply a predefined [toolchain](https://docs.bazel.build/versions/master/be/platform.html#toolchain) of last resort, `@bazel_tools//tools/python:autodetecting_python_toolchain`. This toolchain is registered with lower priority than any user-registered Python toolchain. It simply dispatches to a wrapper script that tries to locate a suitable interpreter from `PATH` at runtime, on a best-effort basis. It has no platform constraints.
-
-## Example
-
-Here is a minimal example that defines a platform whose Python interpreters are located under a non-standard path. The example also defines a Python toolchain to accompany this platform.
-
-```python
-# //platform_defs:BUILD
-
-load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair")
-
-# Constraint values that represent that the system's "python2" and "python3"
-# executables are located under /usr/weirdpath.
-
-constraint_value(
-    name = "usr_weirdpath_python2",
-    constraint_setting = "@bazel_tools//tools/python:py2_interpreter_path",
-)
-
-constraint_value(
-    name = "usr_weirdpath_python3",
-    constraint_setting = "@bazel_tools//tools/python:py3_interpreter_path",
-)
-
-# A definition of a platform whose Python interpreters are under these paths.
-
-platform(
-    name = "my_platform",
-    constraint_values = [
-        ":usr_weirdpath_python2",
-        ":usr_weirdpath_python3",
-    ],
-)
-
-# Python runtime definitions that reify these system paths as BUILD targets.
-
-py_runtime(
-    name = "my_platform_py2_runtime",
-    interpreter_path = "/usr/weirdpath/python2",
-)
-
-py_runtime(
-    name = "my_platform_py3_runtime",
-    interpreter_path = "/usr/weirdpath/python3",
-)
-
-py_runtime_pair(
-    name = "my_platform_runtimes",
-    py2_runtime = ":my_platform_py2_runtime",
-    py3_runtime = ":my_platform_py3_runtime",
-)
-
-# A toolchain definition to expose these runtimes to toolchain resolution.
-
-toolchain(
-    name = "my_platform_python_toolchain",
-    # Since the Python interpreter is invoked at runtime on the target
-    # platform, there's no need to specify execution platform constraints here.
-    target_compatible_with = [
-        # Make sure this toolchain is only selected for a target platform that
-        # advertises that it has interpreters available under /usr/weirdpath.
-        ":usr_weirdpath_python2",
-        ":usr_weirdpath_python3",
-    ],
-    toolchain = ":my_platform_runtimes",
-    toolchain_type = "@bazel_tools//tools/python:toolchain_type",
-)
-```
-
-```python
-# //pkg:BUILD
-
-# An ordinary Python target to build.
-py_binary(
-    name = "my_pybin",
-    srcs = ["my_pybin.py"],
-    python_version = "PY3",
-)
-```
-
-```python
-# WORKSPACE
-
-# Register the custom Python toolchain so it can be chosen for my_platform.
-register_toolchains(
-    "//platform_defs:my_platform_python_toolchain",
-)
-```
-
-We can then build with
-
-```
-bazel build //pkg:my_pybin --platforms=//platform_defs:my_platform
-```
-
-and thanks to toolchain resolution, the resulting executable will automatically know to use the interpreter located at `/usr/weirdpath/python3`.
-
-If we had not defined a custom toolchain, then we'd be stuck with `autodetecting_python_toolchain`, which would fail at execution time if `/usr/weirdpath` were not on `PATH`. (It would also be slightly slower since it requires an extra invocation of the interpreter at execution time to confirm its version.)
-
-## Backward compatibility
-
-The new `@bazel_tools` definitions and the `PyRuntimeInfo` provider are made available immediately. A new flag, `--incompatible_use_python_toolchains`, is created to assist migration. When the flag is enabled, `py_binary` and `py_test` will use the `PyRuntimeInfo` obtained from the toolchain, instead of the one obtained from `--python_top` or the default information in `--python_path`. In addition, when `--incompatible_use_python_toolchains` is enabled it is an error to set the following flags: `--python_top`, `--python_path`, `--python2_path`, `--python3_path`. (The latter two were already deprecated.) These flags will be deleted when the incompatible flag is removed.
-
-Because of how the toolchain framework is implemented, it is not possible to gate whether a rule requires a toolchain type based on a flag. Therefore `py_binary` and `py_test` are made to require `@bazel_tools//tools/python:toolchain_type` immediately and unconditionally. This may impact how toolchain resolution determines the toolchains and execution platforms for a given build, but should not otherwise cause problems so long as the build uses constraints correctly.
-
-The new `python_version` attribute is added to `py_runtime` immediately. Its default value is the same as the `python_version` attribute for `py_binary`, i.e. `PY3` if `--incompatible_py3_is_default` is true and `PY2` otherwise. When `--incompatible_use_python_toolchains` is enabled this attribute becomes mandatory.
-
-## FAQ
-
-#### How can I force a `py_binary` to use a given runtime, say for a particular minor version of Python?
-
-This is not directly addressed by this doc. Note that such a system could be used not just for controlling the minor version of the interpreter, but also to choose between different Python implementations (CPython vs PyPy), compilation modes (optimized, debug), an interpreter linked with a pre-selected set of extensions, etc.
-
-There are two possible designs.
-
-The first design is to put this information in the configuration, and have the toolchain read the configuration to decide which `PyRuntimeInfo` to return. We'd use Starlark Build Configurations to define a flag to represent the Python minor version, and transition the `py_binary` target's configuration to use this version. This configuration would be inherited by the resolved toolchain just like any other dependency inherits its parents configuration. The toolchain could then use a `select()` on the minor version flag to choose which `py_runtime` to depend on.
-
-There's one problem: Currently all toolchains are analyzed in the host configuration. It is expected that this will be addressed soon.
-
-We could even migrate the Python major version to use this approach. Instead of having two different `ToolchainInfo` fields, `py2_runtime` and `py3_runtime`, we'd have a single `py_runtime` field that would be populated with one or the other based on the configuration. (It's still a good idea to keep them as separate attributes in the user-facing toolchain rule, i.e. `py_runtime_pair`, because it's a very common use case to require both major versions of Python in a build. But note that this causes both runtimes to be analyzed as dependencies, even if the whole build uses only one or the other.)
-
-The second design for controlling what runtime is chosen is to introduce additional constraints on the toolchain, and let toolchain resolution solve the problem. However, currently toolchains only support constraints on the target and execution platforms, and this is not a platform-related constraint. What would be needed is a per-target semantic-level constraint system.
-
-The second approach has the advantage of allowing individual runtimes to be registered independently, without having to combine them into a massive `select()`. But the first approach is much more feasible to implement in the short-term.
-
-#### Why `py_runtime_pair` as opposed to some other way of organizing multiple Python runtimes?
-
-Alternatives might include a dictionary mapping from version identifiers to runtimes, or a list of runtimes paired with additional metadata.
-
-The `PY2`/`PY3` dichotomy is already baked into the Python rule set and indeed the Python ecosystem at large. Keeping this concept in the toolchain rule serves to complement, rather than complicate, Bazel's existing Python support.
-
-It will always be possible to add new toolchains, first by extending the schema of the `ToolchainInfo` accepted by the Python rules, and then by defining new user-facing toolchain rules that serve as front-ends for this provider.
-
-#### Why not split Python 2 and Python 3 into two separate toolchain types?
-
-The general pattern for rule sets seems to be to have a single toolchain type representing all of a language's concerns. Case in point: The naming convention for toolchain types is to literally name the target "toolchain_type", and let the package path distinguish its label.
-
-If the way of categorizing Python runtimes changes in the future, it will probably be easier to migrate rules to use a new provider schema than to use a new set of toolchain types.
-
-#### How does the introduction of new symbols to `@bazel_tools` affect the eventual plan to migrate the Python rules to `bazelbuild/rules_python`?
-
-The new `PyRuntimeInfo` provider and `py_runtime_pair` rule would have forwarding aliases set up, so they could be accessed both from `@bazel_tools` and `rules_python` during a future migration window.
-
-Forwarding aliases would also be defined for the toolchain type and the two `constraint_setting`s. Note that aliasing `toolchain_type`s is currently broken ([#7404](https://github.com/bazelbuild/bazel/issues/7404)).
-
-In the initial implementation of this proposal, the predefined `autodetecting_python_toolchain` will be automatically registered in the user's workspace by Bazel. This follows precedent for other languages with built-in support in Bazel. Once the rules are migrated to `rules_python`, registration will not be automatic; the user will have to explicitly call a configuration helper defined in `rules_python` from their own `WORKSPACE` file.
-
-## Changelog
-
-Date         | Change
------------- | ------
-2019-02-12   | Initial version
-2019-02-14   | Make `PyRuntimeInfo` natively defined
-2019-02-15   | Clarify platform runtime vs in-build runtime
-2019-02-21   | Formal approval
diff --git a/proposals/README.md b/proposals/README.md
deleted file mode 100644
index 36a8a0b..0000000
--- a/proposals/README.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Python Rules Proposals
-
-This is an index of all design documents and proposals for Python rules, both in native code (the Bazel binary) and in Starlark (the rules_python repository). Some of these proposals are also hosted in this directory.
-
-Proposals that impact native code are also indexed by [bazelbuild/proposals](https://github.com/bazelbuild/proposals), and subject to the [Bazel design process](https://bazel.build/designs/index.html).
-
-Last updated | Status        | Title | Author(s)
------------- | ------------- | ------| ---------
-2019-02-21   | Accepted      | [Design for a Python Toolchain](https://github.com/bazelbuild/rules_python/blob/master/proposals/2019-02-12-design-for-a-python-toolchain.md) | [brandjon@](https://github.com/brandjon)
-2018-11-09   | Draft         | [Customizing the Python Stub Template](https://github.com/bazelbuild/rules_python/blob/master/proposals/2018-11-08-customizing-the-python-stub-template.md) | [brandjon@](https://github.com/brandjon)
-2019-01-11   | Accepted      | [Selecting Between Python 2 and 3](https://github.com/bazelbuild/rules_python/blob/master/proposals/2018-10-25-selecting-between-python-2-and-3.md) | [brandjon@](https://github.com/brandjon)
diff --git a/python/BUILD.bazel b/python/BUILD.bazel
index 1ab59d5..878d20b 100644
--- a/python/BUILD.bazel
+++ b/python/BUILD.bazel
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 """This package contains two sets of rules:
 
     1) the "core" Python rules, which were historically bundled with Bazel and
@@ -38,8 +39,10 @@
         "//python/constraints:distribution",
         "//python/entry_points:distribution",
         "//python/extensions:distribution",
+        "//python/pip_install:distribution",
         "//python/private:distribution",
         "//python/runfiles:distribution",
+        "//python/uv:distribution",
     ],
     visibility = ["//:__pkg__"],
 )
@@ -72,15 +75,22 @@
 )
 
 bzl_library(
+    name = "features_bzl",
+    srcs = ["features.bzl"],
+)
+
+bzl_library(
     name = "packaging_bzl",
     srcs = ["packaging.bzl"],
     deps = [
         ":py_binary_bzl",
+        "//python/private:bzlmod_enabled_bzl",
         "//python/private:py_package.bzl",
         "//python/private:py_wheel_bzl",
         "//python/private:py_wheel_normalize_pep440.bzl",
         "//python/private:stamp_bzl",
         "//python/private:util_bzl",
+        "@bazel_skylib//rules:native_binary",
     ],
 )
 
@@ -88,12 +98,13 @@
     name = "pip_bzl",
     srcs = ["pip.bzl"],
     deps = [
-        "//python/pip_install:pip_repository_bzl",
-        "//python/pip_install:repositories_bzl",
-        "//python/pip_install:requirements_bzl",
-        "//python/private:bzlmod_enabled_bzl",
-        "//python/private:full_version_bzl",
-        "//python/private:render_pkg_aliases_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private/pypi:multi_pip_parse_bzl",
+        "//python/private/pypi:package_annotation_bzl",
+        "//python/private/pypi:pip_compile_bzl",
+        "//python/private/pypi:pip_repository_bzl",
+        "//python/private/pypi:whl_library_alias_bzl",
+        "//python/private/whl_filegroup:whl_filegroup_bzl",
     ],
 )
 
@@ -122,6 +133,10 @@
 bzl_library(
     name = "py_cc_link_params_info_bzl",
     srcs = ["py_cc_link_params_info.bzl"],
+    deps = [
+        "//python/private/common:providers_bzl",
+        "@rules_python_internal//:rules_python_config_bzl",
+    ],
 )
 
 bzl_library(
@@ -177,6 +192,7 @@
         "//python/private:reexports_bzl",
         "//python/private:util_bzl",
         "//python/private/common:providers_bzl",
+        "@rules_python_internal//:rules_python_config_bzl",
     ],
 )
 
@@ -195,16 +211,7 @@
     name = "repositories_bzl",
     srcs = ["repositories.bzl"],
     deps = [
-        ":versions_bzl",
-        "//python/pip_install:repositories_bzl",
-        "//python/private:auth_bzl",
-        "//python/private:bazel_tools_bzl",
-        "//python/private:bzlmod_enabled_bzl",
-        "//python/private:coverage_deps_bzl",
-        "//python/private:full_version_bzl",
-        "//python/private:internal_config_repo_bzl",
-        "//python/private:toolchains_repo_bzl",
-        "//python/private:which_bzl",
+        "//python/private:python_repositories_bzl",
     ],
 )
 
@@ -290,6 +297,11 @@
     actual = "@bazel_tools//tools/python:toolchain_type",
 )
 
+toolchain_type(
+    name = "exec_tools_toolchain_type",
+    visibility = ["//visibility:public"],
+)
+
 # Definitions for a Python toolchain that, at execution time, attempts to detect
 # a platform runtime having the appropriate major Python version. Consider this
 # a toolchain of last resort.
@@ -302,12 +314,14 @@
 
 alias(
     name = "autodetecting_toolchain",
-    actual = "@bazel_tools//tools/python:autodetecting_toolchain",
+    actual = "//python/runtime_env_toolchains:runtime_env_toolchain",
+    deprecation = "Use //python/runtime_env_toolchains:all instead",
 )
 
 alias(
     name = "autodetecting_toolchain_nonstrict",
-    actual = "@bazel_tools//tools/python:autodetecting_toolchain_nonstrict",
+    actual = "//python/runtime_env_toolchains:runtime_env_toolchain",
+    deprecation = "Use //python/runtime_env_toolchains:all instead",
 )
 
 # ========= Packaging rules =========
diff --git a/python/cc/BUILD.bazel b/python/cc/BUILD.bazel
index 0d90e15..d384d05 100644
--- a/python/cc/BUILD.bazel
+++ b/python/cc/BUILD.bazel
@@ -3,6 +3,7 @@
 load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
 load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
 load("//python/private:current_py_cc_headers.bzl", "current_py_cc_headers")
+load("//python/private:current_py_cc_libs.bzl", "current_py_cc_libs")
 
 package(
     default_visibility = ["//:__subpackages__"],
@@ -19,6 +20,17 @@
     visibility = ["//visibility:public"],
 )
 
+# This target provides the C libraries for whatever the current toolchain is for
+# the consuming rule. It basically acts like a cc_library by forwarding on the
+# providers for the underlying cc_library that the toolchain is using.
+current_py_cc_libs(
+    name = "current_py_cc_libs",
+    # Building this directly will fail unless a py cc toolchain is registered,
+    # and it's only under bzlmod that one is registered by default.
+    tags = [] if BZLMOD_ENABLED else ["manual"],
+    visibility = ["//visibility:public"],
+)
+
 toolchain_type(
     name = "toolchain_type",
     visibility = ["//visibility:public"],
diff --git a/python/config_settings/BUILD.bazel b/python/config_settings/BUILD.bazel
index ab4ee8d..f2383d6 100644
--- a/python/config_settings/BUILD.bazel
+++ b/python/config_settings/BUILD.bazel
@@ -1,13 +1,168 @@
-load("//python:versions.bzl", "TOOL_VERSIONS")
+load("@bazel_skylib//rules:common_settings.bzl", "string_flag")
+load(
+    "//python/private:flags.bzl",
+    "BootstrapImplFlag",
+    "ExecToolsToolchainFlag",
+    "PrecompileAddToRunfilesFlag",
+    "PrecompileFlag",
+    "PrecompileSourceRetentionFlag",
+    "PycCollectionFlag",
+)
+load(
+    "//python/private/pypi:flags.bzl",
+    "UniversalWhlFlag",
+    "UseWhlFlag",
+    "WhlLibcFlag",
+    "define_pypi_internal_flags",
+)
 load(":config_settings.bzl", "construct_config_settings")
 
 filegroup(
     name = "distribution",
-    srcs = glob(["*.bzl"]) + [
-        "BUILD.bazel",
+    srcs = glob(["**"]) + [
         "//python/config_settings/private:distribution",
     ],
     visibility = ["//python:__pkg__"],
 )
 
-construct_config_settings(python_versions = TOOL_VERSIONS.keys())
+construct_config_settings(
+    name = "construct_config_settings",
+)
+
+string_flag(
+    name = "exec_tools_toolchain",
+    build_setting_default = ExecToolsToolchainFlag.DISABLED,
+    values = sorted(ExecToolsToolchainFlag.__members__.values()),
+    # NOTE: Only public because it is used in py_toolchain_suite from toolchain
+    # repositories
+    visibility = ["//visibility:private"],
+)
+
+config_setting(
+    name = "is_exec_tools_toolchain_enabled",
+    flag_values = {
+        "exec_tools_toolchain": ExecToolsToolchainFlag.ENABLED,
+    },
+    # NOTE: Only public because it is used in py_toolchain_suite from toolchain
+    # repositories
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "precompile",
+    build_setting_default = PrecompileFlag.AUTO,
+    values = sorted(PrecompileFlag.__members__.values()),
+    # NOTE: Only public because it's an implicit dependency
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "precompile_source_retention",
+    build_setting_default = PrecompileSourceRetentionFlag.KEEP_SOURCE,
+    values = sorted(PrecompileSourceRetentionFlag.__members__.values()),
+    # NOTE: Only public because it's an implicit dependency
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "precompile_add_to_runfiles",
+    build_setting_default = PrecompileAddToRunfilesFlag.ALWAYS,
+    values = sorted(PrecompileAddToRunfilesFlag.__members__.values()),
+    # NOTE: Only public because it's an implicit dependency
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "pyc_collection",
+    build_setting_default = PycCollectionFlag.DISABLED,
+    values = sorted(PycCollectionFlag.__members__.values()),
+    # NOTE: Only public because it's an implicit dependency
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "bootstrap_impl",
+    build_setting_default = BootstrapImplFlag.SYSTEM_PYTHON,
+    values = sorted(BootstrapImplFlag.__members__.values()),
+    # NOTE: Only public because it's an implicit dependency
+    visibility = ["//visibility:public"],
+)
+
+# This is used for pip and hermetic toolchain resolution.
+string_flag(
+    name = "py_linux_libc",
+    build_setting_default = WhlLibcFlag.GLIBC,
+    values = sorted(WhlLibcFlag.__members__.values()),
+    # NOTE: Only public because it is used in pip hub and toolchain repos.
+    visibility = ["//visibility:public"],
+)
+
+# pip.parse related flags
+
+string_flag(
+    name = "pip_whl",
+    build_setting_default = UseWhlFlag.AUTO,
+    values = sorted(UseWhlFlag.__members__.values()),
+    # NOTE: Only public because it is used in pip hub repos.
+    visibility = ["//visibility:public"],
+)
+
+config_setting(
+    name = "is_pip_whl_auto",
+    flag_values = {
+        ":pip_whl": UseWhlFlag.AUTO,
+    },
+    # NOTE: Only public because it is used in pip hub repos.
+    visibility = ["//visibility:public"],
+)
+
+config_setting(
+    name = "is_pip_whl_no",
+    flag_values = {
+        ":pip_whl": UseWhlFlag.NO,
+    },
+    # NOTE: Only public because it is used in pip hub repos.
+    visibility = ["//visibility:public"],
+)
+
+config_setting(
+    name = "is_pip_whl_only",
+    flag_values = {
+        ":pip_whl": UseWhlFlag.ONLY,
+    },
+    # NOTE: Only public because it is used in pip hub repos.
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "pip_whl_osx_arch",
+    build_setting_default = UniversalWhlFlag.ARCH,
+    values = sorted(UniversalWhlFlag.__members__.values()),
+    # NOTE: Only public because it is used in pip hub repos.
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "pip_whl_glibc_version",
+    build_setting_default = "",
+    # NOTE: Only public because it is used in pip hub repos.
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "pip_whl_muslc_version",
+    build_setting_default = "",
+    # NOTE: Only public because it is used in pip hub repos.
+    visibility = ["//visibility:public"],
+)
+
+string_flag(
+    name = "pip_whl_osx_version",
+    build_setting_default = "",
+    # NOTE: Only public because it is used in pip hub repos.
+    visibility = ["//visibility:public"],
+)
+
+define_pypi_internal_flags(
+    name = "define_pypi_internal_flags",
+)
diff --git a/python/config_settings/config_settings.bzl b/python/config_settings/config_settings.bzl
index 21e477e..f1d2ff0 100644
--- a/python/config_settings/config_settings.bzl
+++ b/python/config_settings/config_settings.bzl
@@ -15,26 +15,16 @@
 """This module is used to construct the config settings in the BUILD file in this same package.
 """
 
-load("@bazel_skylib//rules:common_settings.bzl", "string_flag")
+load(
+    "//python/private:config_settings.bzl",
+    _construct_config_settings = "construct_config_settings",
+    _is_python_config_setting = "is_python_config_setting",
+)
 
-# buildifier: disable=unnamed-macro
-def construct_config_settings(python_versions):
-    """Constructs a set of configs for all Python versions.
+# This is exposed only for cases where the pip hub repo needs to use this rule
+# to define hub-repo scoped config_settings for platform specific wheel
+# support.
+is_python_config_setting = _is_python_config_setting
 
-    Args:
-        python_versions: The Python versions supported by rules_python.
-    """
-    string_flag(
-        name = "python_version",
-        build_setting_default = python_versions[0],
-        values = python_versions,
-        visibility = ["//visibility:public"],
-    )
-
-    for python_version in python_versions:
-        python_version_constraint_setting = "is_python_" + python_version
-        native.config_setting(
-            name = python_version_constraint_setting,
-            flag_values = {":python_version": python_version},
-            visibility = ["//visibility:public"],
-        )
+# This is exposed for usage in rules_python only.
+construct_config_settings = _construct_config_settings
diff --git a/python/config_settings/private/BUILD.bazel b/python/config_settings/private/BUILD.bazel
index aa68c65..cb42e49 100644
--- a/python/config_settings/private/BUILD.bazel
+++ b/python/config_settings/private/BUILD.bazel
@@ -1,7 +1,5 @@
 filegroup(
     name = "distribution",
-    srcs = glob(["*.bzl"]) + [
-        "BUILD.bazel",
-    ],
+    srcs = glob(["**"]),
     visibility = ["//python/config_settings:__pkg__"],
 )
diff --git a/python/config_settings/transition.bzl b/python/config_settings/transition.bzl
index cd54b21..7ac41f8 100644
--- a/python/config_settings/transition.bzl
+++ b/python/config_settings/transition.bzl
@@ -43,48 +43,50 @@
         output = executable,
         target_file = target[DefaultInfo].files_to_run.executable,
     )
-    zipfile_symlink = None
+    default_outputs = []
     if target_is_windows:
-        # Under Windows, the expected "<name>.zip" does not exist, so we have to
-        # create the symlink ourselves to achieve the same behaviour as in macOS
-        # and Linux.
-        zipfile = None
-        expected_target_path = target[DefaultInfo].files_to_run.executable.short_path[:-4] + ".zip"
-        for file in target[DefaultInfo].default_runfiles.files.to_list():
-            if file.short_path == expected_target_path:
-                zipfile = file
-        zipfile_symlink = ctx.actions.declare_file(ctx.attr.name + ".zip")
-        ctx.actions.symlink(
-            is_executable = True,
-            output = zipfile_symlink,
-            target_file = zipfile,
-        )
+        # NOTE: Bazel 6 + host=linux + target=windows results in the .exe extension missing
+        inner_bootstrap_path = _strip_suffix(target[DefaultInfo].files_to_run.executable.short_path, ".exe")
+        inner_bootstrap = None
+        inner_zip_file_path = inner_bootstrap_path + ".zip"
+        inner_zip_file = None
+        for file in target[DefaultInfo].files.to_list():
+            if file.short_path == inner_bootstrap_path:
+                inner_bootstrap = file
+            elif file.short_path == inner_zip_file_path:
+                inner_zip_file = file
+
+        # TODO: Use `fragments.py.build_python_zip` once Bazel 6 support is dropped.
+        # Which file the Windows .exe looks for depends on the --build_python_zip file.
+        # Bazel 7+ has APIs to know the effective value of that flag, but not Bazel 6.
+        # To work around this, we treat the existence of a .zip in the default outputs
+        # to mean --build_python_zip=true.
+        if inner_zip_file:
+            suffix = ".zip"
+            underlying_launched_file = inner_zip_file
+        else:
+            suffix = ""
+            underlying_launched_file = inner_bootstrap
+
+        if underlying_launched_file:
+            launched_file_symlink = ctx.actions.declare_file(ctx.attr.name + suffix)
+            ctx.actions.symlink(
+                is_executable = True,
+                output = launched_file_symlink,
+                target_file = underlying_launched_file,
+            )
+            default_outputs.append(launched_file_symlink)
+
     env = {}
     for k, v in ctx.attr.env.items():
         env[k] = ctx.expand_location(v)
 
-    if PyInfo in target:
-        py_info = target[PyInfo]
-    elif BuiltinPyInfo in target:
-        py_info = target[BuiltinPyInfo]
-    else:
-        fail("target {} does not have rules_python PyInfo or builtin PyInfo".format(target))
-
-    if PyRuntimeInfo in target:
-        py_runtime_info = target[PyRuntimeInfo]
-    elif BuiltinPyRuntimeInfo in target:
-        py_runtime_info = target[BuiltinPyRuntimeInfo]
-    else:
-        fail("target {} does not have rules_python PyRuntimeInfo or builtin PyRuntimeInfo".format(target))
-
     providers = [
         DefaultInfo(
             executable = executable,
-            files = depset([zipfile_symlink] if zipfile_symlink else [], transitive = [target[DefaultInfo].files]),
-            runfiles = ctx.runfiles([zipfile_symlink] if zipfile_symlink else []).merge(target[DefaultInfo].default_runfiles),
+            files = depset(default_outputs, transitive = [target[DefaultInfo].files]),
+            runfiles = ctx.runfiles(default_outputs).merge(target[DefaultInfo].default_runfiles),
         ),
-        py_info,
-        py_runtime_info,
         # Ensure that the binary we're wrapping is included in code coverage.
         coverage_common.instrumented_files_info(
             ctx,
@@ -97,6 +99,15 @@
         # https://github.com/bazelbuild/bazel/commit/dbdfa07e92f99497be9c14265611ad2920161483
         testing.TestEnvironment(env),
     ]
+    if PyInfo in target:
+        providers.append(target[PyInfo])
+    if BuiltinPyInfo in target and PyInfo != BuiltinPyInfo:
+        providers.append(target[BuiltinPyInfo])
+
+    if PyRuntimeInfo in target:
+        providers.append(target[PyRuntimeInfo])
+    if BuiltinPyRuntimeInfo in target and PyRuntimeInfo != BuiltinPyRuntimeInfo:
+        providers.append(target[BuiltinPyRuntimeInfo])
     return providers
 
 _COMMON_ATTRS = {
@@ -164,6 +175,7 @@
     attrs = _COMMON_ATTRS | _PY_TEST_ATTRS,
     cfg = _transition_python_version,
     executable = True,
+    fragments = ["py"],
 )
 
 _transition_py_test = rule(
@@ -171,6 +183,7 @@
     attrs = _COMMON_ATTRS | _PY_TEST_ATTRS,
     cfg = _transition_python_version,
     test = True,
+    fragments = ["py"],
 )
 
 def _py_rule(rule_impl, transition_rule, name, python_version, **kwargs):
@@ -258,3 +271,9 @@
 
 def py_test(name, python_version, **kwargs):
     return _py_rule(_py_test, _transition_py_test, name, python_version, **kwargs)
+
+def _strip_suffix(s, suffix):
+    if s.endswith(suffix):
+        return s[:-len(suffix)]
+    else:
+        return s
diff --git a/python/current_py_toolchain.bzl b/python/current_py_toolchain.bzl
index e3345cb..f3ff2ac 100644
--- a/python/current_py_toolchain.bzl
+++ b/python/current_py_toolchain.bzl
@@ -14,6 +14,8 @@
 
 """Public entry point for current_py_toolchain rule."""
 
+load("//python/private:toolchain_types.bzl", "TARGET_TOOLCHAIN_TYPE")
+
 def _current_py_toolchain_impl(ctx):
     toolchain = ctx.toolchains[ctx.attr._toolchain]
 
@@ -50,9 +52,9 @@
     """,
     implementation = _current_py_toolchain_impl,
     attrs = {
-        "_toolchain": attr.string(default = str(Label("@bazel_tools//tools/python:toolchain_type"))),
+        "_toolchain": attr.string(default = str(TARGET_TOOLCHAIN_TYPE)),
     },
     toolchains = [
-        str(Label("@bazel_tools//tools/python:toolchain_type")),
+        str(TARGET_TOOLCHAIN_TYPE),
     ],
 )
diff --git a/python/entry_points/BUILD.bazel b/python/entry_points/BUILD.bazel
index d45fb18..46dbd92 100644
--- a/python/entry_points/BUILD.bazel
+++ b/python/entry_points/BUILD.bazel
@@ -32,10 +32,6 @@
 
 filegroup(
     name = "distribution",
-    srcs = glob([
-        "*.bzl",
-    ]) + [
-        "BUILD.bazel",
-    ],
+    srcs = glob(["**"]),
     visibility = ["//python:__subpackages__"],
 )
diff --git a/python/extensions/BUILD.bazel b/python/extensions/BUILD.bazel
index 88e3984..e8a63d6 100644
--- a/python/extensions/BUILD.bazel
+++ b/python/extensions/BUILD.bazel
@@ -28,12 +28,14 @@
     name = "pip_bzl",
     srcs = ["pip.bzl"],
     visibility = ["//:__subpackages__"],
-    deps = ["//python/private/bzlmod:pip_bzl"],
+    deps = ["//python/private/pypi:pip_bzl"],
 )
 
 bzl_library(
     name = "python_bzl",
     srcs = ["python.bzl"],
     visibility = ["//:__subpackages__"],
-    deps = ["//python/private/bzlmod:python_bzl"],
+    deps = [
+        "//python/private:python_bzl",
+    ],
 )
diff --git a/python/extensions/pip.bzl b/python/extensions/pip.bzl
index a69ee34..e9d4726 100644
--- a/python/extensions/pip.bzl
+++ b/python/extensions/pip.bzl
@@ -14,6 +14,6 @@
 
 "pip module extension for use with bzlmod"
 
-load("//python/private/bzlmod:pip.bzl", _pip = "pip")
+load("//python/private/pypi:pip.bzl", _pip = "pip")
 
 pip = _pip
diff --git a/python/extensions/python.bzl b/python/extensions/python.bzl
index 5428b75..4148d90 100644
--- a/python/extensions/python.bzl
+++ b/python/extensions/python.bzl
@@ -14,6 +14,6 @@
 
 "Python toolchain module extensions for use with bzlmod"
 
-load("//python/private/bzlmod:python.bzl", _python = "python")
+load("//python/private:python.bzl", _python = "python")
 
 python = _python
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/python/features.bzl
similarity index 68%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to python/features.bzl
index bbdfb4c..3a10532 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/python/features.bzl
@@ -1,14 +1,18 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#     http://www.apache.org/licenses/LICENSE-2.0
+#    http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+"""Allows detecting of rules_python features that aren't easily detected."""
 
+features = struct(
+    precompile = True,
+)
diff --git a/python/packaging.bzl b/python/packaging.bzl
index f811965..a5ac25b 100644
--- a/python/packaging.bzl
+++ b/python/packaging.bzl
@@ -14,7 +14,9 @@
 
 """Public API for for building wheels."""
 
+load("@bazel_skylib//rules:native_binary.bzl", "native_binary")
 load("//python:py_binary.bzl", "py_binary")
+load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
 load("//python/private:py_package.bzl", "py_package_lib")
 load("//python/private:py_wheel.bzl", _PyWheelInfo = "PyWheelInfo", _py_wheel = "py_wheel")
 load("//python/private:util.bzl", "copy_propagating_kwargs")
@@ -70,7 +72,7 @@
     },
 )
 
-def py_wheel(name, twine = None, publish_args = [], **kwargs):
+def py_wheel(name, twine = None, twine_binary = Label("//tools/publish:twine") if BZLMOD_ENABLED else None, publish_args = [], **kwargs):
     """Builds a Python Wheel.
 
     Wheels are Python distribution format defined in https://www.python.org/dev/peps/pep-0427/.
@@ -115,19 +117,21 @@
     )
     ```
 
-    To publish the wheel to PyPI, the twine package is required.
-    rules_python doesn't provide twine itself, see [https://github.com/bazelbuild/rules_python/issues/1016].
-    However you can install it with [pip_parse](#pip_parse), just like we do in the WORKSPACE file in rules_python.
+    To publish the wheel to PyPI, the twine package is required and it is installed
+    by default on `bzlmod` setups. On legacy `WORKSPACE`, `rules_python`
+    doesn't provide `twine` itself
+    (see https://github.com/bazelbuild/rules_python/issues/1016), but
+    you can install it with `pip_parse`, just like we do any other dependencies.
 
-    Once you've installed twine, you can pass its label to the `twine` attribute of this macro,
-    to get a "[name].publish" target.
+    Once you've installed twine, you can pass its label to the `twine`
+    attribute of this macro, to get a "[name].publish" target.
 
     Example:
 
     ```python
     py_wheel(
         name = "my_wheel",
-        twine = "@publish_deps_twine//:pkg",
+        twine = "@publish_deps//twine",
         ...
     )
     ```
@@ -143,6 +147,7 @@
     Args:
         name:  A unique name for this target.
         twine: A label of the external location of the py_library target for twine
+        twine_binary: A label of the external location of a binary target for twine.
         publish_args: arguments passed to twine, e.g. ["--repository-url", "https://pypi.my.org/simple/"].
             These are subject to make var expansion, as with the `args` attribute.
             Note that you can also pass additional args to the bazel run command as in the example above.
@@ -158,16 +163,32 @@
 
     _py_wheel(name = name, **kwargs)
 
-    if twine:
-        if not twine.endswith(":pkg"):
-            fail("twine label should look like @my_twine_repo//:pkg")
-        twine_main = twine.replace(":pkg", ":rules_python_wheel_entry_point_twine.py")
+    twine_args = []
+    if twine or twine_binary:
         twine_args = ["upload"]
         twine_args.extend(publish_args)
         twine_args.append("$(rootpath :{})/*".format(_dist_target))
 
-        # TODO: use py_binary from //python:defs.bzl after our stardoc setup is less brittle
-        # buildifier: disable=native-py
+    if twine_binary:
+        twine_kwargs = {"tags": ["manual"]}
+        native_binary(
+            name = "{}.publish".format(name),
+            src = twine_binary,
+            out = select({
+                "@platforms//os:windows": "{}.publish_script.exe".format(name),
+                "//conditions:default": "{}.publish_script".format(name),
+            }),
+            args = twine_args,
+            data = [_dist_target],
+            visibility = kwargs.get("visibility"),
+            **copy_propagating_kwargs(kwargs, twine_kwargs)
+        )
+    elif twine:
+        if not twine.endswith(":pkg"):
+            fail("twine label should look like @my_twine_repo//:pkg")
+
+        twine_main = twine.replace(":pkg", ":rules_python_wheel_entry_point_twine.py")
+
         py_binary(
             name = "{}.publish".format(name),
             srcs = [twine_main],
diff --git a/python/pip.bzl b/python/pip.bzl
index aeedf57..a1a6720 100644
--- a/python/pip.bzl
+++ b/python/pip.bzl
@@ -19,250 +19,27 @@
 for internal use only.
 """
 
-load("//python/pip_install:pip_repository.bzl", "pip_repository", _package_annotation = "package_annotation")
-load("//python/pip_install:requirements.bzl", _compile_pip_requirements = "compile_pip_requirements")
-load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
-load("//python/private:full_version.bzl", "full_version")
 load("//python/private:normalize_name.bzl", "normalize_name")
-load("//python/private:render_pkg_aliases.bzl", "NO_MATCH_ERROR_MESSAGE_TEMPLATE")
+load("//python/private/pypi:multi_pip_parse.bzl", _multi_pip_parse = "multi_pip_parse")
+load("//python/private/pypi:package_annotation.bzl", _package_annotation = "package_annotation")
+load("//python/private/pypi:pip_compile.bzl", "pip_compile")
+load("//python/private/pypi:pip_repository.bzl", "pip_repository")
+load("//python/private/pypi:whl_library_alias.bzl", _whl_library_alias = "whl_library_alias")
+load("//python/private/whl_filegroup:whl_filegroup.bzl", _whl_filegroup = "whl_filegroup")
 
-compile_pip_requirements = _compile_pip_requirements
+compile_pip_requirements = pip_compile
 package_annotation = _package_annotation
 pip_parse = pip_repository
-
-def _multi_pip_parse_impl(rctx):
-    rules_python = rctx.attr._rules_python_workspace.workspace_name
-    load_statements = []
-    install_deps_calls = []
-    process_requirements_calls = []
-    for python_version, pypi_repository in rctx.attr.pip_parses.items():
-        sanitized_python_version = python_version.replace(".", "_")
-        load_statement = """\
-load(
-    "@{pypi_repository}//:requirements.bzl",
-    _{sanitized_python_version}_install_deps = "install_deps",
-    _{sanitized_python_version}_all_requirements = "all_requirements",
-)""".format(
-            pypi_repository = pypi_repository,
-            sanitized_python_version = sanitized_python_version,
-        )
-        load_statements.append(load_statement)
-        process_requirements_call = """\
-_process_requirements(
-    pkg_labels = _{sanitized_python_version}_all_requirements,
-    python_version = "{python_version}",
-    repo_prefix = "{pypi_repository}_",
-)""".format(
-            pypi_repository = pypi_repository,
-            python_version = python_version,
-            sanitized_python_version = sanitized_python_version,
-        )
-        process_requirements_calls.append(process_requirements_call)
-        install_deps_call = """    _{sanitized_python_version}_install_deps(**whl_library_kwargs)""".format(
-            sanitized_python_version = sanitized_python_version,
-        )
-        install_deps_calls.append(install_deps_call)
-
-    # NOTE @aignas 2023-10-31: I am not sure it is possible to render aliases
-    # for all of the packages using the `render_pkg_aliases` function because
-    # we need to know what the list of packages for each version is and then
-    # we would be creating directories for each.
-    macro_tmpl = "@%s_{}//:{}" % rctx.attr.name
-
-    requirements_bzl = """\
-# Generated by python/pip.bzl
-
-load("@{rules_python}//python:pip.bzl", "whl_library_alias", "pip_utils")
-{load_statements}
-
-_wheel_names = []
-_version_map = dict()
-def _process_requirements(pkg_labels, python_version, repo_prefix):
-    for pkg_label in pkg_labels:
-        wheel_name = Label(pkg_label).package
-        if not wheel_name:
-            # We are dealing with the cases where we don't have aliases.
-            workspace_name = Label(pkg_label).workspace_name
-            wheel_name = workspace_name[len(repo_prefix):]
-
-        _wheel_names.append(wheel_name)
-        if not wheel_name in _version_map:
-            _version_map[wheel_name] = dict()
-        _version_map[wheel_name][python_version] = repo_prefix
-
-{process_requirements_calls}
-
-def requirement(name):
-    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "pkg")
-
-def whl_requirement(name):
-    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "whl")
-
-def data_requirement(name):
-    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "data")
-
-def dist_info_requirement(name):
-    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "dist_info")
-
-def entry_point(pkg, script = None):
-    fail("Not implemented yet")
-
-def install_deps(**whl_library_kwargs):
-{install_deps_calls}
-    for wheel_name in _wheel_names:
-        whl_library_alias(
-            name = "{name}_" + wheel_name,
-            wheel_name = wheel_name,
-            default_version = "{default_version}",
-            version_map = _version_map[wheel_name],
-        )
-""".format(
-        name = rctx.attr.name,
-        install_deps_calls = "\n".join(install_deps_calls),
-        load_statements = "\n".join(load_statements),
-        macro_tmpl = macro_tmpl,
-        process_requirements_calls = "\n".join(process_requirements_calls),
-        rules_python = rules_python,
-        default_version = rctx.attr.default_version,
-    )
-    rctx.file("requirements.bzl", requirements_bzl)
-    rctx.file("BUILD.bazel", "exports_files(['requirements.bzl'])")
-
-_multi_pip_parse = repository_rule(
-    _multi_pip_parse_impl,
-    attrs = {
-        "default_version": attr.string(),
-        "pip_parses": attr.string_dict(),
-        "_rules_python_workspace": attr.label(default = Label("//:WORKSPACE")),
-    },
-)
-
-def _whl_library_alias_impl(rctx):
-    rules_python = rctx.attr._rules_python_workspace.workspace_name
-    if rctx.attr.default_version:
-        default_repo_prefix = rctx.attr.version_map[rctx.attr.default_version]
-    else:
-        default_repo_prefix = None
-    version_map = rctx.attr.version_map.items()
-    build_content = ["# Generated by python/pip.bzl"]
-    for alias_name in ["pkg", "whl", "data", "dist_info"]:
-        build_content.append(_whl_library_render_alias_target(
-            alias_name = alias_name,
-            default_repo_prefix = default_repo_prefix,
-            rules_python = rules_python,
-            version_map = version_map,
-            wheel_name = rctx.attr.wheel_name,
-        ))
-    rctx.file("BUILD.bazel", "\n".join(build_content))
-
-def _whl_library_render_alias_target(
-        alias_name,
-        default_repo_prefix,
-        rules_python,
-        version_map,
-        wheel_name):
-    # The template below adds one @, but under bzlmod, the name
-    # is canonical, so we have to add a second @.
-    if BZLMOD_ENABLED:
-        rules_python = "@" + rules_python
-
-    alias = ["""\
-alias(
-    name = "{alias_name}",
-    actual = select({{""".format(alias_name = alias_name)]
-    for [python_version, repo_prefix] in version_map:
-        alias.append("""\
-        "@{rules_python}//python/config_settings:is_python_{full_python_version}": "{actual}",""".format(
-            full_python_version = full_version(python_version),
-            actual = "@{repo_prefix}{wheel_name}//:{alias_name}".format(
-                repo_prefix = repo_prefix,
-                wheel_name = wheel_name,
-                alias_name = alias_name,
-            ),
-            rules_python = rules_python,
-        ))
-    if default_repo_prefix:
-        default_actual = "@{repo_prefix}{wheel_name}//:{alias_name}".format(
-            repo_prefix = default_repo_prefix,
-            wheel_name = wheel_name,
-            alias_name = alias_name,
-        )
-        alias.append('        "//conditions:default": "{default_actual}",'.format(
-            default_actual = default_actual,
-        ))
-
-    alias.append("    },")  # Close select expression condition dict
-    if not default_repo_prefix:
-        supported_versions = sorted([python_version for python_version, _ in version_map])
-        alias.append('    no_match_error="""{}""",'.format(
-            NO_MATCH_ERROR_MESSAGE_TEMPLATE.format(
-                supported_versions = ", ".join(supported_versions),
-                rules_python = rules_python,
-            ),
-        ))
-    alias.append("    ),")  # Close the select expression
-    alias.append('    visibility = ["//visibility:public"],')
-    alias.append(")")  # Close the alias() expression
-    return "\n".join(alias)
-
-whl_library_alias = repository_rule(
-    _whl_library_alias_impl,
-    attrs = {
-        "default_version": attr.string(
-            mandatory = False,
-            doc = "Optional Python version in major.minor format, e.g. '3.10'." +
-                  "The Python version of the wheel to use when the versions " +
-                  "from `version_map` don't match. This allows the default " +
-                  "(version unaware) rules to match and select a wheel. If " +
-                  "not specified, then the default rules won't be able to " +
-                  "resolve a wheel and an error will occur.",
-        ),
-        "version_map": attr.string_dict(mandatory = True),
-        "wheel_name": attr.string(mandatory = True),
-        "_rules_python_workspace": attr.label(default = Label("//:WORKSPACE")),
-    },
-)
-
-def multi_pip_parse(name, default_version, python_versions, python_interpreter_target, requirements_lock, **kwargs):
-    """NOT INTENDED FOR DIRECT USE!
-
-    This is intended to be used by the multi_pip_parse implementation in the template of the
-    multi_toolchain_aliases repository rule.
-
-    Args:
-        name: the name of the multi_pip_parse repository.
-        default_version: the default Python version.
-        python_versions: all Python toolchain versions currently registered.
-        python_interpreter_target: a dictionary which keys are Python versions and values are resolved host interpreters.
-        requirements_lock: a dictionary which keys are Python versions and values are locked requirements files.
-        **kwargs: extra arguments passed to all wrapped pip_parse.
-
-    Returns:
-        The internal implementation of multi_pip_parse repository rule.
-    """
-    pip_parses = {}
-    for python_version in python_versions:
-        if not python_version in python_interpreter_target:
-            fail("Missing python_interpreter_target for Python version %s in '%s'" % (python_version, name))
-        if not python_version in requirements_lock:
-            fail("Missing requirements_lock for Python version %s in '%s'" % (python_version, name))
-
-        pip_parse_name = name + "_" + python_version.replace(".", "_")
-        pip_parse(
-            name = pip_parse_name,
-            python_interpreter_target = python_interpreter_target[python_version],
-            requirements_lock = requirements_lock[python_version],
-            **kwargs
-        )
-        pip_parses[python_version] = pip_parse_name
-
-    return _multi_pip_parse(
-        name = name,
-        default_version = default_version,
-        pip_parses = pip_parses,
-    )
+whl_filegroup = _whl_filegroup
 
 # Extra utilities visible to rules_python users.
 pip_utils = struct(
     normalize_name = normalize_name,
 )
+
+# The following are only exported here because they are used from
+# multi_toolchain_aliases repository_rule, not intended for public use.
+#
+# See ./private/toolchains_repo.bzl
+multi_pip_parse = _multi_pip_parse
+whl_library_alias = _whl_library_alias
diff --git a/python/pip_install/BUILD.bazel b/python/pip_install/BUILD.bazel
index 4bcd5b8..683199f 100644
--- a/python/pip_install/BUILD.bazel
+++ b/python/pip_install/BUILD.bazel
@@ -22,92 +22,39 @@
     name = "pip_repository_bzl",
     srcs = ["pip_repository.bzl"],
     deps = [
-        ":repositories_bzl",
-        ":requirements_parser_bzl",
-        "//python:repositories_bzl",
-        "//python:versions_bzl",
-        "//python/pip_install/private:generate_group_library_build_bazel_bzl",
-        "//python/pip_install/private:generate_whl_library_build_bazel_bzl",
-        "//python/pip_install/private:srcs_bzl",
-        "//python/private:bzlmod_enabled_bzl",
-        "//python/private:normalize_name_bzl",
-        "//python/private:parse_whl_name_bzl",
-        "//python/private:patch_whl_bzl",
-        "//python/private:render_pkg_aliases_bzl",
-        "//python/private:toolchains_repo_bzl",
-        "//python/private:which_bzl",
-        "//python/private:whl_target_platforms_bzl",
-        "@bazel_skylib//lib:sets",
+        "//python/private/pypi:group_library_bzl",
+        "//python/private/pypi:package_annotation_bzl",
+        "//python/private/pypi:pip_repository_bzl",
+        "//python/private/pypi:whl_library_bzl",
     ],
 )
 
 bzl_library(
     name = "requirements_bzl",
     srcs = ["requirements.bzl"],
-    deps = [
-        ":repositories_bzl",
-        "//python:defs_bzl",
-    ],
-)
-
-bzl_library(
-    name = "requirements_parser_bzl",
-    srcs = ["requirements_parser.bzl"],
+    deps = ["//python/private/pypi:pip_compile_bzl"],
 )
 
 bzl_library(
     name = "repositories_bzl",
     srcs = ["repositories.bzl"],
     deps = [
-        "//:version_bzl",
-        "//python/private:bazel_tools_bzl",
-        "@bazel_skylib//lib:versions",
+        "//python/private/pypi:deps_bzl",
     ],
 )
 
 filegroup(
     name = "distribution",
-    srcs = glob(["*.bzl"]) + [
-        "BUILD.bazel",
-        "pip_repository_requirements.bzl.tmpl",
-        "//python/pip_install/private:distribution",
-        "//python/pip_install/tools/dependency_resolver:distribution",
-        "//python/pip_install/tools/wheel_installer:distribution",
-    ],
-    visibility = ["//:__pkg__"],
-)
-
-filegroup(
-    name = "repositories",
-    srcs = ["repositories.bzl"],
-    visibility = ["//tools/private/update_deps:__pkg__"],
-)
-
-filegroup(
-    name = "requirements_txt",
-    srcs = ["tools/requirements.txt"],
-    visibility = ["//tools/private/update_deps:__pkg__"],
+    srcs = glob(["**"]),
+    visibility = ["//python:__pkg__"],
 )
 
 filegroup(
     name = "bzl",
-    srcs = glob(["*.bzl"]) + [
-        "//python/pip_install/private:bzl_srcs",
-    ],
+    srcs = glob(["*.bzl"]),
     visibility = ["//:__subpackages__"],
 )
 
-filegroup(
-    name = "py_srcs",
-    srcs = [
-        "//python/pip_install/tools/dependency_resolver:py_srcs",
-        "//python/pip_install/tools/wheel_installer:py_srcs",
-        "//python/private:repack_whl.py",
-        "//tools:wheelmaker.py",
-    ],
-    visibility = ["//python/pip_install/private:__pkg__"],
-)
-
 exports_files(
     glob(["*.bzl"]),
     visibility = ["//docs:__pkg__"],
diff --git a/python/pip_install/pip_repository.bzl b/python/pip_install/pip_repository.bzl
index 3e4878b..18deee1 100644
--- a/python/pip_install/pip_repository.bzl
+++ b/python/pip_install/pip_repository.bzl
@@ -14,956 +14,13 @@
 
 ""
 
-load("@bazel_skylib//lib:sets.bzl", "sets")
-load("//python:repositories.bzl", "is_standalone_interpreter")
-load("//python:versions.bzl", "WINDOWS_NAME")
-load("//python/pip_install:repositories.bzl", "all_requirements")
-load("//python/pip_install:requirements_parser.bzl", parse_requirements = "parse")
-load("//python/pip_install/private:generate_group_library_build_bazel.bzl", "generate_group_library_build_bazel")
-load("//python/pip_install/private:generate_whl_library_build_bazel.bzl", "generate_whl_library_build_bazel")
-load("//python/pip_install/private:srcs.bzl", "PIP_INSTALL_PY_SRCS")
-load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
-load("//python/private:normalize_name.bzl", "normalize_name")
-load("//python/private:parse_whl_name.bzl", "parse_whl_name")
-load("//python/private:patch_whl.bzl", "patch_whl")
-load("//python/private:render_pkg_aliases.bzl", "render_pkg_aliases")
-load("//python/private:toolchains_repo.bzl", "get_host_os_arch")
-load("//python/private:which.bzl", "which_with_fail")
-load("//python/private:whl_target_platforms.bzl", "whl_target_platforms")
+load("//python/private/pypi:group_library.bzl", _group_library = "group_library")
+load("//python/private/pypi:package_annotation.bzl", _package_annotation = "package_annotation")
+load("//python/private/pypi:pip_repository.bzl", _pip_repository = "pip_repository")
+load("//python/private/pypi:whl_library.bzl", _whl_library = "whl_library")
 
-CPPFLAGS = "CPPFLAGS"
-
-COMMAND_LINE_TOOLS_PATH_SLUG = "commandlinetools"
-
-_WHEEL_ENTRY_POINT_PREFIX = "rules_python_wheel_entry_point"
-
-def _construct_pypath(rctx):
-    """Helper function to construct a PYTHONPATH.
-
-    Contains entries for code in this repo as well as packages downloaded from //python/pip_install:repositories.bzl.
-    This allows us to run python code inside repository rule implementations.
-
-    Args:
-        rctx: Handle to the repository_context.
-
-    Returns: String of the PYTHONPATH.
-    """
-
-    separator = ":" if not "windows" in rctx.os.name.lower() else ";"
-    pypath = separator.join([
-        str(rctx.path(entry).dirname)
-        for entry in rctx.attr._python_path_entries
-    ])
-    return pypath
-
-def _get_python_interpreter_attr(rctx):
-    """A helper function for getting the `python_interpreter` attribute or it's default
-
-    Args:
-        rctx (repository_ctx): Handle to the rule repository context.
-
-    Returns:
-        str: The attribute value or it's default
-    """
-    if rctx.attr.python_interpreter:
-        return rctx.attr.python_interpreter
-
-    if "win" in rctx.os.name:
-        return "python.exe"
-    else:
-        return "python3"
-
-def _resolve_python_interpreter(rctx):
-    """Helper function to find the python interpreter from the common attributes
-
-    Args:
-        rctx: Handle to the rule repository context.
-
-    Returns:
-        `path` object, for the resolved path to the Python interpreter.
-    """
-    python_interpreter = _get_python_interpreter_attr(rctx)
-
-    if rctx.attr.python_interpreter_target != None:
-        python_interpreter = rctx.path(rctx.attr.python_interpreter_target)
-
-        if BZLMOD_ENABLED:
-            (os, _) = get_host_os_arch(rctx)
-
-            # On Windows, the symlink doesn't work because Windows attempts to find
-            # Python DLLs where the symlink is, not where the symlink points.
-            if os == WINDOWS_NAME:
-                python_interpreter = python_interpreter.realpath
-    elif "/" not in python_interpreter:
-        # It's a plain command, e.g. "python3", to look up in the environment.
-        found_python_interpreter = rctx.which(python_interpreter)
-        if not found_python_interpreter:
-            fail("python interpreter `{}` not found in PATH".format(python_interpreter))
-        python_interpreter = found_python_interpreter
-    else:
-        python_interpreter = rctx.path(python_interpreter)
-    return python_interpreter
-
-def _get_xcode_location_cflags(rctx):
-    """Query the xcode sdk location to update cflags
-
-    Figure out if this interpreter target comes from rules_python, and patch the xcode sdk location if so.
-    Pip won't be able to compile c extensions from sdists with the pre built python distributions from indygreg
-    otherwise. See https://github.com/indygreg/python-build-standalone/issues/103
-    """
-
-    # Only run on MacOS hosts
-    if not rctx.os.name.lower().startswith("mac os"):
-        return []
-
-    xcode_sdk_location = rctx.execute([which_with_fail("xcode-select", rctx), "--print-path"])
-    if xcode_sdk_location.return_code != 0:
-        return []
-
-    xcode_root = xcode_sdk_location.stdout.strip()
-    if COMMAND_LINE_TOOLS_PATH_SLUG not in xcode_root.lower():
-        # This is a full xcode installation somewhere like /Applications/Xcode13.0.app/Contents/Developer
-        # so we need to change the path to to the macos specific tools which are in a different relative
-        # path than xcode installed command line tools.
-        xcode_root = "{}/Platforms/MacOSX.platform/Developer".format(xcode_root)
-    return [
-        "-isysroot {}/SDKs/MacOSX.sdk".format(xcode_root),
-    ]
-
-def _get_toolchain_unix_cflags(rctx, python_interpreter):
-    """Gather cflags from a standalone toolchain for unix systems.
-
-    Pip won't be able to compile c extensions from sdists with the pre built python distributions from indygreg
-    otherwise. See https://github.com/indygreg/python-build-standalone/issues/103
-    """
-
-    # Only run on Unix systems
-    if not rctx.os.name.lower().startswith(("mac os", "linux")):
-        return []
-
-    # Only update the location when using a standalone toolchain.
-    if not is_standalone_interpreter(rctx, python_interpreter):
-        return []
-
-    er = rctx.execute([
-        python_interpreter,
-        "-c",
-        "import sys; print(f'{sys.version_info[0]}.{sys.version_info[1]}', end='')",
-    ])
-    if er.return_code != 0:
-        fail("could not get python version from interpreter (status {}): {}".format(er.return_code, er.stderr))
-    _python_version = er.stdout
-    include_path = "{}/include/python{}".format(
-        python_interpreter.dirname,
-        _python_version,
-    )
-
-    return ["-isystem {}".format(include_path)]
-
-def use_isolated(ctx, attr):
-    """Determine whether or not to pass the pip `--isolated` flag to the pip invocation.
-
-    Args:
-        ctx: repository or module context
-        attr: attributes for the repo rule or tag extension
-
-    Returns:
-        True if --isolated should be passed
-    """
-    use_isolated = attr.isolated
-
-    # The environment variable will take precedence over the attribute
-    isolated_env = ctx.os.environ.get("RULES_PYTHON_PIP_ISOLATED", None)
-    if isolated_env != None:
-        if isolated_env.lower() in ("0", "false"):
-            use_isolated = False
-        else:
-            use_isolated = True
-
-    return use_isolated
-
-def _parse_optional_attrs(rctx, args):
-    """Helper function to parse common attributes of pip_repository and whl_library repository rules.
-
-    This function also serializes the structured arguments as JSON
-    so they can be passed on the command line to subprocesses.
-
-    Args:
-        rctx: Handle to the rule repository context.
-        args: A list of parsed args for the rule.
-    Returns: Augmented args list.
-    """
-
-    if use_isolated(rctx, rctx.attr):
-        args.append("--isolated")
-
-    # Check for None so we use empty default types from our attrs.
-    # Some args want to be list, and some want to be dict.
-    if rctx.attr.extra_pip_args != None:
-        args += [
-            "--extra_pip_args",
-            json.encode(struct(arg = rctx.attr.extra_pip_args)),
-        ]
-
-    if rctx.attr.download_only:
-        args.append("--download_only")
-
-    if rctx.attr.pip_data_exclude != None:
-        args += [
-            "--pip_data_exclude",
-            json.encode(struct(arg = rctx.attr.pip_data_exclude)),
-        ]
-
-    if rctx.attr.enable_implicit_namespace_pkgs:
-        args.append("--enable_implicit_namespace_pkgs")
-
-    if rctx.attr.environment != None:
-        args += [
-            "--environment",
-            json.encode(struct(arg = rctx.attr.environment)),
-        ]
-
-    return args
-
-def _create_repository_execution_environment(rctx, python_interpreter):
-    """Create a environment dictionary for processes we spawn with rctx.execute.
-
-    Args:
-        rctx (repository_ctx): The repository context.
-        python_interpreter (path): The resolved python interpreter.
-    Returns:
-        Dictionary of environment variable suitable to pass to rctx.execute.
-    """
-
-    # Gather any available CPPFLAGS values
-    cppflags = []
-    cppflags.extend(_get_xcode_location_cflags(rctx))
-    cppflags.extend(_get_toolchain_unix_cflags(rctx, python_interpreter))
-
-    env = {
-        "PYTHONPATH": _construct_pypath(rctx),
-        CPPFLAGS: " ".join(cppflags),
-    }
-
-    return env
-
-_BUILD_FILE_CONTENTS = """\
-package(default_visibility = ["//visibility:public"])
-
-# Ensure the `requirements.bzl` source can be accessed by stardoc, since users load() from it
-exports_files(["requirements.bzl"])
-"""
-
-def locked_requirements_label(ctx, attr):
-    """Get the preferred label for a locked requirements file based on platform.
-
-    Args:
-        ctx: repository or module context
-        attr: attributes for the repo rule or tag extension
-
-    Returns:
-        Label
-    """
-    os = ctx.os.name.lower()
-    requirements_txt = attr.requirements_lock
-    if os.startswith("mac os") and attr.requirements_darwin != None:
-        requirements_txt = attr.requirements_darwin
-    elif os.startswith("linux") and attr.requirements_linux != None:
-        requirements_txt = attr.requirements_linux
-    elif "win" in os and attr.requirements_windows != None:
-        requirements_txt = attr.requirements_windows
-    if not requirements_txt:
-        fail("""\
-A requirements_lock attribute must be specified, or a platform-specific lockfile using one of the requirements_* attributes.
-""")
-    return requirements_txt
-
-def _pip_repository_impl(rctx):
-    requirements_txt = locked_requirements_label(rctx, rctx.attr)
-    content = rctx.read(requirements_txt)
-    parsed_requirements_txt = parse_requirements(content)
-
-    packages = [(normalize_name(name), requirement) for name, requirement in parsed_requirements_txt.requirements]
-
-    bzl_packages = sorted([normalize_name(name) for name, _ in parsed_requirements_txt.requirements])
-
-    # Normalize cycles first
-    requirement_cycles = {
-        name: sorted(sets.to_list(sets.make(deps)))
-        for name, deps in rctx.attr.experimental_requirement_cycles.items()
-    }
-
-    # Check for conflicts between cycles _before_ we normalize package names so
-    # that reported errors use the names the user specified
-    for i in range(len(requirement_cycles)):
-        left_group = requirement_cycles.keys()[i]
-        left_deps = requirement_cycles.values()[i]
-        for j in range(len(requirement_cycles) - (i + 1)):
-            right_deps = requirement_cycles.values()[1 + i + j]
-            right_group = requirement_cycles.keys()[1 + i + j]
-            for d in left_deps:
-                if d in right_deps:
-                    fail("Error: Requirement %s cannot be repeated between cycles %s and %s; please merge the cycles." % (d, left_group, right_group))
-
-    # And normalize the names as used in the cycle specs
-    #
-    # NOTE: We must check that a listed dependency is actually in the actual
-    # requirements set for the current platform so that we can support cycles in
-    # platform-conditional requirements. Otherwise we'll blindly generate a
-    # label referencing a package which may not be installed on the current
-    # platform.
-    requirement_cycles = {
-        normalize_name(name): sorted([normalize_name(d) for d in group if normalize_name(d) in bzl_packages])
-        for name, group in requirement_cycles.items()
-    }
-
-    imports = [
-        # NOTE: Maintain the order consistent with `buildifier`
-        'load("@rules_python//python:pip.bzl", "pip_utils")',
-        'load("@rules_python//python/pip_install:pip_repository.bzl", "group_library", "whl_library")',
-    ]
-
-    annotations = {}
-    for pkg, annotation in rctx.attr.annotations.items():
-        filename = "{}.annotation.json".format(normalize_name(pkg))
-        rctx.file(filename, json.encode_indent(json.decode(annotation)))
-        annotations[pkg] = "@{name}//:{filename}".format(name = rctx.attr.name, filename = filename)
-
-    tokenized_options = []
-    for opt in parsed_requirements_txt.options:
-        for p in opt.split(" "):
-            tokenized_options.append(p)
-
-    options = tokenized_options + rctx.attr.extra_pip_args
-
-    config = {
-        "download_only": rctx.attr.download_only,
-        "enable_implicit_namespace_pkgs": rctx.attr.enable_implicit_namespace_pkgs,
-        "environment": rctx.attr.environment,
-        "extra_pip_args": options,
-        "isolated": use_isolated(rctx, rctx.attr),
-        "pip_data_exclude": rctx.attr.pip_data_exclude,
-        "python_interpreter": _get_python_interpreter_attr(rctx),
-        "quiet": rctx.attr.quiet,
-        "repo": rctx.attr.name,
-        "repo_prefix": "{}_".format(rctx.attr.name),
-        "timeout": rctx.attr.timeout,
-    }
-
-    if rctx.attr.python_interpreter_target:
-        config["python_interpreter_target"] = str(rctx.attr.python_interpreter_target)
-    if rctx.attr.experimental_target_platforms:
-        config["experimental_target_platforms"] = rctx.attr.experimental_target_platforms
-
-    if rctx.attr.incompatible_generate_aliases:
-        macro_tmpl = "@%s//{}:{}" % rctx.attr.name
-        aliases = render_pkg_aliases(repo_name = rctx.attr.name, bzl_packages = bzl_packages)
-        for path, contents in aliases.items():
-            rctx.file(path, contents)
-    else:
-        macro_tmpl = "@%s_{}//:{}" % rctx.attr.name
-
-    rctx.file("BUILD.bazel", _BUILD_FILE_CONTENTS)
-    rctx.template("requirements.bzl", rctx.attr._template, substitutions = {
-        "%%ALL_DATA_REQUIREMENTS%%": _format_repr_list([
-            macro_tmpl.format(p, "data")
-            for p in bzl_packages
-        ]),
-        "%%ALL_REQUIREMENTS%%": _format_repr_list([
-            macro_tmpl.format(p, "pkg")
-            for p in bzl_packages
-        ]),
-        "%%ALL_REQUIREMENT_GROUPS%%": _format_dict(_repr_dict(requirement_cycles)),
-        "%%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%": _format_dict(_repr_dict({
-            p: macro_tmpl.format(p, "whl")
-            for p in bzl_packages
-        })),
-        "%%ANNOTATIONS%%": _format_dict(_repr_dict(annotations)),
-        "%%CONFIG%%": _format_dict(_repr_dict(config)),
-        "%%EXTRA_PIP_ARGS%%": json.encode(options),
-        "%%IMPORTS%%": "\n".join(imports),
-        "%%MACRO_TMPL%%": macro_tmpl,
-        "%%NAME%%": rctx.attr.name,
-        "%%PACKAGES%%": _format_repr_list(
-            [
-                ("{}_{}".format(rctx.attr.name, p), r)
-                for p, r in packages
-            ],
-        ),
-        "%%REQUIREMENTS_LOCK%%": str(requirements_txt),
-    })
-
-    return
-
-common_env = [
-    "RULES_PYTHON_PIP_ISOLATED",
-]
-
-common_attrs = {
-    "download_only": attr.bool(
-        doc = """
-Whether to use "pip download" instead of "pip wheel". Disables building wheels from source, but allows use of
---platform, --python-version, --implementation, and --abi in --extra_pip_args to download wheels for a different
-platform from the host platform.
-        """,
-    ),
-    "enable_implicit_namespace_pkgs": attr.bool(
-        default = False,
-        doc = """
-If true, disables conversion of native namespace packages into pkg-util style namespace packages. When set all py_binary
-and py_test targets must specify either `legacy_create_init=False` or the global Bazel option
-`--incompatible_default_to_explicit_init_py` to prevent `__init__.py` being automatically generated in every directory.
-
-This option is required to support some packages which cannot handle the conversion to pkg-util style.
-            """,
-    ),
-    "environment": attr.string_dict(
-        doc = """
-Environment variables to set in the pip subprocess.
-Can be used to set common variables such as `http_proxy`, `https_proxy` and `no_proxy`
-Note that pip is run with "--isolated" on the CLI so `PIP_<VAR>_<NAME>`
-style env vars are ignored, but env vars that control requests and urllib3
-can be passed.
-        """,
-        default = {},
-    ),
-    "experimental_requirement_cycles": attr.string_list_dict(
-        default = {},
-        doc = """\
-A mapping of dependency cycle names to a list of requirements which form that cycle.
-
-Requirements which form cycles will be installed together and taken as
-dependencies together in order to ensure that the cycle is always satisified.
-
-Example:
-  `sphinx` depends on `sphinxcontrib-serializinghtml`
-  When listing both as requirements, ala
-
-  ```
-  py_binary(
-    name = "doctool",
-    ...
-    deps = [
-      "@pypi//sphinx:pkg",
-      "@pypi//sphinxcontrib_serializinghtml",
-     ]
-  )
-  ```
-
-  Will produce a Bazel error such as
-
-  ```
-  ERROR: .../external/pypi_sphinxcontrib_serializinghtml/BUILD.bazel:44:6: in alias rule @pypi_sphinxcontrib_serializinghtml//:pkg: cycle in dependency graph:
-      //:doctool (...)
-      @pypi//sphinxcontrib_serializinghtml:pkg (...)
-  .-> @pypi_sphinxcontrib_serializinghtml//:pkg (...)
-  |   @pypi_sphinxcontrib_serializinghtml//:_pkg (...)
-  |   @pypi_sphinx//:pkg (...)
-  |   @pypi_sphinx//:_pkg (...)
-  `-- @pypi_sphinxcontrib_serializinghtml//:pkg (...)
-  ```
-
-  Which we can resolve by configuring these two requirements to be installed together as a cycle
-
-  ```
-  pip_parse(
-    ...
-    experimental_requirement_cycles = {
-      "sphinx": [
-        "sphinx",
-        "sphinxcontrib-serializinghtml",
-      ]
-    },
-  )
-  ```
-
-Warning:
-  If a dependency participates in multiple cycles, all of those cycles must be
-  collapsed down to one. For instance `a <-> b` and `a <-> c` cannot be listed
-  as two separate cycles.
-""",
-    ),
-    "experimental_target_platforms": attr.string_list(
-        default = [],
-        doc = """\
-A list of platforms that we will generate the conditional dependency graph for
-cross platform wheels by parsing the wheel metadata. This will generate the
-correct dependencies for packages like `sphinx` or `pylint`, which include
-`colorama` when installed and used on Windows platforms.
-
-An empty list means falling back to the legacy behaviour where the host
-platform is the target platform.
-
-WARNING: It may not work as expected in cases where the python interpreter
-implementation that is being used at runtime is different between different platforms.
-This has been tested for CPython only.
-
-Special values: `all` (for generating deps for all platforms), `host` (for
-generating deps for the host platform only). `linux_*` and other `<os>_*` values.
-In the future we plan to set `all` as the default to this attribute.
-
-For specific target platforms use values of the form `<os>_<arch>` where `<os>`
-is one of `linux`, `osx`, `windows` and arch is one of `x86_64`, `x86_32`,
-`aarch64`, `s390x` and `ppc64le`.
-""",
-    ),
-    "extra_pip_args": attr.string_list(
-        doc = "Extra arguments to pass on to pip. Must not contain spaces.",
-    ),
-    "isolated": attr.bool(
-        doc = """\
-Whether or not to pass the [--isolated](https://pip.pypa.io/en/stable/cli/pip/#cmdoption-isolated) flag to
-the underlying pip command. Alternatively, the `RULES_PYTHON_PIP_ISOLATED` environment variable can be used
-to control this flag.
-""",
-        default = True,
-    ),
-    "pip_data_exclude": attr.string_list(
-        doc = "Additional data exclusion parameters to add to the pip packages BUILD file.",
-    ),
-    "python_interpreter": attr.string(
-        doc = """\
-The python interpreter to use. This can either be an absolute path or the name
-of a binary found on the host's `PATH` environment variable. If no value is set
-`python3` is defaulted for Unix systems and `python.exe` for Windows.
-""",
-        # NOTE: This attribute should not have a default. See `_get_python_interpreter_attr`
-        # default = "python3"
-    ),
-    "python_interpreter_target": attr.label(
-        allow_single_file = True,
-        doc = """
-If you are using a custom python interpreter built by another repository rule,
-use this attribute to specify its BUILD target. This allows pip_repository to invoke
-pip using the same interpreter as your toolchain. If set, takes precedence over
-python_interpreter. An example value: "@python3_x86_64-unknown-linux-gnu//:python".
-""",
-    ),
-    "quiet": attr.bool(
-        default = True,
-        doc = "If True, suppress printing stdout and stderr output to the terminal.",
-    ),
-    "repo_prefix": attr.string(
-        doc = """
-Prefix for the generated packages will be of the form `@<prefix><sanitized-package-name>//...`
-""",
-    ),
-    # 600 is documented as default here: https://docs.bazel.build/versions/master/skylark/lib/repository_ctx.html#execute
-    "timeout": attr.int(
-        default = 600,
-        doc = "Timeout (in seconds) on the rule's execution duration.",
-    ),
-    "_py_srcs": attr.label_list(
-        doc = "Python sources used in the repository rule",
-        allow_files = True,
-        default = PIP_INSTALL_PY_SRCS,
-    ),
-}
-
-pip_repository_attrs = {
-    "annotations": attr.string_dict(
-        doc = "Optional annotations to apply to packages",
-    ),
-    "incompatible_generate_aliases": attr.bool(
-        default = True,
-        doc = """\
-If true, extra aliases will be created in the main `hub` repo - i.e. the repo
-where the `requirements.bzl` is located. This means that for a Python package
-`PyYAML` initialized within a `pip` `hub_repo` there will be the following
-aliases generated:
-- `@pip//pyyaml` will point to `@pip_pyyaml//:pkg`
-- `@pip//pyyaml:data` will point to `@pip_pyyaml//:data`
-- `@pip//pyyaml:dist_info` will point to `@pip_pyyaml//:dist_info`
-- `@pip//pyyaml:pkg` will point to `@pip_pyyaml//:pkg`
-- `@pip//pyyaml:whl` will point to `@pip_pyyaml//:whl`
-
-This is to keep the dependencies coming from PyPI to have more ergonomic label
-names and support smooth transition to `bzlmod`.
-""",
-    ),
-    "requirements_darwin": attr.label(
-        allow_single_file = True,
-        doc = "Override the requirements_lock attribute when the host platform is Mac OS",
-    ),
-    "requirements_linux": attr.label(
-        allow_single_file = True,
-        doc = "Override the requirements_lock attribute when the host platform is Linux",
-    ),
-    "requirements_lock": attr.label(
-        allow_single_file = True,
-        doc = """\
-A fully resolved 'requirements.txt' pip requirement file containing the
-transitive set of your dependencies. If this file is passed instead of
-'requirements' no resolve will take place and pip_repository will create
-individual repositories for each of your dependencies so that wheels are
-fetched/built only for the targets specified by 'build/run/test'. Note that if
-your lockfile is platform-dependent, you can use the `requirements_[platform]`
-attributes.
-""",
-    ),
-    "requirements_windows": attr.label(
-        allow_single_file = True,
-        doc = "Override the requirements_lock attribute when the host platform is Windows",
-    ),
-    "_template": attr.label(
-        default = ":pip_repository_requirements.bzl.tmpl",
-    ),
-}
-
-pip_repository_attrs.update(**common_attrs)
-
-pip_repository = repository_rule(
-    attrs = pip_repository_attrs,
-    doc = """Accepts a locked/compiled requirements file and installs the dependencies listed within.
-
-Those dependencies become available in a generated `requirements.bzl` file.
-You can instead check this `requirements.bzl` file into your repo, see the "vendoring" section below.
-
-In your WORKSPACE file:
-
-```starlark
-load("@rules_python//python:pip.bzl", "pip_parse")
-
-pip_parse(
-    name = "pip_deps",
-    requirements_lock = ":requirements.txt",
-)
-
-load("@pip_deps//:requirements.bzl", "install_deps")
-
-install_deps()
-```
-
-You can then reference installed dependencies from a `BUILD` file with:
-
-```starlark
-load("@pip_deps//:requirements.bzl", "requirement")
-
-py_library(
-    name = "bar",
-    ...
-    deps = [
-       "//my/other:dep",
-       requirement("requests"),
-       requirement("numpy"),
-    ],
-)
-```
-
-In addition to the `requirement` macro, which is used to access the generated `py_library`
-target generated from a package's wheel, The generated `requirements.bzl` file contains
-functionality for exposing [entry points][whl_ep] as `py_binary` targets as well.
-
-[whl_ep]: https://packaging.python.org/specifications/entry-points/
-
-```starlark
-load("@pip_deps//:requirements.bzl", "entry_point")
-
-alias(
-    name = "pip-compile",
-    actual = entry_point(
-        pkg = "pip-tools",
-        script = "pip-compile",
-    ),
-)
-```
-
-Note that for packages whose name and script are the same, only the name of the package
-is needed when calling the `entry_point` macro.
-
-```starlark
-load("@pip_deps//:requirements.bzl", "entry_point")
-
-alias(
-    name = "flake8",
-    actual = entry_point("flake8"),
-)
-```
-
-### Vendoring the requirements.bzl file
-
-In some cases you may not want to generate the requirements.bzl file as a repository rule
-while Bazel is fetching dependencies. For example, if you produce a reusable Bazel module
-such as a ruleset, you may want to include the requirements.bzl file rather than make your users
-install the WORKSPACE setup to generate it.
-See https://github.com/bazelbuild/rules_python/issues/608
-
-This is the same workflow as Gazelle, which creates `go_repository` rules with
-[`update-repos`](https://github.com/bazelbuild/bazel-gazelle#update-repos)
-
-To do this, use the "write to source file" pattern documented in
-https://blog.aspect.dev/bazel-can-write-to-the-source-folder
-to put a copy of the generated requirements.bzl into your project.
-Then load the requirements.bzl file directly rather than from the generated repository.
-See the example in rules_python/examples/pip_parse_vendored.
-""",
-    implementation = _pip_repository_impl,
-    environ = common_env,
-)
-
-def _whl_library_impl(rctx):
-    python_interpreter = _resolve_python_interpreter(rctx)
-    args = [
-        python_interpreter,
-        "-m",
-        "python.pip_install.tools.wheel_installer.wheel_installer",
-        "--requirement",
-        rctx.attr.requirement,
-    ]
-
-    args = _parse_optional_attrs(rctx, args)
-
-    # Manually construct the PYTHONPATH since we cannot use the toolchain here
-    environment = _create_repository_execution_environment(rctx, python_interpreter)
-
-    result = rctx.execute(
-        args,
-        environment = environment,
-        quiet = rctx.attr.quiet,
-        timeout = rctx.attr.timeout,
-    )
-    if result.return_code:
-        fail("whl_library %s failed: %s (%s) error code: '%s'" % (rctx.attr.name, result.stdout, result.stderr, result.return_code))
-
-    whl_path = rctx.path(json.decode(rctx.read("whl_file.json"))["whl_file"])
-    if not rctx.delete("whl_file.json"):
-        fail("failed to delete the whl_file.json file")
-
-    if rctx.attr.whl_patches:
-        patches = {}
-        for patch_file, json_args in rctx.attr.whl_patches.items():
-            patch_dst = struct(**json.decode(json_args))
-            if whl_path.basename in patch_dst.whls:
-                patches[patch_file] = patch_dst.patch_strip
-
-        whl_path = patch_whl(
-            rctx,
-            python_interpreter = python_interpreter,
-            whl_path = whl_path,
-            patches = patches,
-            quiet = rctx.attr.quiet,
-            timeout = rctx.attr.timeout,
-        )
-
-    target_platforms = rctx.attr.experimental_target_platforms
-    if target_platforms:
-        parsed_whl = parse_whl_name(whl_path.basename)
-        if parsed_whl.platform_tag != "any":
-            # NOTE @aignas 2023-12-04: if the wheel is a platform specific
-            # wheel, we only include deps for that target platform
-            target_platforms = [
-                "{}_{}".format(p.os, p.cpu)
-                for p in whl_target_platforms(parsed_whl.platform_tag)
-            ]
-
-    result = rctx.execute(
-        args + [
-            "--whl-file",
-            whl_path,
-        ] + ["--platform={}".format(p) for p in target_platforms],
-        environment = environment,
-        quiet = rctx.attr.quiet,
-        timeout = rctx.attr.timeout,
-    )
-
-    if result.return_code:
-        fail("whl_library %s failed: %s (%s) error code: '%s'" % (rctx.attr.name, result.stdout, result.stderr, result.return_code))
-
-    metadata = json.decode(rctx.read("metadata.json"))
-    rctx.delete("metadata.json")
-
-    entry_points = {}
-    for item in metadata["entry_points"]:
-        name = item["name"]
-        module = item["module"]
-        attribute = item["attribute"]
-
-        # There is an extreme edge-case with entry_points that end with `.py`
-        # See: https://github.com/bazelbuild/bazel/blob/09c621e4cf5b968f4c6cdf905ab142d5961f9ddc/src/test/java/com/google/devtools/build/lib/rules/python/PyBinaryConfiguredTargetTest.java#L174
-        entry_point_without_py = name[:-3] + "_py" if name.endswith(".py") else name
-        entry_point_target_name = (
-            _WHEEL_ENTRY_POINT_PREFIX + "_" + entry_point_without_py
-        )
-        entry_point_script_name = entry_point_target_name + ".py"
-
-        rctx.file(
-            entry_point_script_name,
-            _generate_entry_point_contents(module, attribute),
-        )
-        entry_points[entry_point_without_py] = entry_point_script_name
-
-    build_file_contents = generate_whl_library_build_bazel(
-        repo_prefix = rctx.attr.repo_prefix,
-        whl_name = whl_path.basename,
-        dependencies = metadata["deps"],
-        dependencies_by_platform = metadata["deps_by_platform"],
-        group_name = rctx.attr.group_name,
-        group_deps = rctx.attr.group_deps,
-        data_exclude = rctx.attr.pip_data_exclude,
-        tags = [
-            "pypi_name=" + metadata["name"],
-            "pypi_version=" + metadata["version"],
-        ],
-        entry_points = entry_points,
-        annotation = None if not rctx.attr.annotation else struct(**json.decode(rctx.read(rctx.attr.annotation))),
-    )
-    rctx.file("BUILD.bazel", build_file_contents)
-
-    return
-
-def _generate_entry_point_contents(
-        module,
-        attribute,
-        shebang = "#!/usr/bin/env python3"):
-    """Generate the contents of an entry point script.
-
-    Args:
-        module (str): The name of the module to use.
-        attribute (str): The name of the attribute to call.
-        shebang (str, optional): The shebang to use for the entry point python
-            file.
-
-    Returns:
-        str: A string of python code.
-    """
-    contents = """\
-{shebang}
-import sys
-from {module} import {attribute}
-if __name__ == "__main__":
-    sys.exit({attribute}())
-""".format(
-        shebang = shebang,
-        module = module,
-        attribute = attribute,
-    )
-    return contents
-
-whl_library_attrs = {
-    "annotation": attr.label(
-        doc = (
-            "Optional json encoded file containing annotation to apply to the extracted wheel. " +
-            "See `package_annotation`"
-        ),
-        allow_files = True,
-    ),
-    "group_deps": attr.string_list(
-        doc = "List of dependencies to skip in order to break the cycles within a dependency group.",
-        default = [],
-    ),
-    "group_name": attr.string(
-        doc = "Name of the group, if any.",
-    ),
-    "repo": attr.string(
-        mandatory = True,
-        doc = "Pointer to parent repo name. Used to make these rules rerun if the parent repo changes.",
-    ),
-    "requirement": attr.string(
-        mandatory = True,
-        doc = "Python requirement string describing the package to make available",
-    ),
-    "whl_patches": attr.label_keyed_string_dict(
-        doc = """a label-keyed-string dict that has
-            json.encode(struct([whl_file], patch_strip]) as values. This
-            is to maintain flexibility and correct bzlmod extension interface
-            until we have a better way to define whl_library and move whl
-            patching to a separate place. INTERNAL USE ONLY.""",
-    ),
-    "_python_path_entries": attr.label_list(
-        # Get the root directory of these rules and keep them as a default attribute
-        # in order to avoid unnecessary repository fetching restarts.
-        #
-        # This is very similar to what was done in https://github.com/bazelbuild/rules_go/pull/3478
-        default = [
-            Label("//:BUILD.bazel"),
-        ] + [
-            # Includes all the external dependencies from repositories.bzl
-            Label("@" + repo + "//:BUILD.bazel")
-            for repo in all_requirements
-        ],
-    ),
-}
-
-whl_library_attrs.update(**common_attrs)
-
-whl_library = repository_rule(
-    attrs = whl_library_attrs,
-    doc = """
-Download and extracts a single wheel based into a bazel repo based on the requirement string passed in.
-Instantiated from pip_repository and inherits config options from there.""",
-    implementation = _whl_library_impl,
-    environ = common_env,
-)
-
-def package_annotation(
-        additive_build_content = None,
-        copy_files = {},
-        copy_executables = {},
-        data = [],
-        data_exclude_glob = [],
-        srcs_exclude_glob = []):
-    """Annotations to apply to the BUILD file content from package generated from a `pip_repository` rule.
-
-    [cf]: https://github.com/bazelbuild/bazel-skylib/blob/main/docs/copy_file_doc.md
-
-    Args:
-        additive_build_content (str, optional): Raw text to add to the generated `BUILD` file of a package.
-        copy_files (dict, optional): A mapping of `src` and `out` files for [@bazel_skylib//rules:copy_file.bzl][cf]
-        copy_executables (dict, optional): A mapping of `src` and `out` files for
-            [@bazel_skylib//rules:copy_file.bzl][cf]. Targets generated here will also be flagged as
-            executable.
-        data (list, optional): A list of labels to add as `data` dependencies to the generated `py_library` target.
-        data_exclude_glob (list, optional): A list of exclude glob patterns to add as `data` to the generated
-            `py_library` target.
-        srcs_exclude_glob (list, optional): A list of labels to add as `srcs` to the generated `py_library` target.
-
-    Returns:
-        str: A json encoded string of the provided content.
-    """
-    return json.encode(struct(
-        additive_build_content = additive_build_content,
-        copy_files = copy_files,
-        copy_executables = copy_executables,
-        data = data,
-        data_exclude_glob = data_exclude_glob,
-        srcs_exclude_glob = srcs_exclude_glob,
-    ))
-
-def _group_library_impl(rctx):
-    build_file_contents = generate_group_library_build_bazel(
-        repo_prefix = rctx.attr.repo_prefix,
-        groups = rctx.attr.groups,
-    )
-    rctx.file("BUILD.bazel", build_file_contents)
-
-group_library = repository_rule(
-    attrs = {
-        "groups": attr.string_list_dict(
-            doc = "A mapping of group names to requirements within that group.",
-        ),
-        "repo_prefix": attr.string(
-            doc = "Prefix used for the whl_library created components of each group",
-        ),
-    },
-    implementation = _group_library_impl,
-    doc = """
-Create a package containing only wrapper py_library and whl_library rules for implementing dependency groups.
-This is an implementation detail of dependency groups and should not be used alone.
-    """,
-)
-
-# pip_repository implementation
-
-def _format_list(items):
-    return "[{}]".format(", ".join(items))
-
-def _format_repr_list(strings):
-    return _format_list(
-        [repr(s) for s in strings],
-    )
-
-def _repr_dict(items):
-    return {k: repr(v) for k, v in items.items()}
-
-def _format_dict(items):
-    return "{{{}}}".format(", ".join(sorted(['"{}": {}'.format(k, v) for k, v in items.items()])))
+# Re-exports for backwards compatibility
+group_library = _group_library
+pip_repository = _pip_repository
+whl_library = _whl_library
+package_annotation = _package_annotation
diff --git a/python/pip_install/private/BUILD.bazel b/python/pip_install/private/BUILD.bazel
deleted file mode 100644
index 887d2d3..0000000
--- a/python/pip_install/private/BUILD.bazel
+++ /dev/null
@@ -1,48 +0,0 @@
-load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
-load(":pip_install_utils.bzl", "srcs_module")
-
-package(default_visibility = ["//:__subpackages__"])
-
-exports_files([
-    "srcs.bzl",
-])
-
-filegroup(
-    name = "distribution",
-    srcs = glob(["*"]),
-    visibility = ["//python/pip_install:__subpackages__"],
-)
-
-filegroup(
-    name = "bzl_srcs",
-    srcs = glob(["*.bzl"]),
-)
-
-srcs_module(
-    name = "srcs_module",
-    srcs = "//python/pip_install:py_srcs",
-    dest = ":srcs.bzl",
-)
-
-bzl_library(
-    name = "generate_whl_library_build_bazel_bzl",
-    srcs = ["generate_whl_library_build_bazel.bzl"],
-    deps = [
-        "//python/private:labels_bzl",
-        "//python/private:normalize_name_bzl",
-    ],
-)
-
-bzl_library(
-    name = "generate_group_library_build_bazel_bzl",
-    srcs = ["generate_group_library_build_bazel.bzl"],
-    deps = [
-        "//python/private:labels_bzl",
-        "//python/private:normalize_name_bzl",
-    ],
-)
-
-bzl_library(
-    name = "srcs_bzl",
-    srcs = ["srcs.bzl"],
-)
diff --git a/python/pip_install/private/pip_install_utils.bzl b/python/pip_install/private/pip_install_utils.bzl
deleted file mode 100644
index 488583d..0000000
--- a/python/pip_install/private/pip_install_utils.bzl
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utilities for `rules_python` pip rules"""
-
-_SRCS_TEMPLATE = """\
-\"\"\"A generated file containing all source files used for `@rules_python//python/pip_install:pip_repository.bzl` rules
-
-This file is auto-generated from the `@rules_python//python/pip_install/private:srcs_module.update` target. Please
-`bazel run` this target to apply any updates. Note that doing so will discard any local modifications.
-"\"\"
-
-# Each source file is tracked as a target so `pip_repository` rules will know to automatically rebuild if any of the
-# sources changed.
-PIP_INSTALL_PY_SRCS = [
-    {srcs}
-]
-"""
-
-def _src_label(file):
-    dir_path, file_name = file.short_path.rsplit("/", 1)
-
-    return "@rules_python//{}:{}".format(
-        dir_path,
-        file_name,
-    )
-
-def _srcs_module_impl(ctx):
-    srcs = [_src_label(src) for src in ctx.files.srcs]
-    if not srcs:
-        fail("`srcs` cannot be empty")
-    output = ctx.actions.declare_file(ctx.label.name)
-
-    ctx.actions.write(
-        output = output,
-        content = _SRCS_TEMPLATE.format(
-            srcs = "\n    ".join(["\"{}\",".format(src) for src in srcs]),
-        ),
-    )
-
-    return DefaultInfo(
-        files = depset([output]),
-    )
-
-_srcs_module = rule(
-    doc = "A rule for writing a list of sources to a templated file",
-    implementation = _srcs_module_impl,
-    attrs = {
-        "srcs": attr.label(
-            doc = "A filegroup of source files",
-            allow_files = True,
-        ),
-    },
-)
-
-_INSTALLER_TEMPLATE = """\
-#!/bin/bash
-set -euo pipefail
-cp -f "{path}" "${{BUILD_WORKSPACE_DIRECTORY}}/{dest}"
-"""
-
-def _srcs_updater_impl(ctx):
-    output = ctx.actions.declare_file(ctx.label.name + ".sh")
-    target_file = ctx.file.input
-    dest = ctx.file.dest.short_path
-
-    ctx.actions.write(
-        output = output,
-        content = _INSTALLER_TEMPLATE.format(
-            path = target_file.short_path,
-            dest = dest,
-        ),
-        is_executable = True,
-    )
-
-    return DefaultInfo(
-        files = depset([output]),
-        runfiles = ctx.runfiles(files = [target_file]),
-        executable = output,
-    )
-
-_srcs_updater = rule(
-    doc = "A rule for writing a `srcs.bzl` file back to the repository",
-    implementation = _srcs_updater_impl,
-    attrs = {
-        "dest": attr.label(
-            doc = "The target file to write the new `input` to.",
-            allow_single_file = ["srcs.bzl"],
-            mandatory = True,
-        ),
-        "input": attr.label(
-            doc = "The file to write back to the repository",
-            allow_single_file = True,
-            mandatory = True,
-        ),
-    },
-    executable = True,
-)
-
-def srcs_module(name, dest, **kwargs):
-    """A helper rule to ensure `pip_repository` rules are always up to date
-
-    Args:
-        name (str): The name of the sources module
-        dest (str): The filename the module should be written as in the current package.
-        **kwargs (dict): Additional keyword arguments
-    """
-    tags = kwargs.pop("tags", [])
-
-    _srcs_module(
-        name = name,
-        tags = tags,
-        **kwargs
-    )
-
-    _srcs_updater(
-        name = name + ".update",
-        input = name,
-        dest = dest,
-        tags = tags,
-    )
diff --git a/python/pip_install/private/srcs.bzl b/python/pip_install/private/srcs.bzl
deleted file mode 100644
index e92e49f..0000000
--- a/python/pip_install/private/srcs.bzl
+++ /dev/null
@@ -1,18 +0,0 @@
-"""A generated file containing all source files used for `@rules_python//python/pip_install:pip_repository.bzl` rules
-
-This file is auto-generated from the `@rules_python//python/pip_install/private:srcs_module.update` target. Please
-`bazel run` this target to apply any updates. Note that doing so will discard any local modifications.
-"""
-
-# Each source file is tracked as a target so `pip_repository` rules will know to automatically rebuild if any of the
-# sources changed.
-PIP_INSTALL_PY_SRCS = [
-    "@rules_python//python/pip_install/tools/dependency_resolver:__init__.py",
-    "@rules_python//python/pip_install/tools/dependency_resolver:dependency_resolver.py",
-    "@rules_python//python/pip_install/tools/wheel_installer:arguments.py",
-    "@rules_python//python/pip_install/tools/wheel_installer:namespace_pkgs.py",
-    "@rules_python//python/pip_install/tools/wheel_installer:wheel.py",
-    "@rules_python//python/pip_install/tools/wheel_installer:wheel_installer.py",
-    "@rules_python//python/private:repack_whl.py",
-    "@rules_python//tools:wheelmaker.py",
-]
diff --git a/python/pip_install/private/test/BUILD.bazel b/python/pip_install/private/test/BUILD.bazel
deleted file mode 100644
index d4978f3..0000000
--- a/python/pip_install/private/test/BUILD.bazel
+++ /dev/null
@@ -1,20 +0,0 @@
-load("@bazel_skylib//rules:diff_test.bzl", "diff_test")
-load(":requirements_parser_tests.bzl", parse_requirements_tests = "parse_tests")
-
-diff_test(
-    name = "srcs_diff_test",
-    failure_message = (
-        "Please run 'bazel run //python/pip_install/private:srcs_module.update' " +
-        "to update the 'srcs.bzl' module found in the same package."
-    ),
-    file1 = "//python/pip_install/private:srcs_module",
-    file2 = "//python/pip_install/private:srcs.bzl",
-    # TODO: The diff_test here fails on Windows. As does the
-    # install script. This should be fixed.
-    target_compatible_with = select({
-        "@platforms//os:windows": ["@platforms//:incompatible"],
-        "//conditions:default": [],
-    }),
-)
-
-parse_requirements_tests(name = "test_parse_requirements")
diff --git a/python/pip_install/repositories.bzl b/python/pip_install/repositories.bzl
index 91bdd4b..5231d1f 100644
--- a/python/pip_install/repositories.bzl
+++ b/python/pip_install/repositories.bzl
@@ -14,131 +14,6 @@
 
 ""
 
-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
-load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+load("//python/private/pypi:deps.bzl", "pypi_deps")
 
-_RULE_DEPS = [
-    # START: maintained by 'bazel run //tools/private:update_pip_deps'
-    (
-        "pypi__build",
-        "https://files.pythonhosted.org/packages/58/91/17b00d5fac63d3dca605f1b8269ba3c65e98059e1fd99d00283e42a454f0/build-0.10.0-py3-none-any.whl",
-        "af266720050a66c893a6096a2f410989eeac74ff9a68ba194b3f6473e8e26171",
-    ),
-    (
-        "pypi__click",
-        "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl",
-        "ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28",
-    ),
-    (
-        "pypi__colorama",
-        "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl",
-        "4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6",
-    ),
-    (
-        "pypi__importlib_metadata",
-        "https://files.pythonhosted.org/packages/cc/37/db7ba97e676af155f5fcb1a35466f446eadc9104e25b83366e8088c9c926/importlib_metadata-6.8.0-py3-none-any.whl",
-        "3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb",
-    ),
-    (
-        "pypi__installer",
-        "https://files.pythonhosted.org/packages/e5/ca/1172b6638d52f2d6caa2dd262ec4c811ba59eee96d54a7701930726bce18/installer-0.7.0-py3-none-any.whl",
-        "05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53",
-    ),
-    (
-        "pypi__more_itertools",
-        "https://files.pythonhosted.org/packages/5a/cb/6dce742ea14e47d6f565589e859ad225f2a5de576d7696e0623b784e226b/more_itertools-10.1.0-py3-none-any.whl",
-        "64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6",
-    ),
-    (
-        "pypi__packaging",
-        "https://files.pythonhosted.org/packages/ab/c3/57f0601a2d4fe15de7a553c00adbc901425661bf048f2a22dfc500caf121/packaging-23.1-py3-none-any.whl",
-        "994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61",
-    ),
-    (
-        "pypi__pep517",
-        "https://files.pythonhosted.org/packages/ee/2f/ef63e64e9429111e73d3d6cbee80591672d16f2725e648ebc52096f3d323/pep517-0.13.0-py3-none-any.whl",
-        "4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b",
-    ),
-    (
-        "pypi__pip",
-        "https://files.pythonhosted.org/packages/50/c2/e06851e8cc28dcad7c155f4753da8833ac06a5c704c109313b8d5a62968a/pip-23.2.1-py3-none-any.whl",
-        "7ccf472345f20d35bdc9d1841ff5f313260c2c33fe417f48c30ac46cccabf5be",
-    ),
-    (
-        "pypi__pip_tools",
-        "https://files.pythonhosted.org/packages/e8/df/47e6267c6b5cdae867adbdd84b437393e6202ce4322de0a5e0b92960e1d6/pip_tools-7.3.0-py3-none-any.whl",
-        "8717693288720a8c6ebd07149c93ab0be1fced0b5191df9e9decd3263e20d85e",
-    ),
-    (
-        "pypi__pyproject_hooks",
-        "https://files.pythonhosted.org/packages/d5/ea/9ae603de7fbb3df820b23a70f6aff92bf8c7770043254ad8d2dc9d6bcba4/pyproject_hooks-1.0.0-py3-none-any.whl",
-        "283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8",
-    ),
-    (
-        "pypi__setuptools",
-        "https://files.pythonhosted.org/packages/4f/ab/0bcfebdfc3bfa8554b2b2c97a555569c4c1ebc74ea288741ea8326c51906/setuptools-68.1.2-py3-none-any.whl",
-        "3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b",
-    ),
-    (
-        "pypi__tomli",
-        "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl",
-        "939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
-    ),
-    (
-        "pypi__wheel",
-        "https://files.pythonhosted.org/packages/b8/8b/31273bf66016be6ad22bb7345c37ff350276cfd46e389a0c2ac5da9d9073/wheel-0.41.2-py3-none-any.whl",
-        "75909db2664838d015e3d9139004ee16711748a52c8f336b52882266540215d8",
-    ),
-    (
-        "pypi__zipp",
-        "https://files.pythonhosted.org/packages/8c/08/d3006317aefe25ea79d3b76c9650afabaf6d63d1c8443b236e7405447503/zipp-3.16.2-py3-none-any.whl",
-        "679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0",
-    ),
-    # END: maintained by 'bazel run //tools/private:update_pip_deps'
-]
-
-_GENERIC_WHEEL = """\
-package(default_visibility = ["//visibility:public"])
-
-load("@rules_python//python:defs.bzl", "py_library")
-
-py_library(
-    name = "lib",
-    srcs = glob(["**/*.py"]),
-    data = glob(["**/*"], exclude=[
-        # These entries include those put into user-installed dependencies by
-        # data_exclude in /python/pip_install/tools/bazel.py
-        # to avoid non-determinism following pip install's behavior.
-        "**/*.py",
-        "**/*.pyc",
-        "**/*.pyc.*",  # During pyc creation, temp files named *.pyc.NNN are created
-        "**/* *",
-        "**/*.dist-info/RECORD",
-        "BUILD",
-        "WORKSPACE",
-    ]),
-    # This makes this directory a top-level in the python import
-    # search path for anything that depends on this.
-    imports = ["."],
-)
-"""
-
-# Collate all the repository names so they can be easily consumed
-all_requirements = [name for (name, _, _) in _RULE_DEPS]
-
-def requirement(pkg):
-    return Label("@pypi__" + pkg + "//:lib")
-
-def pip_install_dependencies():
-    """
-    Fetch dependencies these rules depend on. Workspaces that use the pip_parse rule can call this.
-    """
-    for (name, url, sha256) in _RULE_DEPS:
-        maybe(
-            http_archive,
-            name,
-            url = url,
-            sha256 = sha256,
-            type = "zip",
-            build_file_content = _GENERIC_WHEEL,
-        )
+pip_install_dependencies = pypi_deps
diff --git a/python/pip_install/requirements.bzl b/python/pip_install/requirements.bzl
index 5caf762..6ae3f8f 100644
--- a/python/pip_install/requirements.bzl
+++ b/python/pip_install/requirements.bzl
@@ -14,149 +14,6 @@
 
 """Rules to verify and update pip-compile locked requirements.txt"""
 
-load("//python:defs.bzl", _py_binary = "py_binary", _py_test = "py_test")
-load("//python/pip_install:repositories.bzl", "requirement")
+load("//python/private/pypi:pip_compile.bzl", "pip_compile")
 
-def compile_pip_requirements(
-        name,
-        src = None,
-        extra_args = [],
-        extra_deps = [],
-        generate_hashes = True,
-        py_binary = _py_binary,
-        py_test = _py_test,
-        requirements_in = None,
-        requirements_txt = None,
-        requirements_darwin = None,
-        requirements_linux = None,
-        requirements_windows = None,
-        visibility = ["//visibility:private"],
-        tags = None,
-        **kwargs):
-    """Generates targets for managing pip dependencies with pip-compile.
-
-    By default this rules generates a filegroup named "[name]" which can be included in the data
-    of some other compile_pip_requirements rule that references these requirements
-    (e.g. with `-r ../other/requirements.txt`).
-
-    It also generates two targets for running pip-compile:
-
-    - validate with `bazel test [name]_test`
-    - update with   `bazel run [name].update`
-
-    If you are using a version control system, the requirements.txt generated by this rule should
-    be checked into it to ensure that all developers/users have the same dependency versions.
-
-    Args:
-        name: base name for generated targets, typically "requirements".
-        src: file containing inputs to dependency resolution. If not specified,
-            defaults to `pyproject.toml`. Supported formats are:
-            * a requirements text file, usually named `requirements.in`
-            * A `.toml` file, where the `project.dependencies` list is used as per
-              [PEP621](https://peps.python.org/pep-0621/).
-        extra_args: passed to pip-compile.
-        extra_deps: extra dependencies passed to pip-compile.
-        generate_hashes: whether to put hashes in the requirements_txt file.
-        py_binary: the py_binary rule to be used.
-        py_test: the py_test rule to be used.
-        requirements_in: file expressing desired dependencies. Deprecated, use src instead.
-        requirements_txt: result of "compiling" the requirements.in file.
-        requirements_linux: File of linux specific resolve output to check validate if requirement.in has changes.
-        requirements_darwin: File of darwin specific resolve output to check validate if requirement.in has changes.
-        requirements_windows: File of windows specific resolve output to check validate if requirement.in has changes.
-        tags: tagging attribute common to all build rules, passed to both the _test and .update rules.
-        visibility: passed to both the _test and .update rules.
-        **kwargs: other bazel attributes passed to the "_test" rule.
-    """
-    if requirements_in and src:
-        fail("Only one of 'src' and 'requirements_in' attributes can be used")
-    else:
-        src = requirements_in or src or "pyproject.toml"
-
-    requirements_txt = name + ".txt" if requirements_txt == None else requirements_txt
-
-    # "Default" target produced by this macro
-    # Allow a compile_pip_requirements rule to include another one in the data
-    # for a requirements file that does `-r ../other/requirements.txt`
-    native.filegroup(
-        name = name,
-        srcs = kwargs.pop("data", []) + [requirements_txt],
-        visibility = visibility,
-    )
-
-    data = [name, requirements_txt, src] + [f for f in (requirements_linux, requirements_darwin, requirements_windows) if f != None]
-
-    # Use the Label constructor so this is expanded in the context of the file
-    # where it appears, which is to say, in @rules_python
-    pip_compile = Label("//python/pip_install/tools/dependency_resolver:dependency_resolver.py")
-
-    loc = "$(rlocationpath {})"
-
-    args = [
-        loc.format(src),
-        loc.format(requirements_txt),
-        "//%s:%s.update" % (native.package_name(), name),
-        "--resolver=backtracking",
-        "--allow-unsafe",
-    ]
-    if generate_hashes:
-        args.append("--generate-hashes")
-    if requirements_linux:
-        args.append("--requirements-linux={}".format(loc.format(requirements_linux)))
-    if requirements_darwin:
-        args.append("--requirements-darwin={}".format(loc.format(requirements_darwin)))
-    if requirements_windows:
-        args.append("--requirements-windows={}".format(loc.format(requirements_windows)))
-    args.extend(extra_args)
-
-    deps = [
-        requirement("build"),
-        requirement("click"),
-        requirement("colorama"),
-        requirement("importlib_metadata"),
-        requirement("more_itertools"),
-        requirement("packaging"),
-        requirement("pep517"),
-        requirement("pip"),
-        requirement("pip_tools"),
-        requirement("pyproject_hooks"),
-        requirement("setuptools"),
-        requirement("tomli"),
-        requirement("zipp"),
-        Label("//python/runfiles:runfiles"),
-    ] + extra_deps
-
-    tags = tags or []
-    tags.append("requires-network")
-    tags.append("no-remote-exec")
-    tags.append("no-sandbox")
-    attrs = {
-        "args": args,
-        "data": data,
-        "deps": deps,
-        "main": pip_compile,
-        "srcs": [pip_compile],
-        "tags": tags,
-        "visibility": visibility,
-    }
-
-    # cheap way to detect the bazel version
-    _bazel_version_4_or_greater = "propeller_optimize" in dir(native)
-
-    # Bazel 4.0 added the "env" attribute to py_test/py_binary
-    if _bazel_version_4_or_greater:
-        attrs["env"] = kwargs.pop("env", {})
-
-    py_binary(
-        name = name + ".update",
-        **attrs
-    )
-
-    timeout = kwargs.pop("timeout", "short")
-
-    py_test(
-        name = name + "_test",
-        timeout = timeout,
-        # kwargs could contain test-specific attributes like size or timeout
-        **dict(attrs, **kwargs)
-    )
+compile_pip_requirements = pip_compile
diff --git a/python/pip_install/requirements_parser.bzl b/python/pip_install/requirements_parser.bzl
index 3b49fdf..82ec1b9 100644
--- a/python/pip_install/requirements_parser.bzl
+++ b/python/pip_install/requirements_parser.bzl
@@ -14,120 +14,6 @@
 
 "Pip requirements parser for Starlark"
 
-_STATE = struct(
-    # Consume extraneous whitespace
-    ConsumeSpace = 0,
-    # Consume a comment
-    ConsumeComment = 1,
-    # Parse the name of a pip package
-    ParseDependency = 2,
-    # Parse a full requirement line
-    ParseRequirement = 3,
-    # Parse a pip option
-    ParseOption = 4,
-)
+load("//python/private/pypi:parse_requirements_txt.bzl", "parse_requirements_txt")
 
-EOF = {}
-
-def parse(content):
-    """A simplistic (and incomplete) pip requirements lockfile parser.
-
-    Parses package names and their full requirement lines, as well pip
-    options.
-
-    Args:
-      content: lockfile content as a string
-
-    Returns:
-      Struct with fields `requirements` and `options`.
-
-      requirements: List of requirements, where each requirement is a 2-element
-        tuple containing the package name and the requirement line.
-        E.g., [(certifi', 'certifi==2021.10.8 --hash=sha256:7888...'), ...]
-
-      options: List of pip option lines
-    """
-    content = content.replace("\r", "")
-
-    result = struct(
-        requirements = [],
-        options = [],
-    )
-    state = _STATE.ConsumeSpace
-    buffer = ""
-
-    inputs = content.elems()[:]
-    inputs.append(EOF)
-
-    for input in inputs:
-        if state == _STATE.ConsumeSpace:
-            (state, buffer) = _handleConsumeSpace(input)
-        elif state == _STATE.ConsumeComment:
-            (state, buffer) = _handleConsumeComment(input, buffer, result)
-        elif state == _STATE.ParseDependency:
-            (state, buffer) = _handleParseDependency(input, buffer, result)
-        elif state == _STATE.ParseOption:
-            (state, buffer) = _handleParseOption(input, buffer, result)
-        elif state == _STATE.ParseRequirement:
-            (state, buffer) = _handleParseRequirement(input, buffer, result)
-        else:
-            fail("Unknown state %d" % state)
-
-    return result
-
-def _handleConsumeSpace(input):
-    if input == EOF:
-        return (_STATE.ConsumeSpace, "")
-    if input.isspace():
-        return (_STATE.ConsumeSpace, "")
-    elif input == "#":
-        return (_STATE.ConsumeComment, "")
-    elif input == "-":
-        return (_STATE.ParseOption, input)
-
-    return (_STATE.ParseDependency, input)
-
-def _handleConsumeComment(input, buffer, result):
-    if input == "\n":
-        if len(result.requirements) > 0 and len(result.requirements[-1]) == 1:
-            result.requirements[-1] = (result.requirements[-1][0], buffer.rstrip(" \n"))
-            return (_STATE.ConsumeSpace, "")
-        elif len(buffer) > 0:
-            result.options.append(buffer.rstrip(" \n"))
-            return (_STATE.ConsumeSpace, "")
-        return (_STATE.ConsumeSpace, "")
-    return (_STATE.ConsumeComment, buffer)
-
-def _handleParseDependency(input, buffer, result):
-    if input == EOF:
-        fail("Enountered unexpected end of file while parsing requirement")
-    elif input.isspace() or input in [">", "<", "~", "=", ";", "["]:
-        result.requirements.append((buffer,))
-        return (_STATE.ParseRequirement, buffer + input)
-
-    return (_STATE.ParseDependency, buffer + input)
-
-def _handleParseOption(input, buffer, result):
-    if input == "\n" and buffer.endswith("\\"):
-        return (_STATE.ParseOption, buffer[0:-1])
-    elif input == " ":
-        result.options.append(buffer.rstrip("\n"))
-        return (_STATE.ParseOption, "")
-    elif input == "\n" or input == EOF:
-        result.options.append(buffer.rstrip("\n"))
-        return (_STATE.ConsumeSpace, "")
-    elif input == "#" and (len(buffer) == 0 or buffer[-1].isspace()):
-        return (_STATE.ConsumeComment, buffer)
-
-    return (_STATE.ParseOption, buffer + input)
-
-def _handleParseRequirement(input, buffer, result):
-    if input == "\n" and buffer.endswith("\\"):
-        return (_STATE.ParseRequirement, buffer[0:-1])
-    elif input == "\n" or input == EOF:
-        result.requirements[-1] = (result.requirements[-1][0], buffer.rstrip(" \n"))
-        return (_STATE.ConsumeSpace, "")
-    elif input == "#" and (len(buffer) == 0 or buffer[-1].isspace()):
-        return (_STATE.ConsumeComment, buffer)
-
-    return (_STATE.ParseRequirement, buffer + input)
+parse = parse_requirements_txt
diff --git a/python/pip_install/tools/dependency_resolver/BUILD.bazel b/python/pip_install/tools/dependency_resolver/BUILD.bazel
deleted file mode 100644
index c2cfb39..0000000
--- a/python/pip_install/tools/dependency_resolver/BUILD.bazel
+++ /dev/null
@@ -1,19 +0,0 @@
-exports_files(["dependency_resolver.py"])
-
-filegroup(
-    name = "distribution",
-    srcs = glob(
-        ["*"],
-        exclude = ["*_test.py"],
-    ),
-    visibility = ["//python/pip_install:__subpackages__"],
-)
-
-filegroup(
-    name = "py_srcs",
-    srcs = glob(
-        include = ["**/*.py"],
-        exclude = ["**/*_test.py"],
-    ),
-    visibility = ["//python/pip_install:__subpackages__"],
-)
diff --git a/python/pip_install/tools/wheel_installer/BUILD.bazel b/python/pip_install/tools/wheel_installer/BUILD.bazel
deleted file mode 100644
index a396488..0000000
--- a/python/pip_install/tools/wheel_installer/BUILD.bazel
+++ /dev/null
@@ -1,91 +0,0 @@
-load("//python:defs.bzl", "py_binary", "py_library", "py_test")
-load("//python/pip_install:repositories.bzl", "requirement")
-
-py_library(
-    name = "lib",
-    srcs = [
-        "arguments.py",
-        "namespace_pkgs.py",
-        "wheel.py",
-        "wheel_installer.py",
-    ],
-    visibility = ["//third_party/rules_pycross/pycross/private:__subpackages__"],
-    deps = [
-        requirement("installer"),
-        requirement("pip"),
-        requirement("packaging"),
-        requirement("setuptools"),
-    ],
-)
-
-py_binary(
-    name = "wheel_installer",
-    srcs = [
-        "wheel_installer.py",
-    ],
-    deps = [":lib"],
-)
-
-py_test(
-    name = "arguments_test",
-    size = "small",
-    srcs = [
-        "arguments_test.py",
-    ],
-    deps = [
-        ":lib",
-    ],
-)
-
-py_test(
-    name = "namespace_pkgs_test",
-    size = "small",
-    srcs = [
-        "namespace_pkgs_test.py",
-    ],
-    deps = [
-        ":lib",
-    ],
-)
-
-py_test(
-    name = "wheel_test",
-    size = "small",
-    srcs = [
-        "wheel_test.py",
-    ],
-    data = ["//examples/wheel:minimal_with_py_package"],
-    deps = [
-        ":lib",
-    ],
-)
-
-py_test(
-    name = "wheel_installer_test",
-    size = "small",
-    srcs = [
-        "wheel_installer_test.py",
-    ],
-    data = ["//examples/wheel:minimal_with_py_package"],
-    deps = [
-        ":lib",
-    ],
-)
-
-filegroup(
-    name = "distribution",
-    srcs = glob(
-        ["*"],
-        exclude = ["*_test.py"],
-    ),
-    visibility = ["//python/pip_install:__subpackages__"],
-)
-
-filegroup(
-    name = "py_srcs",
-    srcs = glob(
-        include = ["**/*.py"],
-        exclude = ["**/*_test.py"],
-    ),
-    visibility = ["//python/pip_install:__subpackages__"],
-)
diff --git a/python/pip_install/tools/wheel_installer/wheel.py b/python/pip_install/tools/wheel_installer/wheel.py
deleted file mode 100644
index efd916d..0000000
--- a/python/pip_install/tools/wheel_installer/wheel.py
+++ /dev/null
@@ -1,466 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utility class to inspect an extracted wheel directory"""
-
-import email
-import platform
-import re
-import sys
-from collections import defaultdict
-from dataclasses import dataclass
-from enum import Enum
-from pathlib import Path
-from typing import Any, Dict, List, Optional, Set, Tuple, Union
-
-import installer
-from packaging.requirements import Requirement
-from pip._vendor.packaging.utils import canonicalize_name
-
-
-class OS(Enum):
-    linux = 1
-    osx = 2
-    windows = 3
-    darwin = osx
-    win32 = windows
-
-
-class Arch(Enum):
-    x86_64 = 1
-    x86_32 = 2
-    aarch64 = 3
-    ppc = 4
-    s390x = 5
-    amd64 = x86_64
-    arm64 = aarch64
-    i386 = x86_32
-    i686 = x86_32
-    x86 = x86_32
-    ppc64le = ppc
-
-
-@dataclass(frozen=True)
-class Platform:
-    os: OS
-    arch: Optional[Arch] = None
-
-    @classmethod
-    def all(cls, want_os: Optional[OS] = None) -> List["Platform"]:
-        return sorted(
-            [
-                cls(os=os, arch=arch)
-                for os in OS
-                for arch in Arch
-                if not want_os or want_os == os
-            ]
-        )
-
-    @classmethod
-    def host(cls) -> List["Platform"]:
-        """Use the Python interpreter to detect the platform.
-
-        We extract `os` from sys.platform and `arch` from platform.machine
-
-        Returns:
-            A list of parsed values which makes the signature the same as
-            `Platform.all` and `Platform.from_string`.
-        """
-        return [
-            cls(
-                os=OS[sys.platform.lower()],
-                # FIXME @aignas 2023-12-13: Hermetic toolchain on Windows 3.11.6
-                # is returning an empty string here, so lets default to x86_64
-                arch=Arch[platform.machine().lower() or "x86_64"],
-            )
-        ]
-
-    def __lt__(self, other: Any) -> bool:
-        """Add a comparison method, so that `sorted` returns the most specialized platforms first."""
-        if not isinstance(other, Platform) or other is None:
-            raise ValueError(f"cannot compare {other} with Platform")
-
-        if self.arch is None and other.arch is not None:
-            return True
-
-        if self.arch is not None and other.arch is None:
-            return True
-
-        # Here we ensure that we sort by OS before sorting by arch
-
-        if self.arch is None and other.arch is None:
-            return self.os.value < other.os.value
-
-        if self.os.value < other.os.value:
-            return True
-
-        if self.os.value == other.os.value:
-            return self.arch.value < other.arch.value
-
-        return False
-
-    def __str__(self) -> str:
-        if self.arch is None:
-            return f"@platforms//os:{self.os.name.lower()}"
-
-        return self.os.name.lower() + "_" + self.arch.name.lower()
-
-    @classmethod
-    def from_string(cls, platform: Union[str, List[str]]) -> List["Platform"]:
-        """Parse a string and return a list of platforms"""
-        platform = [platform] if isinstance(platform, str) else list(platform)
-        ret = set()
-        for p in platform:
-            if p == "host":
-                ret.update(cls.host())
-            elif p == "all":
-                ret.update(cls.all())
-            elif p.endswith("*"):
-                os, _, _ = p.partition("_")
-                ret.update(cls.all(OS[os]))
-            else:
-                os, _, arch = p.partition("_")
-                ret.add(cls(os=OS[os], arch=Arch[arch]))
-
-        return sorted(ret)
-
-    # NOTE @aignas 2023-12-05: below is the minimum number of accessors that are defined in
-    # https://peps.python.org/pep-0496/ to make rules_python generate dependencies.
-    #
-    # WARNING: It may not work in cases where the python implementation is different between
-    # different platforms.
-
-    # derived from OS
-    @property
-    def os_name(self) -> str:
-        if self.os == OS.linux or self.os == OS.osx:
-            return "posix"
-        elif self.os == OS.windows:
-            return "nt"
-        else:
-            return ""
-
-    @property
-    def sys_platform(self) -> str:
-        if self.os == OS.linux:
-            return "linux"
-        elif self.os == OS.osx:
-            return "darwin"
-        elif self.os == OS.windows:
-            return "win32"
-        else:
-            return ""
-
-    @property
-    def platform_system(self) -> str:
-        if self.os == OS.linux:
-            return "Linux"
-        elif self.os == OS.osx:
-            return "Darwin"
-        elif self.os == OS.windows:
-            return "Windows"
-
-    # derived from OS and Arch
-    @property
-    def platform_machine(self) -> str:
-        """Guess the target 'platform_machine' marker.
-
-        NOTE @aignas 2023-12-05: this may not work on really new systems, like
-        Windows if they define the platform markers in a different way.
-        """
-        if self.arch == Arch.x86_64:
-            return "x86_64"
-        elif self.arch == Arch.x86_32 and self.os != OS.osx:
-            return "i386"
-        elif self.arch == Arch.x86_32:
-            return ""
-        elif self.arch == Arch.aarch64 and self.os == OS.linux:
-            return "aarch64"
-        elif self.arch == Arch.aarch64:
-            # Assuming that OSX and Windows use this one since the precedent is set here:
-            # https://github.com/cgohlke/win_arm64-wheels
-            return "arm64"
-        elif self.os != OS.linux:
-            return ""
-        elif self.arch == Arch.ppc64le:
-            return "ppc64le"
-        elif self.arch == Arch.s390x:
-            return "s390x"
-        else:
-            return ""
-
-    def env_markers(self, extra: str) -> Dict[str, str]:
-        return {
-            "extra": extra,
-            "os_name": self.os_name,
-            "sys_platform": self.sys_platform,
-            "platform_machine": self.platform_machine,
-            "platform_system": self.platform_system,
-            "platform_release": "",  # unset
-            "platform_version": "",  # unset
-            # we assume that the following are the same as the interpreter used to setup the deps:
-            # "implementation_version": "X.Y.Z",
-            # "implementation_name": "cpython"
-            # "python_version": "X.Y",
-            # "python_full_version": "X.Y.Z",
-            # "platform_python_implementation: "CPython",
-        }
-
-
-@dataclass(frozen=True)
-class FrozenDeps:
-    deps: List[str]
-    deps_select: Dict[str, List[str]]
-
-
-class Deps:
-    def __init__(
-        self,
-        name: str,
-        extras: Optional[Set[str]] = None,
-        platforms: Optional[Set[Platform]] = None,
-    ):
-        self.name: str = Deps._normalize(name)
-        self._deps: Set[str] = set()
-        self._select: Dict[Platform, Set[str]] = defaultdict(set)
-        self._want_extras: Set[str] = extras or {""}  # empty strings means no extras
-        self._platforms: Set[Platform] = platforms or set()
-
-    def _add(self, dep: str, platform: Optional[Platform]):
-        dep = Deps._normalize(dep)
-
-        # Packages may create dependency cycles when specifying optional-dependencies / 'extras'.
-        # Example: github.com/google/etils/blob/a0b71032095db14acf6b33516bca6d885fe09e35/pyproject.toml#L32.
-        if dep == self.name:
-            return
-
-        if platform:
-            self._select[platform].add(dep)
-        else:
-            self._deps.add(dep)
-
-    @staticmethod
-    def _normalize(name: str) -> str:
-        return re.sub(r"[-_.]+", "_", name).lower()
-
-    def add(self, *wheel_reqs: str) -> None:
-        reqs = [Requirement(wheel_req) for wheel_req in wheel_reqs]
-
-        # Resolve any extra extras due to self-edges
-        self._want_extras = self._resolve_extras(reqs)
-
-        # process self-edges first to resolve the extras used
-        for req in reqs:
-            self._add_req(req)
-
-    def _resolve_extras(self, reqs: List[Requirement]) -> Set[str]:
-        """Resolve extras which are due to depending on self[some_other_extra].
-
-        Some packages may have cyclic dependencies resulting from extras being used, one example is
-        `elint`, where we have one set of extras as aliases for other extras
-        and we have an extra called 'all' that includes all other extras.
-
-        When the `requirements.txt` is generated by `pip-tools`, then it is likely that
-        this step is not needed, but for other `requirements.txt` files this may be useful.
-
-        NOTE @aignas 2023-12-08: the extra resolution is not platform dependent, but
-        in order for it to become platform dependent we would have to have separate targets for each extra in
-        self._want_extras.
-        """
-        extras = self._want_extras
-
-        self_reqs = []
-        for req in reqs:
-            if Deps._normalize(req.name) != self.name:
-                continue
-
-            if req.marker is None:
-                # I am pretty sure we cannot reach this code as it does not
-                # make sense to specify packages in this way, but since it is
-                # easy to handle, lets do it.
-                #
-                # TODO @aignas 2023-12-08: add a test
-                extras = extras | req.extras
-            else:
-                # process these in a separate loop
-                self_reqs.append(req)
-
-        # A double loop is not strictly optimal, but always correct without recursion
-        for req in self_reqs:
-            if any(req.marker.evaluate({"extra": extra}) for extra in extras):
-                extras = extras | req.extras
-            else:
-                continue
-
-            # Iterate through all packages to ensure that we include all of the extras from previously
-            # visited packages.
-            for req_ in self_reqs:
-                if any(req_.marker.evaluate({"extra": extra}) for extra in extras):
-                    extras = extras | req_.extras
-
-        return extras
-
-    def _add_req(self, req: Requirement) -> None:
-        extras = self._want_extras
-
-        if req.marker is None:
-            self._add(req.name, None)
-            return
-
-        marker_str = str(req.marker)
-
-        # NOTE @aignas 2023-12-08: in order to have reasonable select statements
-        # we do have to have some parsing of the markers, so it begs the question
-        # if packaging should be reimplemented in Starlark to have the best solution
-        # for now we will implement it in Python and see what the best parsing result
-        # can be before making this decision.
-        if not self._platforms or not any(
-            tag in marker_str
-            for tag in [
-                "os_name",
-                "sys_platform",
-                "platform_machine",
-                "platform_system",
-            ]
-        ):
-            if any(req.marker.evaluate({"extra": extra}) for extra in extras):
-                self._add(req.name, None)
-            return
-
-        for plat in self._platforms:
-            if not any(
-                req.marker.evaluate(plat.env_markers(extra)) for extra in extras
-            ):
-                continue
-
-            if "platform_machine" in marker_str:
-                self._add(req.name, plat)
-            else:
-                self._add(req.name, Platform(plat.os))
-
-    def build(self) -> FrozenDeps:
-        if not self._select:
-            return FrozenDeps(
-                deps=sorted(self._deps),
-                deps_select={},
-            )
-
-        # Get all of the OS-specific dependencies applicable to all architectures
-        select = {
-            p: deps for p, deps in self._select.items() if deps and p.arch is None
-        }
-        # Now add them to all arch specific dependencies
-        select.update(
-            {
-                p: deps | select.get(Platform(p.os), set())
-                for p, deps in self._select.items()
-                if deps and p.arch is not None
-            }
-        )
-
-        return FrozenDeps(
-            deps=sorted(self._deps),
-            deps_select={str(p): sorted(deps) for p, deps in sorted(select.items())},
-        )
-
-
-class Wheel:
-    """Representation of the compressed .whl file"""
-
-    def __init__(self, path: Path):
-        self._path = path
-
-    @property
-    def path(self) -> str:
-        return self._path
-
-    @property
-    def name(self) -> str:
-        # TODO Also available as installer.sources.WheelSource.distribution
-        name = str(self.metadata["Name"])
-        return canonicalize_name(name)
-
-    @property
-    def metadata(self) -> email.message.Message:
-        with installer.sources.WheelFile.open(self.path) as wheel_source:
-            metadata_contents = wheel_source.read_dist_info("METADATA")
-            metadata = installer.utils.parse_metadata_file(metadata_contents)
-        return metadata
-
-    @property
-    def version(self) -> str:
-        # TODO Also available as installer.sources.WheelSource.version
-        return str(self.metadata["Version"])
-
-    def entry_points(self) -> Dict[str, Tuple[str, str]]:
-        """Returns the entrypoints defined in the current wheel
-
-        See https://packaging.python.org/specifications/entry-points/ for more info
-
-        Returns:
-            Dict[str, Tuple[str, str]]: A mapping of the entry point's name to it's module and attribute
-        """
-        with installer.sources.WheelFile.open(self.path) as wheel_source:
-            if "entry_points.txt" not in wheel_source.dist_info_filenames:
-                return dict()
-
-            entry_points_mapping = dict()
-            entry_points_contents = wheel_source.read_dist_info("entry_points.txt")
-            entry_points = installer.utils.parse_entrypoints(entry_points_contents)
-            for script, module, attribute, script_section in entry_points:
-                if script_section == "console":
-                    entry_points_mapping[script] = (module, attribute)
-
-            return entry_points_mapping
-
-    def dependencies(
-        self,
-        extras_requested: Set[str] = None,
-        platforms: Optional[Set[Platform]] = None,
-    ) -> FrozenDeps:
-        dependency_set = Deps(
-            self.name,
-            extras=extras_requested,
-            platforms=platforms,
-        )
-        for wheel_req in self.metadata.get_all("Requires-Dist", []):
-            dependency_set.add(wheel_req)
-
-        return dependency_set.build()
-
-    def unzip(self, directory: str) -> None:
-        installation_schemes = {
-            "purelib": "/site-packages",
-            "platlib": "/site-packages",
-            "headers": "/include",
-            "scripts": "/bin",
-            "data": "/data",
-        }
-        destination = installer.destinations.SchemeDictionaryDestination(
-            installation_schemes,
-            # TODO Should entry_point scripts also be handled by installer rather than custom code?
-            interpreter="/dev/null",
-            script_kind="posix",
-            destdir=directory,
-            bytecode_optimization_levels=[],
-        )
-
-        with installer.sources.WheelFile.open(self.path) as wheel_source:
-            installer.install(
-                source=wheel_source,
-                destination=destination,
-                additional_metadata={
-                    "INSTALLER": b"https://github.com/bazelbuild/rules_python",
-                },
-            )
diff --git a/python/pip_install/tools/wheel_installer/wheel_test.py b/python/pip_install/tools/wheel_installer/wheel_test.py
deleted file mode 100644
index 721b710..0000000
--- a/python/pip_install/tools/wheel_installer/wheel_test.py
+++ /dev/null
@@ -1,220 +0,0 @@
-import unittest
-
-from python.pip_install.tools.wheel_installer import wheel
-
-
-class DepsTest(unittest.TestCase):
-    def test_simple(self):
-        deps = wheel.Deps("foo")
-        deps.add("bar")
-
-        got = deps.build()
-
-        self.assertIsInstance(got, wheel.FrozenDeps)
-        self.assertEqual(["bar"], got.deps)
-        self.assertEqual({}, got.deps_select)
-
-    def test_can_add_os_specific_deps(self):
-        platforms = {
-            "linux_x86_64",
-            "osx_x86_64",
-            "windows_x86_64",
-        }
-        deps = wheel.Deps("foo", platforms=set(wheel.Platform.from_string(platforms)))
-        deps.add(
-            "bar",
-            "posix_dep; os_name=='posix'",
-            "win_dep; os_name=='nt'",
-        )
-
-        got = deps.build()
-
-        self.assertEqual(["bar"], got.deps)
-        self.assertEqual(
-            {
-                "@platforms//os:linux": ["posix_dep"],
-                "@platforms//os:osx": ["posix_dep"],
-                "@platforms//os:windows": ["win_dep"],
-            },
-            got.deps_select,
-        )
-
-    def test_can_add_platform_specific_deps(self):
-        platforms = {
-            "linux_x86_64",
-            "osx_x86_64",
-            "osx_aarch64",
-            "windows_x86_64",
-        }
-        deps = wheel.Deps("foo", platforms=set(wheel.Platform.from_string(platforms)))
-        deps.add(
-            "bar",
-            "posix_dep; os_name=='posix'",
-            "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
-            "win_dep; os_name=='nt'",
-        )
-
-        got = deps.build()
-
-        self.assertEqual(["bar"], got.deps)
-        self.assertEqual(
-            {
-                "osx_aarch64": ["m1_dep", "posix_dep"],
-                "@platforms//os:linux": ["posix_dep"],
-                "@platforms//os:osx": ["posix_dep"],
-                "@platforms//os:windows": ["win_dep"],
-            },
-            got.deps_select,
-        )
-
-    def test_non_platform_markers_are_added_to_common_deps(self):
-        platforms = {
-            "linux_x86_64",
-            "osx_x86_64",
-            "osx_aarch64",
-            "windows_x86_64",
-        }
-        deps = wheel.Deps("foo", platforms=set(wheel.Platform.from_string(platforms)))
-        deps.add(
-            "bar",
-            "baz; implementation_name=='cpython'",
-            "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
-        )
-
-        got = deps.build()
-
-        self.assertEqual(["bar", "baz"], got.deps)
-        self.assertEqual(
-            {
-                "osx_aarch64": ["m1_dep"],
-            },
-            got.deps_select,
-        )
-
-    def test_self_is_ignored(self):
-        deps = wheel.Deps("foo", extras={"ssl"})
-        deps.add(
-            "bar",
-            "req_dep; extra == 'requests'",
-            "foo[requests]; extra == 'ssl'",
-            "ssl_lib; extra == 'ssl'",
-        )
-
-        got = deps.build()
-
-        self.assertEqual(["bar", "req_dep", "ssl_lib"], got.deps)
-        self.assertEqual({}, got.deps_select)
-
-    def test_handle_etils(self):
-        deps = wheel.Deps("etils", extras={"all"})
-        requires = """
-etils[array-types] ; extra == "all"
-etils[eapp] ; extra == "all"
-etils[ecolab] ; extra == "all"
-etils[edc] ; extra == "all"
-etils[enp] ; extra == "all"
-etils[epath] ; extra == "all"
-etils[epath-gcs] ; extra == "all"
-etils[epath-s3] ; extra == "all"
-etils[epy] ; extra == "all"
-etils[etqdm] ; extra == "all"
-etils[etree] ; extra == "all"
-etils[etree-dm] ; extra == "all"
-etils[etree-jax] ; extra == "all"
-etils[etree-tf] ; extra == "all"
-etils[enp] ; extra == "array-types"
-pytest ; extra == "dev"
-pytest-subtests ; extra == "dev"
-pytest-xdist ; extra == "dev"
-pyink ; extra == "dev"
-pylint>=2.6.0 ; extra == "dev"
-chex ; extra == "dev"
-torch ; extra == "dev"
-optree ; extra == "dev"
-dataclass_array ; extra == "dev"
-sphinx-apitree[ext] ; extra == "docs"
-etils[dev,all] ; extra == "docs"
-absl-py ; extra == "eapp"
-simple_parsing ; extra == "eapp"
-etils[epy] ; extra == "eapp"
-jupyter ; extra == "ecolab"
-numpy ; extra == "ecolab"
-mediapy ; extra == "ecolab"
-packaging ; extra == "ecolab"
-etils[enp] ; extra == "ecolab"
-etils[epy] ; extra == "ecolab"
-etils[epy] ; extra == "edc"
-numpy ; extra == "enp"
-etils[epy] ; extra == "enp"
-fsspec ; extra == "epath"
-importlib_resources ; extra == "epath"
-typing_extensions ; extra == "epath"
-zipp ; extra == "epath"
-etils[epy] ; extra == "epath"
-gcsfs ; extra == "epath-gcs"
-etils[epath] ; extra == "epath-gcs"
-s3fs ; extra == "epath-s3"
-etils[epath] ; extra == "epath-s3"
-typing_extensions ; extra == "epy"
-absl-py ; extra == "etqdm"
-tqdm ; extra == "etqdm"
-etils[epy] ; extra == "etqdm"
-etils[array_types] ; extra == "etree"
-etils[epy] ; extra == "etree"
-etils[enp] ; extra == "etree"
-etils[etqdm] ; extra == "etree"
-dm-tree ; extra == "etree-dm"
-etils[etree] ; extra == "etree-dm"
-jax[cpu] ; extra == "etree-jax"
-etils[etree] ; extra == "etree-jax"
-tensorflow ; extra == "etree-tf"
-etils[etree] ; extra == "etree-tf"
-etils[ecolab] ; extra == "lazy-imports"
-"""
-
-        deps.add(*requires.strip().split("\n"))
-
-        got = deps.build()
-        want = [
-            "absl_py",
-            "dm_tree",
-            "fsspec",
-            "gcsfs",
-            "importlib_resources",
-            "jax",
-            "jupyter",
-            "mediapy",
-            "numpy",
-            "packaging",
-            "s3fs",
-            "simple_parsing",
-            "tensorflow",
-            "tqdm",
-            "typing_extensions",
-            "zipp",
-        ]
-
-        self.assertEqual(want, got.deps)
-        self.assertEqual({}, got.deps_select)
-
-
-class PlatformTest(unittest.TestCase):
-    def test_can_get_host(self):
-        host = wheel.Platform.host()
-        self.assertIsNotNone(host)
-        self.assertEqual(1, len(wheel.Platform.from_string("host")))
-        self.assertEqual(host, wheel.Platform.from_string("host"))
-
-    def test_can_get_all(self):
-        all_platforms = wheel.Platform.all()
-        self.assertEqual(15, len(all_platforms))
-        self.assertEqual(all_platforms, wheel.Platform.from_string("all"))
-
-    def test_can_get_all_for_os(self):
-        linuxes = wheel.Platform.all(wheel.OS.linux)
-        self.assertEqual(5, len(linuxes))
-        self.assertEqual(linuxes, wheel.Platform.from_string("linux_*"))
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/python/private/BUILD.bazel b/python/private/BUILD.bazel
index 25937f0..146e934 100644
--- a/python/private/BUILD.bazel
+++ b/python/private/BUILD.bazel
@@ -13,9 +13,11 @@
 # limitations under the License.
 
 load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("@bazel_skylib//rules:common_settings.bzl", "bool_setting")
 load("//python:py_binary.bzl", "py_binary")
 load("//python:py_library.bzl", "py_library")
 load("//python:versions.bzl", "print_toolchains_checksums")
+load(":py_exec_tools_toolchain.bzl", "current_interpreter_executable")
 load(":stamp.bzl", "stamp_build_setting")
 
 package(
@@ -27,9 +29,10 @@
 filegroup(
     name = "distribution",
     srcs = glob(["**"]) + [
-        "//python/private/bzlmod:distribution",
         "//python/private/common:distribution",
         "//python/private/proto:distribution",
+        "//python/private/pypi:distribution",
+        "//python/private/whl_filegroup:distribution",
         "//tools/build_defs/python/private:distribution",
     ],
     visibility = ["//python:__pkg__"],
@@ -55,9 +58,11 @@
 )
 
 bzl_library(
-    name = "autodetecting_toolchain_bzl",
-    srcs = ["autodetecting_toolchain.bzl"],
+    name = "runtime_env_toolchain_bzl",
+    srcs = ["runtime_env_toolchain.bzl"],
     deps = [
+        ":py_exec_tools_toolchain_bzl",
+        ":toolchain_types_bzl",
         "//python:py_runtime_bzl",
         "//python:py_runtime_pair_bzl",
     ],
@@ -69,6 +74,15 @@
 )
 
 bzl_library(
+    name = "config_settings_bzl",
+    srcs = ["config_settings.bzl"],
+    deps = [
+        "//python:versions_bzl",
+        "@bazel_skylib//lib:selects",
+    ],
+)
+
+bzl_library(
     name = "coverage_deps_bzl",
     srcs = ["coverage_deps.bzl"],
     deps = [
@@ -78,6 +92,25 @@
 )
 
 bzl_library(
+    name = "enum_bzl",
+    srcs = ["enum.bzl"],
+)
+
+bzl_library(
+    name = "envsubst_bzl",
+    srcs = ["envsubst.bzl"],
+)
+
+bzl_library(
+    name = "flags_bzl",
+    srcs = ["flags.bzl"],
+    deps = [
+        ":enum_bzl",
+        "@bazel_skylib//rules:common_settings",
+    ],
+)
+
+bzl_library(
     name = "full_version_bzl",
     srcs = ["full_version.bzl"],
     deps = ["//python:versions_bzl"],
@@ -95,14 +128,43 @@
 )
 
 bzl_library(
-    name = "patch_whl_bzl",
-    srcs = ["patch_whl.bzl"],
-    deps = [":parse_whl_name_bzl"],
+    name = "python_bzl",
+    srcs = ["python.bzl"],
+    deps = [
+        ":pythons_hub_bzl",
+        ":repo_utils_bzl",
+        ":toolchains_repo_bzl",
+        ":util_bzl",
+        "//python:repositories_bzl",
+        "@bazel_features//:features",
+    ],
 )
 
 bzl_library(
-    name = "parse_whl_name_bzl",
-    srcs = ["parse_whl_name.bzl"],
+    name = "python_repositories_bzl",
+    srcs = ["python_repositories.bzl"],
+    deps = [
+        "//python:versions_bzl",
+        "//python/private:auth_bzl",
+        "//python/private:bazel_tools_bzl",
+        "//python/private:bzlmod_enabled_bzl",
+        "//python/private:coverage_deps_bzl",
+        "//python/private:full_version_bzl",
+        "//python/private:internal_config_repo_bzl",
+        "//python/private:repo_utils_bzl",
+        "//python/private:toolchains_repo_bzl",
+        "//python/private/pypi:deps_bzl",
+    ],
+)
+
+bzl_library(
+    name = "pythons_hub_bzl",
+    srcs = ["pythons_hub.bzl"],
+    deps = [
+        ":full_version_bzl",
+        ":py_toolchain_suite_bzl",
+        "//python:versions_bzl",
+    ],
 )
 
 bzl_library(
@@ -117,7 +179,9 @@
     ],
     deps = [
         ":py_cc_toolchain_info_bzl",
+        ":rules_cc_srcs_bzl",
         ":util_bzl",
+        "@bazel_skylib//rules:common_settings",
     ],
 )
 
@@ -140,6 +204,22 @@
 )
 
 bzl_library(
+    name = "py_exec_tools_toolchain_bzl",
+    srcs = ["py_exec_tools_toolchain.bzl"],
+    deps = [
+        ":toolchain_types_bzl",
+        "//python/private/common:providers_bzl",
+        "@bazel_skylib//rules:common_settings",
+    ],
+)
+
+bzl_library(
+    name = "py_interpreter_program_bzl",
+    srcs = ["py_interpreter_program.bzl"],
+    deps = ["@bazel_skylib//rules:common_settings"],
+)
+
+bzl_library(
     name = "py_package_bzl",
     srcs = ["py_package.bzl"],
     visibility = ["//:__subpackages__"],
@@ -158,6 +238,18 @@
     deps = [
         "//python:py_runtime_bzl",
         "//python:py_runtime_info_bzl",
+        "@bazel_skylib//rules:common_settings",
+    ],
+)
+
+bzl_library(
+    name = "py_toolchain_suite_bzl",
+    srcs = ["py_toolchain_suite.bzl"],
+    deps = [
+        ":config_settings_bzl",
+        ":text_util_bzl",
+        ":toolchain_types_bzl",
+        "@bazel_skylib//lib:selects",
     ],
 )
 
@@ -186,13 +278,8 @@
 )
 
 bzl_library(
-    name = "render_pkg_aliases_bzl",
-    srcs = ["render_pkg_aliases.bzl"],
-    deps = [
-        ":normalize_name_bzl",
-        ":text_util_bzl",
-        ":version_label_bzl",
-    ],
+    name = "repo_utils_bzl",
+    srcs = ["repo_utils.bzl"],
 )
 
 bzl_library(
@@ -210,12 +297,17 @@
     name = "toolchains_repo_bzl",
     srcs = ["toolchains_repo.bzl"],
     deps = [
-        ":which_bzl",
+        ":repo_utils_bzl",
         "//python:versions_bzl",
     ],
 )
 
 bzl_library(
+    name = "toolchain_types_bzl",
+    srcs = ["toolchain_types.bzl"],
+)
+
+bzl_library(
     name = "util_bzl",
     srcs = ["util.bzl"],
     visibility = [
@@ -229,26 +321,6 @@
     srcs = ["version_label.bzl"],
 )
 
-bzl_library(
-    name = "which_bzl",
-    srcs = ["which.bzl"],
-    visibility = [
-        "//docs:__subpackages__",
-        "//python:__subpackages__",
-    ],
-)
-
-bzl_library(
-    name = "whl_target_platforms_bzl",
-    srcs = ["whl_target_platforms.bzl"],
-    visibility = ["//:__subpackages__"],
-)
-
-bzl_library(
-    name = "labels_bzl",
-    srcs = ["labels.bzl"],
-)
-
 # @bazel_tools can't define bzl_library itself, so we just put a wrapper around it.
 bzl_library(
     name = "bazel_tools_bzl",
@@ -260,6 +332,13 @@
     ],
 )
 
+# @rules_cc does not offer a bzl_library target for @rules_cc//cc:defs.bzl
+bzl_library(
+    name = "rules_cc_srcs_bzl",
+    srcs = ["@rules_cc//cc:bzl_srcs"],
+    deps = [":bazel_tools_bzl"],
+)
+
 # Needed to define bzl_library targets for docgen. (We don't define the
 # bzl_library target here because it'd give our users a transitive dependency
 # on Skylib.)
@@ -285,9 +364,63 @@
     visibility = ["//visibility:public"],
 )
 
+filegroup(
+    name = "stage1_bootstrap_template",
+    srcs = ["stage1_bootstrap_template.sh"],
+    # Not actually public. Only public because it's an implicit dependency of
+    # py_runtime.
+    visibility = ["//visibility:public"],
+)
+
+filegroup(
+    name = "stage2_bootstrap_template",
+    srcs = ["stage2_bootstrap_template.py"],
+    # Not actually public. Only public because it's an implicit dependency of
+    # py_runtime.
+    visibility = ["//visibility:public"],
+)
+
+filegroup(
+    name = "zip_main_template",
+    srcs = ["zip_main_template.py"],
+    # Not actually public. Only public because it's an implicit dependency of
+    # py_runtime.
+    visibility = ["//visibility:public"],
+)
+
+# NOTE: Windows builds don't use this bootstrap. Instead, a native Windows
+# program locates some Python exe and runs `python.exe foo.zip` which
+# runs the __main__.py in the zip file.
+alias(
+    name = "bootstrap_template",
+    actual = select({
+        ":is_script_bootstrap_enabled": "stage1_bootstrap_template.sh",
+        "//conditions:default": "python_bootstrap_template.txt",
+    }),
+    # Not actually public. Only public because it's an implicit dependency of
+    # py_runtime.
+    visibility = ["//visibility:public"],
+)
+
 # Used to determine the use of `--stamp` in Starlark rules
 stamp_build_setting(name = "stamp")
 
+config_setting(
+    name = "is_script_bootstrap_enabled",
+    flag_values = {
+        "//python/config_settings:bootstrap_impl": "script",
+    },
+)
+
+# This should only be set by analysis tests to expose additional metadata to
+# aid testing, so a setting instead of a flag.
+bool_setting(
+    name = "visible_for_testing",
+    build_setting_default = False,
+    # This is only because it is an implicit dependency by the toolchains.
+    visibility = ["//visibility:public"],
+)
+
 print_toolchains_checksums(name = "print_toolchains_checksums")
 
 # Used for py_console_script_gen rule
@@ -308,3 +441,12 @@
         "//tests/entry_points:__pkg__",
     ],
 )
+
+# The current toolchain's interpreter as an excutable, usable with
+# executable=True attributes.
+current_interpreter_executable(
+    name = "current_interpreter_executable",
+    # Not actually public. Only public because it's an implicit dependency of
+    # py_exec_tools_toolchain.
+    visibility = ["//visibility:public"],
+)
diff --git a/python/private/auth.bzl b/python/private/auth.bzl
index 39ada37..6b61267 100644
--- a/python/private/auth.bzl
+++ b/python/private/auth.bzl
@@ -17,26 +17,90 @@
 The implementation below is copied directly from Bazel's implementation of `http_archive`.
 Accordingly, the return value of this function should be used identically as the `auth` parameter of `http_archive`.
 Reference: https://github.com/bazelbuild/bazel/blob/6.3.2/tools/build_defs/repo/http.bzl#L109
+
+The helpers were further modified to support module_ctx.
 """
 
-# TODO @aignas 2023-12-18: use the following instead when available.
-# load("@bazel_tools//tools/build_defs/repo:utils.bzl", "get_auth")
 load("@bazel_tools//tools/build_defs/repo:utils.bzl", "read_netrc", "read_user_netrc", "use_netrc")
 
-def get_auth(rctx, urls):
+# Copied from https://sourcegraph.com/github.com/bazelbuild/bazel@26c6add3f9809611ad3795bce1e5c0fb37902902/-/blob/tools/build_defs/repo/http.bzl
+_AUTH_PATTERN_DOC = """An optional dict mapping host names to custom authorization patterns.
+
+If a URL's host name is present in this dict the value will be used as a pattern when
+generating the authorization header for the http request. This enables the use of custom
+authorization schemes used in a lot of common cloud storage providers.
+
+The pattern currently supports 2 tokens: <code>&lt;login&gt;</code> and
+<code>&lt;password&gt;</code>, which are replaced with their equivalent value
+in the netrc file for the same host name. After formatting, the result is set
+as the value for the <code>Authorization</code> field of the HTTP request.
+
+Example attribute and netrc for a http download to an oauth2 enabled API using a bearer token:
+
+<pre>
+auth_patterns = {
+    "storage.cloudprovider.com": "Bearer &lt;password&gt;"
+}
+</pre>
+
+netrc:
+
+<pre>
+machine storage.cloudprovider.com
+        password RANDOM-TOKEN
+</pre>
+
+The final HTTP request would have the following header:
+
+<pre>
+Authorization: Bearer RANDOM-TOKEN
+</pre>
+"""
+
+# AUTH_ATTRS are used within whl_library and pip bzlmod extension.
+AUTH_ATTRS = {
+    "auth_patterns": attr.string_dict(
+        doc = _AUTH_PATTERN_DOC,
+    ),
+    "netrc": attr.string(
+        doc = "Location of the .netrc file to use for authentication",
+    ),
+}
+
+def get_auth(ctx, urls, ctx_attr = None):
     """Utility for retrieving netrc-based authentication parameters for repository download rules used in python_repository.
 
     Args:
-        rctx (repository_ctx): The repository rule's context object.
+        ctx(repository_ctx or module_ctx): The extension module_ctx or
+            repository rule's repository_ctx object.
         urls: A list of URLs from which assets will be downloaded.
+        ctx_attr(struct): The attributes to get the netrc from. When ctx is
+            repository_ctx, then we will attempt to use repository_ctx.attr
+            if this is not specified, otherwise we will use the specified
+            field. The module_ctx attributes are located in the tag classes
+            so it cannot be retrieved from the context.
 
     Returns:
         dict: A map of authentication parameters by URL.
     """
-    if rctx.attr.netrc:
-        netrc = read_netrc(rctx, rctx.attr.netrc)
-    elif "NETRC" in rctx.os.environ:
-        netrc = read_netrc(rctx, rctx.os.environ["NETRC"])
+
+    # module_ctx does not have attributes, as they are stored in tag classes, so
+    # callers must pass ctx_attr explicitly; for repository_ctx we fall back to ctx.attr.
+    ctx_attr = ctx_attr or getattr(ctx, "attr", None)
+    ctx_attr = struct(
+        netrc = getattr(ctx_attr, "netrc", None),
+        auth_patterns = getattr(ctx_attr, "auth_patterns", ""),
+    )
+
+    if ctx_attr.netrc:
+        netrc = read_netrc(ctx, ctx_attr.netrc)
+    elif "NETRC" in ctx.os.environ:
+        # This can be used on newer bazel versions
+        if hasattr(ctx, "getenv"):
+            netrc = read_netrc(ctx, ctx.getenv("NETRC"))
+        else:
+            netrc = read_netrc(ctx, ctx.os.environ["NETRC"])
     else:
-        netrc = read_user_netrc(rctx)
-    return use_netrc(netrc, urls, rctx.attr.auth_patterns)
+        netrc = read_user_netrc(ctx)
+
+    return use_netrc(netrc, urls, ctx_attr.auth_patterns)
diff --git a/python/private/autodetecting_toolchain.bzl b/python/private/autodetecting_toolchain.bzl
deleted file mode 100644
index 3caa5aa..0000000
--- a/python/private/autodetecting_toolchain.bzl
+++ /dev/null
@@ -1,70 +0,0 @@
-# Copyright 2019 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Definitions related to the Python toolchain."""
-
-load("//python:py_runtime.bzl", "py_runtime")
-load("//python:py_runtime_pair.bzl", "py_runtime_pair")
-
-def define_autodetecting_toolchain(name):
-    """Defines the autodetecting Python toolchain.
-
-    Args:
-        name: The name of the toolchain to introduce. Must have value
-            "autodetecting_toolchain". This param is present only to make the
-            BUILD file more readable.
-    """
-    if name != "autodetecting_toolchain":
-        fail("Python autodetecting toolchain must be named " +
-             "'autodetecting_toolchain'")
-
-    # buildifier: disable=native-py
-    py_runtime(
-        name = "_autodetecting_py3_runtime",
-        interpreter = ":py3wrapper.sh",
-        python_version = "PY3",
-        stub_shebang = "#!/usr/bin/env python3",
-        visibility = ["//visibility:private"],
-    )
-
-    # This is a dummy runtime whose interpreter_path triggers the native rule
-    # logic to use the legacy behavior on Windows.
-    # TODO(#7844): Remove this target.
-    # buildifier: disable=native-py
-    py_runtime(
-        name = "_magic_sentinel_runtime",
-        interpreter_path = "/_magic_pyruntime_sentinel_do_not_use",
-        python_version = "PY3",
-        visibility = ["//visibility:private"],
-    )
-
-    py_runtime_pair(
-        name = "_autodetecting_py_runtime_pair",
-        py3_runtime = select({
-            # If we're on windows, inject the sentinel to tell native rule logic
-            # that we attempted to use the autodetecting toolchain and need to
-            # switch back to legacy behavior.
-            # TODO(#7844): Remove this hack.
-            "@platforms//os:windows": ":_magic_sentinel_runtime",
-            "//conditions:default": ":_autodetecting_py3_runtime",
-        }),
-        visibility = ["//visibility:public"],
-    )
-
-    native.toolchain(
-        name = name,
-        toolchain = ":_autodetecting_py_runtime_pair",
-        toolchain_type = ":toolchain_type",
-        visibility = ["//visibility:public"],
-    )
diff --git a/python/private/bzlmod/BUILD.bazel b/python/private/bzlmod/BUILD.bazel
deleted file mode 100644
index a312922..0000000
--- a/python/private/bzlmod/BUILD.bazel
+++ /dev/null
@@ -1,78 +0,0 @@
-# Copyright 2022 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
-load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
-
-package(default_visibility = ["//:__subpackages__"])
-
-licenses(["notice"])
-
-filegroup(
-    name = "distribution",
-    srcs = glob(["**"]),
-    visibility = ["//python/private:__pkg__"],
-)
-
-bzl_library(
-    name = "pip_bzl",
-    srcs = ["pip.bzl"],
-    deps = [
-        ":pip_repository_bzl",
-        "//python/pip_install:pip_repository_bzl",
-        "//python/pip_install:requirements_parser_bzl",
-        "//python/private:full_version_bzl",
-        "//python/private:normalize_name_bzl",
-        "//python/private:parse_whl_name_bzl",
-        "//python/private:version_label_bzl",
-        ":bazel_features_bzl",
-    ] + [
-        "@pythons_hub//:interpreters_bzl",
-    ] if BZLMOD_ENABLED else [],
-)
-
-bzl_library(
-    name = "bazel_features_bzl",
-    srcs = ["@bazel_features//:bzl_files"] if BZLMOD_ENABLED else [],
-)
-
-bzl_library(
-    name = "pip_repository_bzl",
-    srcs = ["pip_repository.bzl"],
-    visibility = ["//:__subpackages__"],
-    deps = [
-        "//python/private:render_pkg_aliases_bzl",
-        "//python/private:text_util_bzl",
-    ],
-)
-
-bzl_library(
-    name = "python_bzl",
-    srcs = ["python.bzl"],
-    deps = [
-        ":pythons_hub_bzl",
-        "//python:repositories_bzl",
-        "//python/private:toolchains_repo_bzl",
-    ],
-)
-
-bzl_library(
-    name = "pythons_hub_bzl",
-    srcs = ["pythons_hub.bzl"],
-    deps = [
-        "//python:versions_bzl",
-        "//python/private:full_version_bzl",
-        "//python/private:toolchains_repo_bzl",
-    ],
-)
diff --git a/python/private/bzlmod/internal_deps.bzl b/python/private/bzlmod/internal_deps.bzl
deleted file mode 100644
index aadf2cc..0000000
--- a/python/private/bzlmod/internal_deps.bzl
+++ /dev/null
@@ -1,25 +0,0 @@
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"Python toolchain module extension for internal rule use"
-
-load("//python/pip_install:repositories.bzl", "pip_install_dependencies")
-load("//python/private:internal_config_repo.bzl", "internal_config_repo")
-
-# buildifier: disable=unused-variable
-def _internal_deps_impl(module_ctx):
-    internal_config_repo(name = "rules_python_internal")
-    pip_install_dependencies()
-
-internal_deps = module_extension(
-    doc = "This extension to register internal rules_python dependecies.",
-    implementation = _internal_deps_impl,
-    tag_classes = {
-        "install": tag_class(attrs = dict()),
-    },
-)
diff --git a/python/private/bzlmod/pip.bzl b/python/private/bzlmod/pip.bzl
deleted file mode 100644
index 6d45a26..0000000
--- a/python/private/bzlmod/pip.bzl
+++ /dev/null
@@ -1,558 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"pip module extension for use with bzlmod"
-
-load("@bazel_features//:features.bzl", "bazel_features")
-load("@pythons_hub//:interpreters.bzl", "DEFAULT_PYTHON_VERSION", "INTERPRETER_LABELS")
-load(
-    "//python/pip_install:pip_repository.bzl",
-    "group_library",
-    "locked_requirements_label",
-    "pip_repository_attrs",
-    "use_isolated",
-    "whl_library",
-)
-load("//python/pip_install:requirements_parser.bzl", parse_requirements = "parse")
-load("//python/private:full_version.bzl", "full_version")
-load("//python/private:normalize_name.bzl", "normalize_name")
-load("//python/private:parse_whl_name.bzl", "parse_whl_name")
-load("//python/private:version_label.bzl", "version_label")
-load(":pip_repository.bzl", "pip_repository")
-
-def _whl_mods_impl(mctx):
-    """Implementation of the pip.whl_mods tag class.
-
-    This creates the JSON files used to modify the creation of different wheels.
-"""
-    whl_mods_dict = {}
-    for mod in mctx.modules:
-        for whl_mod_attr in mod.tags.whl_mods:
-            if whl_mod_attr.hub_name not in whl_mods_dict.keys():
-                whl_mods_dict[whl_mod_attr.hub_name] = {whl_mod_attr.whl_name: whl_mod_attr}
-            elif whl_mod_attr.whl_name in whl_mods_dict[whl_mod_attr.hub_name].keys():
-                # We cannot have the same wheel name in the same hub, as we
-                # will create the same JSON file name.
-                fail("""\
-Found same whl_name '{}' in the same hub '{}', please use a different hub_name.""".format(
-                    whl_mod_attr.whl_name,
-                    whl_mod_attr.hub_name,
-                ))
-            else:
-                whl_mods_dict[whl_mod_attr.hub_name][whl_mod_attr.whl_name] = whl_mod_attr
-
-    for hub_name, whl_maps in whl_mods_dict.items():
-        whl_mods = {}
-
-        # create a struct that we can pass to the _whl_mods_repo rule
-        # to create the different JSON files.
-        for whl_name, mods in whl_maps.items():
-            build_content = mods.additive_build_content
-            if mods.additive_build_content_file != None and mods.additive_build_content != "":
-                fail("""\
-You cannot use both the additive_build_content and additive_build_content_file arguments at the same time.
-""")
-            elif mods.additive_build_content_file != None:
-                build_content = mctx.read(mods.additive_build_content_file)
-
-            whl_mods[whl_name] = json.encode(struct(
-                additive_build_content = build_content,
-                copy_files = mods.copy_files,
-                copy_executables = mods.copy_executables,
-                data = mods.data,
-                data_exclude_glob = mods.data_exclude_glob,
-                srcs_exclude_glob = mods.srcs_exclude_glob,
-            ))
-
-        _whl_mods_repo(
-            name = hub_name,
-            whl_mods = whl_mods,
-        )
-
-def _create_whl_repos(module_ctx, pip_attr, whl_map, whl_overrides):
-    python_interpreter_target = pip_attr.python_interpreter_target
-
-    # if we do not have the python_interpreter set in the attributes
-    # we programmatically find it.
-    hub_name = pip_attr.hub_name
-    if python_interpreter_target == None and not pip_attr.python_interpreter:
-        python_name = "python_" + version_label(pip_attr.python_version, sep = "_")
-        if python_name not in INTERPRETER_LABELS.keys():
-            fail((
-                "Unable to find interpreter for pip hub '{hub_name}' for " +
-                "python_version={version}: Make sure a corresponding " +
-                '`python.toolchain(python_version="{version}")` call exists'
-            ).format(
-                hub_name = hub_name,
-                version = pip_attr.python_version,
-            ))
-        python_interpreter_target = INTERPRETER_LABELS[python_name]
-
-    pip_name = "{}_{}".format(
-        hub_name,
-        version_label(pip_attr.python_version),
-    )
-    requrements_lock = locked_requirements_label(module_ctx, pip_attr)
-
-    # Parse the requirements file directly in starlark to get the information
-    # needed for the whl_libary declarations below.
-    requirements_lock_content = module_ctx.read(requrements_lock)
-    parse_result = parse_requirements(requirements_lock_content)
-
-    # Replicate a surprising behavior that WORKSPACE builds allowed:
-    # Defining a repo with the same name multiple times, but only the last
-    # definition is respected.
-    # The requirement lines might have duplicate names because lines for extras
-    # are returned as just the base package name. e.g., `foo[bar]` results
-    # in an entry like `("foo", "foo[bar] == 1.0 ...")`.
-    requirements = {
-        normalize_name(entry[0]): entry
-        # The WORKSPACE pip_parse sorted entries, so mimic that ordering.
-        for entry in sorted(parse_result.requirements)
-    }.values()
-
-    extra_pip_args = pip_attr.extra_pip_args + parse_result.options
-
-    if hub_name not in whl_map:
-        whl_map[hub_name] = {}
-
-    whl_modifications = {}
-    if pip_attr.whl_modifications != None:
-        for mod, whl_name in pip_attr.whl_modifications.items():
-            whl_modifications[whl_name] = mod
-
-    requirement_cycles = {
-        name: [normalize_name(whl_name) for whl_name in whls]
-        for name, whls in pip_attr.experimental_requirement_cycles.items()
-    }
-
-    whl_group_mapping = {
-        whl_name: group_name
-        for group_name, group_whls in requirement_cycles.items()
-        for whl_name in group_whls
-    }
-
-    group_repo = "%s__groups" % (pip_name,)
-    group_library(
-        name = group_repo,
-        repo_prefix = pip_name + "_",
-        groups = pip_attr.experimental_requirement_cycles,
-    )
-
-    # Create a new wheel library for each of the different whls
-    for whl_name, requirement_line in requirements:
-        # We are not using the "sanitized name" because the user
-        # would need to guess what name we modified the whl name
-        # to.
-        annotation = whl_modifications.get(whl_name)
-        whl_name = normalize_name(whl_name)
-        group_name = whl_group_mapping.get(whl_name)
-        group_deps = requirement_cycles.get(group_name, [])
-
-        whl_library(
-            name = "%s_%s" % (pip_name, whl_name),
-            requirement = requirement_line,
-            repo = pip_name,
-            repo_prefix = pip_name + "_",
-            annotation = annotation,
-            whl_patches = {
-                p: json.encode(args)
-                for p, args in whl_overrides.get(whl_name, {}).items()
-            },
-            experimental_target_platforms = pip_attr.experimental_target_platforms,
-            python_interpreter = pip_attr.python_interpreter,
-            python_interpreter_target = python_interpreter_target,
-            quiet = pip_attr.quiet,
-            timeout = pip_attr.timeout,
-            isolated = use_isolated(module_ctx, pip_attr),
-            extra_pip_args = extra_pip_args,
-            download_only = pip_attr.download_only,
-            pip_data_exclude = pip_attr.pip_data_exclude,
-            enable_implicit_namespace_pkgs = pip_attr.enable_implicit_namespace_pkgs,
-            environment = pip_attr.environment,
-            group_name = group_name,
-            group_deps = group_deps,
-        )
-
-        if whl_name not in whl_map[hub_name]:
-            whl_map[hub_name][whl_name] = {}
-
-        whl_map[hub_name][whl_name][full_version(pip_attr.python_version)] = pip_name + "_"
-
-def _pip_impl(module_ctx):
-    """Implementation of a class tag that creates the pip hub and corresponding pip spoke whl repositories.
-
-    This implementation iterates through all of the `pip.parse` calls and creates
-    different pip hub repositories based on the "hub_name".  Each of the
-    pip calls create spoke repos that uses a specific Python interpreter.
-
-    In a MODULES.bazel file we have:
-
-    pip.parse(
-        hub_name = "pip",
-        python_version = 3.9,
-        requirements_lock = "//:requirements_lock_3_9.txt",
-        requirements_windows = "//:requirements_windows_3_9.txt",
-    )
-    pip.parse(
-        hub_name = "pip",
-        python_version = 3.10,
-        requirements_lock = "//:requirements_lock_3_10.txt",
-        requirements_windows = "//:requirements_windows_3_10.txt",
-    )
-
-    For instance, we have a hub with the name of "pip".
-    A repository named the following is created. It is actually called last when
-    all of the pip spokes are collected.
-
-    - @@rules_python~override~pip~pip
-
-    As shown in the example code above we have the following.
-    Two different pip.parse statements exist in MODULE.bazel provide the hub_name "pip".
-    These definitions create two different pip spoke repositories that are
-    related to the hub "pip".
-    One spoke uses Python 3.9 and the other uses Python 3.10. This code automatically
-    determines the Python version and the interpreter.
-    Both of these pip spokes contain requirements files that includes websocket
-    and its dependencies.
-
-    We also need repositories for the wheels that the different pip spokes contain.
-    For each Python version a different wheel repository is created. In our example
-    each pip spoke had a requirements file that contained websockets. We
-    then create two different wheel repositories that are named the following.
-
-    - @@rules_python~override~pip~pip_39_websockets
-    - @@rules_python~override~pip~pip_310_websockets
-
-    And if the wheel has any other dependencies subsequent wheels are created in the same fashion.
-
-    The hub repository has aliases for `pkg`, `data`, etc, which have a select that resolves to
-    a spoke repository depending on the Python version.
-
-    Also we may have more than one hub as defined in a MODULES.bazel file.  So we could have multiple
-    hubs pointing to various different pip spokes.
-
-    Some other business rules notes. A hub can only have one spoke per Python version.  We cannot
-    have a hub named "pip" that has two spokes that use the Python 3.9 interpreter.  Second
-    we cannot have the same hub name used in sub-modules.  The hub name has to be globally
-    unique.
-
-    This implementation also handles the creation of whl_modification JSON files that are used
-    during the creation of wheel libraries. These JSON files used via the annotations argument
-    when calling wheel_installer.py.
-
-    Args:
-        module_ctx: module contents
-    """
-
-    # Build all of the wheel modifications if the tag class is called.
-    _whl_mods_impl(module_ctx)
-
-    _overriden_whl_set = {}
-    whl_overrides = {}
-
-    for module in module_ctx.modules:
-        for attr in module.tags.override:
-            if not module.is_root:
-                fail("overrides are only supported in root modules")
-
-            if not attr.file.endswith(".whl"):
-                fail("Only whl overrides are supported at this time")
-
-            whl_name = normalize_name(parse_whl_name(attr.file).distribution)
-
-            if attr.file in _overriden_whl_set:
-                fail("Duplicate module overrides for '{}'".format(attr.file))
-            _overriden_whl_set[attr.file] = None
-
-            for patch in attr.patches:
-                if whl_name not in whl_overrides:
-                    whl_overrides[whl_name] = {}
-
-                if patch not in whl_overrides[whl_name]:
-                    whl_overrides[whl_name][patch] = struct(
-                        patch_strip = attr.patch_strip,
-                        whls = [],
-                    )
-
-                whl_overrides[whl_name][patch].whls.append(attr.file)
-
-    # Used to track all the different pip hubs and the spoke pip Python
-    # versions.
-    pip_hub_map = {}
-
-    # Keeps track of all the hub's whl repos across the different versions.
-    # dict[hub, dict[whl, dict[version, str pip]]]
-    # Where hub, whl, and pip are the repo names
-    hub_whl_map = {}
-
-    for mod in module_ctx.modules:
-        for pip_attr in mod.tags.parse:
-            hub_name = pip_attr.hub_name
-            if hub_name not in pip_hub_map:
-                pip_hub_map[pip_attr.hub_name] = struct(
-                    module_name = mod.name,
-                    python_versions = [pip_attr.python_version],
-                )
-            elif pip_hub_map[hub_name].module_name != mod.name:
-                # We cannot have two hubs with the same name in different
-                # modules.
-                fail((
-                    "Duplicate cross-module pip hub named '{hub}': pip hub " +
-                    "names must be unique across modules. First defined " +
-                    "by module '{first_module}', second attempted by " +
-                    "module '{second_module}'"
-                ).format(
-                    hub = hub_name,
-                    first_module = pip_hub_map[hub_name].module_name,
-                    second_module = mod.name,
-                ))
-
-            elif pip_attr.python_version in pip_hub_map[hub_name].python_versions:
-                fail((
-                    "Duplicate pip python version '{version}' for hub " +
-                    "'{hub}' in module '{module}': the Python versions " +
-                    "used for a hub must be unique"
-                ).format(
-                    hub = hub_name,
-                    module = mod.name,
-                    version = pip_attr.python_version,
-                ))
-            else:
-                pip_hub_map[pip_attr.hub_name].python_versions.append(pip_attr.python_version)
-
-            _create_whl_repos(module_ctx, pip_attr, hub_whl_map, whl_overrides)
-
-    for hub_name, whl_map in hub_whl_map.items():
-        pip_repository(
-            name = hub_name,
-            repo_name = hub_name,
-            whl_map = whl_map,
-            default_version = full_version(DEFAULT_PYTHON_VERSION),
-        )
-
-def _pip_parse_ext_attrs():
-    attrs = dict({
-        "hub_name": attr.string(
-            mandatory = True,
-            doc = """
-The name of the repo pip dependencies will be accessible from.
-
-This name must be unique between modules; unless your module is guaranteed to
-always be the root module, it's highly recommended to include your module name
-in the hub name. Repo mapping, `use_repo(..., pip="my_modules_pip_deps")`, can
-be used for shorter local names within your module.
-
-Within a module, the same `hub_name` can be specified to group different Python
-versions of pip dependencies under one repository name. This allows using a
-Python version-agnostic name when referring to pip dependencies; the
-correct version will be automatically selected.
-
-Typically, a module will only have a single hub of pip dependencies, but this
-is not required. Each hub is a separate resolution of pip dependencies. This
-means if different programs need different versions of some library, separate
-hubs can be created, and each program can use its respective hub's targets.
-Targets from different hubs should not be used together.
-""",
-        ),
-        "python_version": attr.string(
-            mandatory = True,
-            doc = """
-The Python version the dependencies are targetting, in Major.Minor format
-(e.g., "3.11"). Patch level granularity (e.g. "3.11.1") is not supported.
-If not specified, then the default Python version (as set by the root module or
-rules_python) will be used.
-
-If an interpreter isn't explicitly provided (using `python_interpreter` or
-`python_interpreter_target`), then the version specified here must have
-a corresponding `python.toolchain()` configured.
-""",
-        ),
-        "whl_modifications": attr.label_keyed_string_dict(
-            mandatory = False,
-            doc = """\
-A dict of labels to wheel names that is typically generated by the whl_modifications.
-The labels are JSON config files describing the modifications.
-""",
-        ),
-    }, **pip_repository_attrs)
-
-    # Like the pip_repository rule, we end up setting this manually so
-    # don't allow users to override it.
-    attrs.pop("repo_prefix")
-
-    # incompatible_generate_aliases is always True in bzlmod
-    attrs.pop("incompatible_generate_aliases")
-
-    return attrs
-
-def _whl_mod_attrs():
-    attrs = {
-        "additive_build_content": attr.string(
-            doc = "(str, optional): Raw text to add to the generated `BUILD` file of a package.",
-        ),
-        "additive_build_content_file": attr.label(
-            doc = """\
-(label, optional): path to a BUILD file to add to the generated
-`BUILD` file of a package. You cannot use both additive_build_content and additive_build_content_file
-arguments at the same time.""",
-        ),
-        "copy_executables": attr.string_dict(
-            doc = """\
-(dict, optional): A mapping of `src` and `out` files for
-[@bazel_skylib//rules:copy_file.bzl][cf]. Targets generated here will also be flagged as
-executable.""",
-        ),
-        "copy_files": attr.string_dict(
-            doc = """\
-(dict, optional): A mapping of `src` and `out` files for
-[@bazel_skylib//rules:copy_file.bzl][cf]""",
-        ),
-        "data": attr.string_list(
-            doc = """\
-(list, optional): A list of labels to add as `data` dependencies to
-the generated `py_library` target.""",
-        ),
-        "data_exclude_glob": attr.string_list(
-            doc = """\
-(list, optional): A list of exclude glob patterns to add as `data` to
-the generated `py_library` target.""",
-        ),
-        "hub_name": attr.string(
-            doc = """\
-Name of the whl modification, hub we use this name to set the modifications for
-pip.parse. If you have different pip hubs you can use a different name,
-otherwise it is best practice to just use one.
-
-You cannot have the same `hub_name` in different modules.  You can reuse the same
-name in the same module for different wheels that you put in the same hub, but you
-cannot have a child module that uses the same `hub_name`.
-""",
-            mandatory = True,
-        ),
-        "srcs_exclude_glob": attr.string_list(
-            doc = """\
-(list, optional): A list of labels to add as `srcs` to the generated
-`py_library` target.""",
-        ),
-        "whl_name": attr.string(
-            doc = "The whl name that the modifications are used for.",
-            mandatory = True,
-        ),
-    }
-    return attrs
-
-# NOTE: the naming of 'override' is taken from the bzlmod native
-# 'archive_override', 'git_override' bzlmod functions.
-_override_tag = tag_class(
-    attrs = {
-        "file": attr.string(
-            doc = """\
-The Python distribution file name which needs to be patched. This will be
-applied to all repositories that setup this distribution via the pip.parse tag
-class.""",
-            mandatory = True,
-        ),
-        "patch_strip": attr.int(
-            default = 0,
-            doc = """\
-The number of leading path segments to be stripped from the file name in the
-patches.""",
-        ),
-        "patches": attr.label_list(
-            doc = """\
-A list of patches to apply to the repository *after* 'whl_library' is extracted
-and BUILD.bazel file is generated.""",
-            mandatory = True,
-        ),
-    },
-    doc = """\
-Apply any overrides (e.g. patches) to a given Python distribution defined by
-other tags in this extension.""",
-)
-
-def _extension_extra_args():
-    args = {}
-
-    if bazel_features.external_deps.module_extension_has_os_arch_dependent:
-        args = args | {
-            "arch_dependent": True,
-            "os_dependent": True,
-        }
-
-    return args
-
-pip = module_extension(
-    doc = """\
-This extension is used to make dependencies from pip available.
-
-pip.parse:
-To use, call `pip.parse()` and specify `hub_name` and your requirements file.
-Dependencies will be downloaded and made available in a repo named after the
-`hub_name` argument.
-
-Each `pip.parse()` call configures a particular Python version. Multiple calls
-can be made to configure different Python versions, and will be grouped by
-the `hub_name` argument. This allows the same logical name, e.g. `@pip//numpy`
-to automatically resolve to different, Python version-specific, libraries.
-
-pip.whl_mods:
-This tag class is used to help create JSON files to describe modifications to
-the BUILD files for wheels.
-""",
-    implementation = _pip_impl,
-    tag_classes = {
-        "override": _override_tag,
-        "parse": tag_class(
-            attrs = _pip_parse_ext_attrs(),
-            doc = """\
-This tag class is used to create a pip hub and all of the spokes that are part of that hub.
-This tag class reuses most of the pip attributes that are found in
-@rules_python//python/pip_install:pip_repository.bzl.
-The exceptions are it does not use the args 'repo_prefix',
-and 'incompatible_generate_aliases'.  We set the repository prefix
-for the user and the alias arg is always True in bzlmod.
-""",
-        ),
-        "whl_mods": tag_class(
-            attrs = _whl_mod_attrs(),
-            doc = """\
-This tag class is used to create JSON file that are used when calling wheel_builder.py.  These
-JSON files contain instructions on how to modify a wheel's project.  Each of the attributes
-create different modifications based on the type of attribute. Previously to bzlmod these
-JSON files where referred to as annotations, and were renamed to whl_modifications in this
-extension.
-""",
-        ),
-    },
-    **_extension_extra_args()
-)
-
-def _whl_mods_repo_impl(rctx):
-    rctx.file("BUILD.bazel", "")
-    for whl_name, mods in rctx.attr.whl_mods.items():
-        rctx.file("{}.json".format(whl_name), mods)
-
-_whl_mods_repo = repository_rule(
-    doc = """\
-This rule creates json files based on the whl_mods attribute.
-""",
-    implementation = _whl_mods_repo_impl,
-    attrs = {
-        "whl_mods": attr.string_dict(
-            mandatory = True,
-            doc = "JSON endcoded string that is provided to wheel_builder.py",
-        ),
-    },
-)
diff --git a/python/private/bzlmod/pip_repository.bzl b/python/private/bzlmod/pip_repository.bzl
deleted file mode 100644
index 9e6b0f4..0000000
--- a/python/private/bzlmod/pip_repository.bzl
+++ /dev/null
@@ -1,87 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-""
-
-load("//python/private:render_pkg_aliases.bzl", "render_pkg_aliases")
-load("//python/private:text_util.bzl", "render")
-
-_BUILD_FILE_CONTENTS = """\
-package(default_visibility = ["//visibility:public"])
-
-# Ensure the `requirements.bzl` source can be accessed by stardoc, since users load() from it
-exports_files(["requirements.bzl"])
-"""
-
-def _pip_repository_impl(rctx):
-    bzl_packages = rctx.attr.whl_map.keys()
-    aliases = render_pkg_aliases(
-        repo_name = rctx.attr.repo_name,
-        rules_python = rctx.attr._template.workspace_name,
-        default_version = rctx.attr.default_version,
-        whl_map = rctx.attr.whl_map,
-    )
-    for path, contents in aliases.items():
-        rctx.file(path, contents)
-
-    # NOTE: we are using the canonical name with the double '@' in order to
-    # always uniquely identify a repository, as the labels are being passed as
-    # a string and the resolution of the label happens at the call-site of the
-    # `requirement`, et al. macros.
-    macro_tmpl = "@@{name}//{{}}:{{}}".format(name = rctx.attr.name)
-
-    rctx.file("BUILD.bazel", _BUILD_FILE_CONTENTS)
-    rctx.template("requirements.bzl", rctx.attr._template, substitutions = {
-        "%%ALL_DATA_REQUIREMENTS%%": render.list([
-            macro_tmpl.format(p, "data")
-            for p in bzl_packages
-        ]),
-        "%%ALL_REQUIREMENTS%%": render.list([
-            macro_tmpl.format(p, p)
-            for p in bzl_packages
-        ]),
-        "%%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%": render.dict({
-            p: macro_tmpl.format(p, "whl")
-            for p in bzl_packages
-        }),
-        "%%MACRO_TMPL%%": macro_tmpl,
-        "%%NAME%%": rctx.attr.repo_name,
-    })
-
-pip_repository_attrs = {
-    "default_version": attr.string(
-        mandatory = True,
-        doc = """\
-This is the default python version in the format of X.Y.Z. This should match
-what is setup by the 'python' extension using the 'is_default = True'
-setting.""",
-    ),
-    "repo_name": attr.string(
-        mandatory = True,
-        doc = "The apparent name of the repo. This is needed because in bzlmod, the name attribute becomes the canonical name.",
-    ),
-    "whl_map": attr.string_list_dict(
-        mandatory = True,
-        doc = "The wheel map where values are python versions",
-    ),
-    "_template": attr.label(
-        default = ":requirements.bzl.tmpl",
-    ),
-}
-
-pip_repository = repository_rule(
-    attrs = pip_repository_attrs,
-    doc = """A rule for bzlmod mulitple pip repository creation. PRIVATE USE ONLY.""",
-    implementation = _pip_repository_impl,
-)
diff --git a/python/private/bzlmod/requirements.bzl.tmpl b/python/private/bzlmod/requirements.bzl.tmpl
deleted file mode 100644
index b99322d..0000000
--- a/python/private/bzlmod/requirements.bzl.tmpl
+++ /dev/null
@@ -1,46 +0,0 @@
-"""Starlark representation of locked requirements.
-
-@generated by rules_python pip.parse bzlmod extension.
-"""
-
-load("@rules_python//python:pip.bzl", "pip_utils")
-
-all_requirements = %%ALL_REQUIREMENTS%%
-
-all_whl_requirements_by_package = %%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%
-
-all_whl_requirements = all_whl_requirements_by_package.values()
-
-all_data_requirements = %%ALL_DATA_REQUIREMENTS%%
-
-def requirement(name):
-    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "pkg")
-
-def whl_requirement(name):
-    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "whl")
-
-def data_requirement(name):
-    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "data")
-
-def dist_info_requirement(name):
-    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "dist_info")
-
-def entry_point(pkg, script = None):
-    """entry_point returns the target of the canonical label of the package entrypoints.
-    """
-    actual_script = script or pkg
-
-    fail("""Please replace this instance of entry_point with the following:
-
-```
-load("@rules_python//python/entry_points:py_console_script_binary.bzl", "py_console_script_binary")
-
-py_console_script_binary(
-    name = "{pkg}",
-    pkg = "@%%NAME%%//{pkg}",{script}
-)
-```
-""".format(
-        pkg = pip_utils.normalize_name(pkg),
-        script = "" if not script else "\n    script = \"%s\"," % actual_script,
-    ))
diff --git a/python/private/common/BUILD.bazel b/python/private/common/BUILD.bazel
index e69eaff..a415e05 100644
--- a/python/private/common/BUILD.bazel
+++ b/python/private/common/BUILD.bazel
@@ -15,12 +15,13 @@
 load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
 
 package(
-    default_visibility = ["//python:__subpackages__"],
+    default_visibility = ["//:__subpackages__"],
 )
 
 bzl_library(
     name = "attributes_bazel_bzl",
     srcs = ["attributes_bazel.bzl"],
+    deps = ["//python/private:rules_cc_srcs_bzl"],
 )
 
 bzl_library(
@@ -31,7 +32,11 @@
         ":providers_bzl",
         ":py_internal_bzl",
         ":semantics_bzl",
+        "//python/private:enum_bzl",
+        "//python/private:flags_bzl",
         "//python/private:reexports_bzl",
+        "//python/private:rules_cc_srcs_bzl",
+        "@bazel_skylib//rules:common_settings",
     ],
 )
 
@@ -45,9 +50,12 @@
     name = "common_bazel_bzl",
     srcs = ["common_bazel.bzl"],
     deps = [
+        ":attributes_bzl",
         ":common_bzl",
         ":providers_bzl",
         ":py_internal_bzl",
+        "//python/private:py_interpreter_program_bzl",
+        "//python/private:toolchain_types_bzl",
         "@bazel_skylib//lib:paths",
     ],
 )
@@ -61,6 +69,7 @@
         ":py_internal_bzl",
         ":semantics_bzl",
         "//python/private:reexports_bzl",
+        "//python/private:rules_cc_srcs_bzl",
     ],
 )
 
@@ -74,6 +83,7 @@
     srcs = ["providers.bzl"],
     deps = [
         ":semantics_bzl",
+        "//python/private:rules_cc_srcs_bzl",
         "//python/private:util_bzl",
     ],
 )
@@ -121,7 +131,11 @@
         ":common_bzl",
         ":providers_bzl",
         ":py_internal_bzl",
+        "//python/private:flags_bzl",
+        "//python/private:rules_cc_srcs_bzl",
+        "//python/private:toolchain_types_bzl",
         "@bazel_skylib//lib:dicts",
+        "@bazel_skylib//rules:common_settings",
     ],
 )
 
@@ -139,7 +153,10 @@
         ":common_bzl",
         ":providers_bzl",
         ":py_internal_bzl",
+        "//python/private:flags_bzl",
+        "//python/private:toolchain_types_bzl",
         "@bazel_skylib//lib:dicts",
+        "@bazel_skylib//rules:common_settings",
     ],
 )
 
diff --git a/python/private/common/attributes.bzl b/python/private/common/attributes.bzl
index b26d02c..503578b 100644
--- a/python/private/common/attributes.bzl
+++ b/python/private/common/attributes.bzl
@@ -13,6 +13,10 @@
 # limitations under the License.
 """Attributes for Python rules."""
 
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("@rules_cc//cc:defs.bzl", "CcInfo")
+load("//python/private:enum.bzl", "enum")
+load("//python/private:flags.bzl", "PrecompileFlag")
 load("//python/private:reexports.bzl", "BuiltinPyInfo")
 load(":common.bzl", "union_attrs")
 load(":providers.bzl", "PyInfo")
@@ -23,14 +27,124 @@
     "SRCS_ATTR_ALLOW_FILES",
 )
 
-# TODO: Load CcInfo from rules_cc
-_CcInfo = CcInfo
 _PackageSpecificationInfo = getattr(py_internal, "PackageSpecificationInfo", None)
 
 _STAMP_VALUES = [-1, 0, 1]
 
+def _precompile_attr_get_effective_value(ctx):
+    precompile_flag = PrecompileFlag.get_effective_value(ctx)
+
+    if precompile_flag == PrecompileFlag.FORCE_ENABLED:
+        return PrecompileAttr.ENABLED
+    if precompile_flag == PrecompileFlag.FORCE_DISABLED:
+        return PrecompileAttr.DISABLED
+
+    precompile_attr = ctx.attr.precompile
+    if precompile_attr == PrecompileAttr.INHERIT:
+        precompile = precompile_flag
+    else:
+        precompile = precompile_attr
+
+    # Guard against bad final states because the two enums are similar with
+    # magic values.
+    if precompile not in (
+        PrecompileAttr.ENABLED,
+        PrecompileAttr.DISABLED,
+        PrecompileAttr.IF_GENERATED_SOURCE,
+    ):
+        fail("Unexpected final precompile value: {}".format(repr(precompile)))
+
+    return precompile
+
+# buildifier: disable=name-conventions
+PrecompileAttr = enum(
+    # Determine the effective value from --precompile
+    INHERIT = "inherit",
+    # Compile Python source files at build time. Note that
+    # --precompile_add_to_runfiles affects how the compiled files are included
+    # into a downstream binary.
+    ENABLED = "enabled",
+    # Don't compile Python source files at build time.
+    DISABLED = "disabled",
+    # Compile Python source files, but only if they're a generated file.
+    IF_GENERATED_SOURCE = "if_generated_source",
+    get_effective_value = _precompile_attr_get_effective_value,
+)
+
+# buildifier: disable=name-conventions
+PrecompileInvalidationModeAttr = enum(
+    # Automatically pick a value based on build settings.
+    AUTO = "auto",
+    # Use the pyc file if the hash of the originating source file matches the
+    # hash recorded in the pyc file.
+    CHECKED_HASH = "checked_hash",
+    # Always use the pyc file, even if the originating source has changed.
+    UNCHECKED_HASH = "unchecked_hash",
+)
+
+def _precompile_source_retention_get_effective_value(ctx):
+    attr_value = ctx.attr.precompile_source_retention
+    if attr_value == PrecompileSourceRetentionAttr.INHERIT:
+        attr_value = ctx.attr._precompile_source_retention_flag[BuildSettingInfo].value
+
+    if attr_value not in (
+        PrecompileSourceRetentionAttr.KEEP_SOURCE,
+        PrecompileSourceRetentionAttr.OMIT_SOURCE,
+        PrecompileSourceRetentionAttr.OMIT_IF_GENERATED_SOURCE,
+    ):
+        fail("Unexpected final precompile_source_retention value: {}".format(repr(attr_value)))
+    return attr_value
+
+# buildifier: disable=name-conventions
+PrecompileSourceRetentionAttr = enum(
+    INHERIT = "inherit",
+    KEEP_SOURCE = "keep_source",
+    OMIT_SOURCE = "omit_source",
+    OMIT_IF_GENERATED_SOURCE = "omit_if_generated_source",
+    get_effective_value = _precompile_source_retention_get_effective_value,
+)
+
+def _pyc_collection_attr_is_pyc_collection_enabled(ctx):
+    pyc_collection = ctx.attr.pyc_collection
+    if pyc_collection == PycCollectionAttr.INHERIT:
+        pyc_collection = ctx.attr._pyc_collection_flag[BuildSettingInfo].value
+
+    if pyc_collection not in (PycCollectionAttr.INCLUDE_PYC, PycCollectionAttr.DISABLED):
+        fail("Unexpected final pyc_collection value: {}".format(repr(pyc_collection)))
+
+    return pyc_collection == PycCollectionAttr.INCLUDE_PYC
+
+# buildifier: disable=name-conventions
+PycCollectionAttr = enum(
+    INHERIT = "inherit",
+    INCLUDE_PYC = "include_pyc",
+    DISABLED = "disabled",
+    is_pyc_collection_enabled = _pyc_collection_attr_is_pyc_collection_enabled,
+)
+
 def create_stamp_attr(**kwargs):
-    return {"stamp": attr.int(values = _STAMP_VALUES, **kwargs)}
+    return {
+        "stamp": attr.int(
+            values = _STAMP_VALUES,
+            doc = """
+Whether to encode build information into the binary. Possible values:
+
+* `stamp = 1`: Always stamp the build information into the binary, even in
+  `--nostamp` builds. **This setting should be avoided**, since it potentially kills
+  remote caching for the binary and any downstream actions that depend on it.
+* `stamp = 0`: Always replace build information by constant values. This gives
+  good build result caching.
+* `stamp = -1`: Embedding of build information is controlled by the
+  `--[no]stamp` flag.
+
+Stamped binaries are not rebuilt unless their dependencies change.
+
+WARNING: Stamping can harm build performance by reducing cache hits and should
+be avoided if possible.
+""",
+            **kwargs
+        ),
+    }
 
 def create_srcs_attr(*, mandatory):
     return {
@@ -40,6 +154,12 @@
             mandatory = mandatory,
             # Necessary for --compile_one_dependency to work.
             flags = ["DIRECT_COMPILE_TIME_INPUT"],
+            doc = """
+The list of Python source files that are processed to create the target. This
+includes all your checked-in code and may include generated source files.  The
+`.py` files belong in `srcs` and library targets belong in `deps`. Other binary
+files that may be needed at run time belong in `data`.
+""",
         ),
     }
 
@@ -51,6 +171,7 @@
         "srcs_version": attr.string(
             default = "PY2AND3",
             values = values,
+            doc = "Defunct, unused, does nothing.",
         ),
     }
 
@@ -81,6 +202,13 @@
     "data": attr.label_list(
         allow_files = True,
         flags = ["SKIP_CONSTRAINTS_OVERRIDE"],
+        doc = """
+The list of files needed by this library at runtime. See comments about
+the [`data` attribute typically defined by rules](https://bazel.build/reference/be/common-definitions#typical-attributes).
+
+There is no `py_embed_data` like there is `cc_embed_data` and `go_embed_data`.
+This is because Python has a concept of runtime resources.
+""",
     ),
 }
 
@@ -111,6 +239,7 @@
 COMMON_ATTRS = union_attrs(
     DATA_ATTRS,
     NATIVE_RULES_ALLOWLIST_ATTRS,
+    # buildifier: disable=attr-licenses
     {
         # NOTE: This attribute is deprecated and slated for removal.
         "distribs": attr.string_list(),
@@ -130,12 +259,86 @@
         "deps": attr.label_list(
             providers = [
                 [PyInfo],
-                [_CcInfo],
+                [CcInfo],
                 [BuiltinPyInfo],
             ],
             # TODO(b/228692666): Google-specific; remove these allowances once
             # the depot is cleaned up.
             allow_rules = DEPS_ATTR_ALLOW_RULES,
+            doc = """
+List of additional libraries to be linked in to the target.
+See comments about
+the [`deps` attribute typically defined by
+rules](https://bazel.build/reference/be/common-definitions#typical-attributes).
+These are typically `py_library` rules.
+
+Targets that only provide data files used at runtime belong in the `data`
+attribute.
+""",
+        ),
+        "precompile": attr.string(
+            doc = """
+Whether py source files should be precompiled.
+
+See also: `--precompile` flag, which can override this attribute in some cases.
+
+Values:
+
+* `inherit`: Determine the value from the --precompile flag.
+* `enabled`: Compile Python source files at build time. Note that
+  --precompile_add_to_runfiles affects how the compiled files are included into
+  a downstream binary.
+* `disabled`: Don't compile Python source files at build time.
+* `if_generated_source`: Compile Python source files, but only if they're a
+  generated file.
+""",
+            default = PrecompileAttr.INHERIT,
+            values = sorted(PrecompileAttr.__members__.values()),
+        ),
+        "precompile_invalidation_mode": attr.string(
+            doc = """
+How precompiled files should be verified to be up-to-date with their associated
+source files. Possible values are:
+* `auto`: The effective value will be automatically determined by other build
+  settings.
+* `checked_hash`: Use the pyc file if the hash of the source file matches the hash
+  recorded in the pyc file. This is most useful when working with code that
+  you may modify.
+* `unchecked_hash`: Always use the pyc file; don't check the pyc's hash against
+  the source file. This is most useful when the code won't be modified.
+
+For more information on pyc invalidation modes, see
+https://docs.python.org/3/library/py_compile.html#py_compile.PycInvalidationMode
+""",
+            default = PrecompileInvalidationModeAttr.AUTO,
+            values = sorted(PrecompileInvalidationModeAttr.__members__.values()),
+        ),
+        "precompile_optimize_level": attr.int(
+            doc = """
+The optimization level for precompiled files.
+
+For more information about optimization levels, see the `compile()` function's
+`optimize` arg docs at https://docs.python.org/3/library/functions.html#compile
+
+NOTE: The value `-1` means "current interpreter", which will be the interpreter
+used _at build time when pycs are generated_, not the interpreter used at
+runtime when the code actually runs.
+""",
+            default = 0,
+        ),
+        "precompile_source_retention": attr.string(
+            default = PrecompileSourceRetentionAttr.INHERIT,
+            values = sorted(PrecompileSourceRetentionAttr.__members__.values()),
+            doc = """
+Determines, when a source file is compiled, if the source file is kept
+in the resulting output or not. Valid values are:
+
+* `inherit`: Inherit the value from the `--precompile_source_retention` flag.
+* `keep_source`: Include the original Python source.
+* `omit_source`: Don't include the original py source.
+* `omit_if_generated_source`: Keep the original source if it's a regular source
+  file, but omit it if it's a generated file.
+""",
         ),
         # Required attribute, but details vary by rule.
         # Use create_srcs_attr to create one.
@@ -146,6 +349,21 @@
         # Required attribute, but the details vary by rule.
         # Use create_srcs_version_attr to create one.
         "srcs_version": None,
+        "_precompile_add_to_runfiles_flag": attr.label(
+            default = "//python/config_settings:precompile_add_to_runfiles",
+            providers = [BuildSettingInfo],
+        ),
+        "_precompile_flag": attr.label(
+            default = "//python/config_settings:precompile",
+            providers = [BuildSettingInfo],
+        ),
+        "_precompile_source_retention_flag": attr.label(
+            default = "//python/config_settings:precompile_source_retention",
+            providers = [BuildSettingInfo],
+        ),
+        # Force enabling auto exec groups, see
+        # https://bazel.build/extending/auto-exec-groups#how-enable-particular-rule
+        "_use_auto_exec_groups": attr.bool(default = True),
     },
     allow_none = True,
 )
@@ -226,7 +444,7 @@
     "testonly",
     "toolchains",
     "visibility",
-] + COMMON_ATTRS.keys()
+] + list(COMMON_ATTRS)  # Use list() instead of .keys() so it's valid Python
 
 # Attribute names common to all test=True rules
 TEST_ATTR_NAMES = COMMON_ATTR_NAMES + [
@@ -236,10 +454,10 @@
     "flaky",
     "shard_count",
     "local",
-] + AGNOSTIC_TEST_ATTRS.keys()
+] + list(AGNOSTIC_TEST_ATTRS)  # Use list() instead of .keys() so it's valid Python
 
 # Attribute names common to all executable=True rules
 BINARY_ATTR_NAMES = COMMON_ATTR_NAMES + [
     "args",
     "output_licenses",  # NOTE: Common to all rules, but slated for removal
-] + AGNOSTIC_BINARY_ATTRS.keys()
+] + list(AGNOSTIC_BINARY_ATTRS)  # Use list() instead of .keys() so it's valid Python
diff --git a/python/private/common/common.bzl b/python/private/common/common.bzl
index 75c117f..5559ccd 100644
--- a/python/private/common/common.bzl
+++ b/python/private/common/common.bzl
@@ -29,8 +29,6 @@
 _py_builtins = py_internal
 PackageSpecificationInfo = getattr(py_internal, "PackageSpecificationInfo", None)
 
-TOOLCHAIN_TYPE = "@bazel_tools//tools/python:toolchain_type"
-
 # Extensions without the dot
 _PYTHON_SOURCE_EXTENSIONS = ["py"]
 
@@ -155,7 +153,9 @@
         cc_info_for_self_link,
         cc_info_with_extra_link_time_libraries,
         extra_runfiles,
-        cc_toolchain):
+        cc_toolchain,
+        feature_config,
+        **kwargs):
     """Creates a CcDetails struct.
 
     Args:
@@ -172,6 +172,12 @@
             part of `cc_info_with_extra_link_time_libraries`; should be added to
             runfiles.
         cc_toolchain: CcToolchain that should be used when building.
+        feature_config: struct from cc_configure_features(); see
+            //python/private/common:py_executable.bzl%cc_configure_features.
+        **kwargs: Additional keys/values to set in the returned struct. This is to
+            facilitate extensions with less patching. Any added fields should
+            pick names that are unlikely to collide if the CcDetails API has
+            additional fields added.
 
     Returns:
         A `CcDetails` struct.
@@ -182,9 +188,11 @@
         cc_info_with_extra_link_time_libraries = cc_info_with_extra_link_time_libraries,
         extra_runfiles = extra_runfiles,
         cc_toolchain = cc_toolchain,
+        feature_config = feature_config,
+        **kwargs
     )
 
-def create_executable_result_struct(*, extra_files_to_build, output_groups):
+def create_executable_result_struct(*, extra_files_to_build, output_groups, extra_runfiles = None):
     """Creates a `CreateExecutableResult` struct.
 
     This is the return value type of the semantics create_executable function.
@@ -194,6 +202,7 @@
             included as default outputs.
         output_groups: dict[str, depset[File]]; additional output groups that
             should be returned.
+        extra_runfiles: A runfiles object of additional runfiles to include.
 
     Returns:
         A `CreateExecutableResult` struct.
@@ -201,6 +210,7 @@
     return struct(
         extra_files_to_build = extra_files_to_build,
         output_groups = output_groups,
+        extra_runfiles = extra_runfiles,
     )
 
 def union_attrs(*attr_dicts, allow_none = False):
@@ -338,7 +348,7 @@
         collect_default = True,
     )
 
-def create_py_info(ctx, *, direct_sources, imports):
+def create_py_info(ctx, *, direct_sources, direct_pyc_files, imports):
     """Create PyInfo provider.
 
     Args:
@@ -346,6 +356,7 @@
         direct_sources: depset of Files; the direct, raw `.py` sources for the
             target. This should only be Python source files. It should not
             include pyc files.
+        direct_pyc_files: depset of Files; the direct `.pyc` sources for the target.
         imports: depset of strings; the import path values to propagate.
 
     Returns:
@@ -358,6 +369,7 @@
     has_py3_only_sources = ctx.attr.srcs_version in ("PY3", "PY3ONLY")
     transitive_sources_depsets = []  # list of depsets
     transitive_sources_files = []  # list of Files
+    transitive_pyc_depsets = [direct_pyc_files]  # list of depsets
     for target in ctx.attr.deps:
         # PyInfo may not be present e.g. cc_library rules.
         if PyInfo in target or BuiltinPyInfo in target:
@@ -366,6 +378,10 @@
             uses_shared_libraries = uses_shared_libraries or info.uses_shared_libraries
             has_py2_only_sources = has_py2_only_sources or info.has_py2_only_sources
             has_py3_only_sources = has_py3_only_sources or info.has_py3_only_sources
+
+            # BuiltinPyInfo doesn't have this field.
+            if hasattr(info, "transitive_pyc_files"):
+                transitive_pyc_depsets.append(info.transitive_pyc_files)
         else:
             # TODO(b/228692666): Remove this once non-PyInfo targets are no
             # longer supported in `deps`.
@@ -412,11 +428,17 @@
         has_py2_only_sources = has_py2_only_sources,
         has_py3_only_sources = has_py3_only_sources,
         uses_shared_libraries = uses_shared_libraries,
+        direct_pyc_files = direct_pyc_files,
+        transitive_pyc_files = depset(transitive = transitive_pyc_depsets),
     )
 
     # TODO(b/203567235): Set `uses_shared_libraries` field, though the Bazel
     # docs indicate it's unused in Bazel and may be removed.
     py_info = PyInfo(**py_info_kwargs)
+
+    # Remove args that BuiltinPyInfo doesn't support
+    py_info_kwargs.pop("direct_pyc_files")
+    py_info_kwargs.pop("transitive_pyc_files")
     builtin_py_info = BuiltinPyInfo(**py_info_kwargs)
 
     return py_info, deps_transitive_sources, builtin_py_info
diff --git a/python/private/common/common_bazel.bzl b/python/private/common/common_bazel.bzl
index 7277337..c86abd2 100644
--- a/python/private/common/common_bazel.bzl
+++ b/python/private/common/common_bazel.bzl
@@ -14,16 +14,14 @@
 """Common functions that are specific to Bazel rule implementation"""
 
 load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@rules_cc//cc:defs.bzl", "CcInfo", "cc_common")
+load("//python/private:py_interpreter_program.bzl", "PyInterpreterProgramInfo")
+load("//python/private:toolchain_types.bzl", "EXEC_TOOLS_TOOLCHAIN_TYPE", "TARGET_TOOLCHAIN_TYPE")
+load(":attributes.bzl", "PrecompileAttr", "PrecompileInvalidationModeAttr", "PrecompileSourceRetentionAttr")
 load(":common.bzl", "is_bool")
 load(":providers.bzl", "PyCcLinkParamsProvider")
 load(":py_internal.bzl", "py_internal")
 
-# TODO: Load cc_common from rules_cc
-_cc_common = cc_common
-
-# TODO: Load CcInfo from rules_cc
-_CcInfo = CcInfo
-
 _py_builtins = py_internal
 
 def collect_cc_info(ctx, extra_deps = []):
@@ -42,13 +40,13 @@
         deps.extend(extra_deps)
     cc_infos = []
     for dep in deps:
-        if _CcInfo in dep:
-            cc_infos.append(dep[_CcInfo])
+        if CcInfo in dep:
+            cc_infos.append(dep[CcInfo])
 
         if PyCcLinkParamsProvider in dep:
             cc_infos.append(dep[PyCcLinkParamsProvider].cc_info)
 
-    return _cc_common.merge_cc_infos(cc_infos = cc_infos)
+    return cc_common.merge_cc_infos(cc_infos = cc_infos)
 
 def maybe_precompile(ctx, srcs):
     """Computes all the outputs (maybe precompiled) from the input srcs.
@@ -60,12 +58,160 @@
         srcs: List of Files; the inputs to maybe precompile.
 
     Returns:
-        List of Files; the desired output files derived from the input sources.
+        Struct of precompiling results with fields:
+        * `keep_srcs`: list of File; the input sources that should be included
+          as default outputs and runfiles.
+        * `pyc_files`: list of File; the precompiled files.
+        * `py_to_pyc_map`: dict of src File input to pyc File output. If a source
+          file wasn't precompiled, it won't be in the dict.
     """
-    _ = ctx  # @unused
 
-    # Precompilation isn't implemented yet, so just return srcs as-is
-    return srcs
+    # The exec tools toolchain and precompiler are optional. Rather than
+    # fail, just skip precompiling, as its mostly just an optimization.
+    exec_tools_toolchain = ctx.toolchains[EXEC_TOOLS_TOOLCHAIN_TYPE]
+    if exec_tools_toolchain == None or exec_tools_toolchain.exec_tools.precompiler == None:
+        precompile = PrecompileAttr.DISABLED
+    else:
+        precompile = PrecompileAttr.get_effective_value(ctx)
+
+    source_retention = PrecompileSourceRetentionAttr.get_effective_value(ctx)
+
+    result = struct(
+        keep_srcs = [],
+        pyc_files = [],
+        py_to_pyc_map = {},
+    )
+    for src in srcs:
+        # The logic below is a bit convoluted. The gist is:
+        # * If precompiling isn't done, add the py source to default outputs.
+        #   Otherwise, the source retention flag decides.
+        # * In order to determine `use_pycache`, we have to know if the source
+        #   is being added to the default outputs.
+        is_generated_source = not src.is_source
+        should_precompile = (
+            precompile == PrecompileAttr.ENABLED or
+            (precompile == PrecompileAttr.IF_GENERATED_SOURCE and is_generated_source)
+        )
+        keep_source = (
+            not should_precompile or
+            source_retention == PrecompileSourceRetentionAttr.KEEP_SOURCE or
+            (source_retention == PrecompileSourceRetentionAttr.OMIT_IF_GENERATED_SOURCE and not is_generated_source)
+        )
+        if should_precompile:
+            pyc = _precompile(ctx, src, use_pycache = keep_source)
+            result.pyc_files.append(pyc)
+            result.py_to_pyc_map[src] = pyc
+        if keep_source:
+            result.keep_srcs.append(src)
+
+    return result
+
+def _precompile(ctx, src, *, use_pycache):
+    """Compile a py file to pyc.
+
+    Args:
+        ctx: rule context.
+        src: File object to compile
+        use_pycache: bool. True if the output should be within the `__pycache__`
+            sub-directory. False if it should be alongside the original source
+            file.
+
+    Returns:
+        File of the generated pyc file.
+    """
+    exec_tools_info = ctx.toolchains[EXEC_TOOLS_TOOLCHAIN_TYPE].exec_tools
+    target_toolchain = ctx.toolchains[TARGET_TOOLCHAIN_TYPE].py3_runtime
+
+    # These args control starting the precompiler, e.g., when run as a worker,
+    # these args are only passed once.
+    precompiler_startup_args = ctx.actions.args()
+
+    env = {}
+    tools = []
+
+    precompiler = exec_tools_info.precompiler
+    if PyInterpreterProgramInfo in precompiler:
+        precompiler_executable = exec_tools_info.exec_interpreter[DefaultInfo].files_to_run
+        program_info = precompiler[PyInterpreterProgramInfo]
+        env.update(program_info.env)
+        precompiler_startup_args.add_all(program_info.interpreter_args)
+        default_info = precompiler[DefaultInfo]
+        precompiler_startup_args.add(default_info.files_to_run.executable)
+        tools.append(default_info.files_to_run)
+    elif precompiler[DefaultInfo].files_to_run:
+        precompiler_executable = precompiler[DefaultInfo].files_to_run
+    else:
+        fail(("Unrecognized precompiler: target '{}' does not provide " +
+              "PyInterpreterProgramInfo nor appears to be executable").format(
+            precompiler,
+        ))
+
+    stem = src.basename[:-(len(src.extension) + 1)]
+    if use_pycache:
+        if not target_toolchain.pyc_tag:
+            fail("Unable to create __pycache__ pyc: pyc_tag is empty")
+        pyc_path = "__pycache__/{stem}.{tag}.pyc".format(
+            stem = stem,
+            tag = target_toolchain.pyc_tag,
+        )
+    else:
+        pyc_path = "{}.pyc".format(stem)
+
+    pyc = ctx.actions.declare_file(pyc_path, sibling = src)
+
+    invalidation_mode = ctx.attr.precompile_invalidation_mode
+    if invalidation_mode == PrecompileInvalidationModeAttr.AUTO:
+        if ctx.var["COMPILATION_MODE"] == "opt":
+            invalidation_mode = PrecompileInvalidationModeAttr.UNCHECKED_HASH
+        else:
+            invalidation_mode = PrecompileInvalidationModeAttr.CHECKED_HASH
+
+    # Though --modify_execution_info exists, it can only set keys with
+    # empty values, which doesn't work for persistent worker settings.
+    execution_requirements = {}
+    if testing.ExecutionInfo in precompiler:
+        execution_requirements.update(precompiler[testing.ExecutionInfo].requirements)
+
+    # These args are passed for every precompilation request, e.g. as part of
+    # a request to a worker process.
+    precompile_request_args = ctx.actions.args()
+
+    # Always use param files so that it can be run as a persistent worker
+    precompile_request_args.use_param_file("@%s", use_always = True)
+    precompile_request_args.set_param_file_format("multiline")
+
+    precompile_request_args.add("--invalidation_mode", invalidation_mode)
+    precompile_request_args.add("--src", src)
+
+    # NOTE: src.short_path is used because src.path contains the platform and
+    # build-specific hash portions of the path, which we don't want in the
+    # pyc data. Note, however, for remote-remote files, short_path will
+    # have the repo name, which is likely to contain extraneous info.
+    precompile_request_args.add("--src_name", src.short_path)
+    precompile_request_args.add("--pyc", pyc)
+    precompile_request_args.add("--optimize", ctx.attr.precompile_optimize_level)
+
+    version_info = target_toolchain.interpreter_version_info
+    python_version = "{}.{}".format(version_info.major, version_info.minor)
+    precompile_request_args.add("--python_version", python_version)
+
+    ctx.actions.run(
+        executable = precompiler_executable,
+        arguments = [precompiler_startup_args, precompile_request_args],
+        inputs = [src],
+        outputs = [pyc],
+        mnemonic = "PyCompile",
+        progress_message = "Python precompiling %{input} into %{output}",
+        tools = tools,
+        env = env | {
+            "PYTHONHASHSEED": "0",  # Helps avoid non-deterministic behavior
+            "PYTHONNOUSERSITE": "1",  # Helps avoid non-deterministic behavior
+            "PYTHONSAFEPATH": "1",  # Helps avoid incorrect import issues
+        },
+        execution_requirements = execution_requirements,
+        toolchain = EXEC_TOOLS_TOOLCHAIN_TYPE,
+    )
+    return pyc
 
 def get_imports(ctx):
     """Gets the imports from a rule's `imports` attribute.
diff --git a/python/private/common/providers.bzl b/python/private/common/providers.bzl
index 38a7054..eb8b910 100644
--- a/python/private/common/providers.bzl
+++ b/python/private/common/providers.bzl
@@ -13,14 +13,12 @@
 # limitations under the License.
 """Providers for Python rules."""
 
+load("@rules_cc//cc:defs.bzl", "CcInfo")
 load("//python/private:util.bzl", "IS_BAZEL_6_OR_HIGHER")
 
-# TODO: load CcInfo from rules_cc
-_CcInfo = CcInfo
-
 DEFAULT_STUB_SHEBANG = "#!/usr/bin/env python3"
 
-DEFAULT_BOOTSTRAP_TEMPLATE = Label("//python/private:python_bootstrap_template.txt")
+DEFAULT_BOOTSTRAP_TEMPLATE = Label("//python/private:bootstrap_template")
 
 _PYTHON_VERSION_VALUES = ["PY2", "PY3"]
 
@@ -36,16 +34,53 @@
         return provider("Stub, not used", fields = []), None
     return provider(doc = doc, fields = fields, **kwargs)
 
+def _optional_int(value):
+    return int(value) if value != None else None
+
+def interpreter_version_info_struct_from_dict(info_dict):
+    """Create a struct of interpreter version info from a dict from an attribute.
+
+    Args:
+        info_dict: (dict | None) of version info fields. See interpreter_version_info
+            provider field docs.
+
+    Returns:
+        struct of version info; see interpreter_version_info provider field docs.
+    """
+    info_dict = dict(info_dict or {})  # Copy in case the original is frozen
+    if info_dict:
+        if not ("major" in info_dict and "minor" in info_dict):
+            fail("interpreter_version_info must have at least two keys, 'major' and 'minor'")
+    version_info_struct = struct(
+        major = _optional_int(info_dict.pop("major", None)),
+        minor = _optional_int(info_dict.pop("minor", None)),
+        micro = _optional_int(info_dict.pop("micro", None)),
+        releaselevel = str(info_dict.pop("releaselevel")) if "releaselevel" in info_dict else None,
+        serial = _optional_int(info_dict.pop("serial", None)),
+    )
+
+    if len(info_dict.keys()) > 0:
+        fail("unexpected keys {} in interpreter_version_info".format(
+            str(info_dict.keys()),
+        ))
+
+    return version_info_struct
+
 def _PyRuntimeInfo_init(
         *,
+        implementation_name = None,
         interpreter_path = None,
         interpreter = None,
         files = None,
         coverage_tool = None,
         coverage_files = None,
+        pyc_tag = None,
         python_version,
         stub_shebang = None,
-        bootstrap_template = None):
+        bootstrap_template = None,
+        interpreter_version_info = None,
+        stage2_bootstrap_template = None,
+        zip_main_template = None):
     if (interpreter_path and interpreter) or (not interpreter_path and not interpreter):
         fail("exactly one of interpreter or interpreter_path must be specified")
 
@@ -87,10 +122,15 @@
         "coverage_files": coverage_files,
         "coverage_tool": coverage_tool,
         "files": files,
+        "implementation_name": implementation_name,
         "interpreter": interpreter,
         "interpreter_path": interpreter_path,
+        "interpreter_version_info": interpreter_version_info_struct_from_dict(interpreter_version_info),
+        "pyc_tag": pyc_tag,
         "python_version": python_version,
+        "stage2_bootstrap_template": stage2_bootstrap_template,
         "stub_shebang": stub_shebang,
+        "zip_main_template": zip_main_template,
     }
 
 # TODO(#15897): Rename this to PyRuntimeInfo when we're ready to replace the Java
@@ -108,44 +148,169 @@
 """,
     init = _PyRuntimeInfo_init,
     fields = {
-        "bootstrap_template": (
-            "See py_runtime_rule.bzl%py_runtime.bootstrap_template for docs."
-        ),
-        "coverage_files": (
-            "The files required at runtime for using `coverage_tool`. " +
-            "Will be `None` if no `coverage_tool` was provided."
-        ),
-        "coverage_tool": (
-            "If set, this field is a `File` representing tool used for collecting code coverage information from python tests. Otherwise, this is `None`."
-        ),
-        "files": (
-            "If this is an in-build runtime, this field is a `depset` of `File`s" +
-            "that need to be added to the runfiles of an executable target that " +
-            "uses this runtime (in particular, files needed by `interpreter`). " +
-            "The value of `interpreter` need not be included in this field. If " +
-            "this is a platform runtime then this field is `None`."
-        ),
-        "interpreter": (
-            "If this is an in-build runtime, this field is a `File` representing " +
-            "the interpreter. Otherwise, this is `None`. Note that an in-build " +
-            "runtime can use either a prebuilt, checked-in interpreter or an " +
-            "interpreter built from source."
-        ),
-        "interpreter_path": (
-            "If this is a platform runtime, this field is the absolute " +
-            "filesystem path to the interpreter on the target platform. " +
-            "Otherwise, this is `None`."
-        ),
-        "python_version": (
-            "Indicates whether this runtime uses Python major version 2 or 3. " +
-            "Valid values are (only) `\"PY2\"` and " +
-            "`\"PY3\"`."
-        ),
-        "stub_shebang": (
-            "\"Shebang\" expression prepended to the bootstrapping Python stub " +
-            "script used when executing `py_binary` targets.  Does not " +
-            "apply to Windows."
-        ),
+        "bootstrap_template": """
+:type: File
+
+A template of code responsible for the initial startup of a program.
+
+This code is responsible for:
+
+* Locating the target interpreter. Typically it is in runfiles, but not always.
+* Setting necessary environment variables, command line flags, or other
+  configuration that can't be modified after the interpreter starts.
+* Invoking the appropriate entry point. This is usually a second-stage bootstrap
+  that performs additional setup prior to running a program's actual entry point.
+
+The {obj}`--bootstrap_impl` flag affects how this stage 1 bootstrap
+is expected to behave and the substitutions performed.
+
+* `--bootstrap_impl=system_python` substitutions: `%is_zipfile%`, `%python_binary%`,
+  `%target%`, `%workspace_name%`, `%coverage_tool%`, `%import_all%`, `%imports%`,
+  `%main%`, `%shebang%`
+* `--bootstrap_impl=script` substitutions: `%is_zipfile%`, `%python_binary%`,
+  `%target%`, `%workspace_name%`, `%shebang%`, `%stage2_bootstrap%`
+
+Substitution definitions:
+
+* `%shebang%`: The shebang to use with the bootstrap; the bootstrap template
+  may choose to ignore this.
+* `%stage2_bootstrap%`: A runfiles-relative path to the stage 2 bootstrap.
+* `%python_binary%`: The path to the target Python interpreter. There are three
+  types of paths:
+  * An absolute path to a system interpreter (e.g. begins with `/`).
+  * A runfiles-relative path to an interpreter (e.g. `somerepo/bin/python3`)
+  * A program to search for on PATH, i.e. a word without spaces, e.g. `python3`.
+* `%workspace_name%`: The name of the workspace the target belongs to.
+* `%is_zipfile%`: The string `1` if this template is prepended to a zipfile to
+  create a self-executable zip file. The string `0` otherwise.
+
+For the other substitution definitions, see the {obj}`stage2_bootstrap_template`
+docs.
+
+:::{versionchanged} 0.33.0
+The set of substitutions depends on {obj}`--bootstrap_impl`
+:::
+""",
+        "coverage_files": """
+:type: depset[File] | None
+
+The files required at runtime for using `coverage_tool`. Will be `None` if no
+`coverage_tool` was provided.
+""",
+        "coverage_tool": """
+:type: File | None
+
+If set, this field is a `File` representing the tool used for collecting code
+coverage information from python tests. Otherwise, this is `None`.
+""",
+        "files": """
+:type: depset[File] | None
+
+If this is an in-build runtime, this field is a `depset` of `File`s that need to
+be added to the runfiles of an executable target that uses this runtime (in
+particular, files needed by `interpreter`). The value of `interpreter` need not
+be included in this field. If this is a platform runtime then this field is
+`None`.
+""",
+        "implementation_name": """
+:type: str | None
+
+The Python implementation name (`sys.implementation.name`)
+""",
+        "interpreter": """
+:type: File | None
+
+If this is an in-build runtime, this field is a `File` representing the
+interpreter. Otherwise, this is `None`. Note that an in-build runtime can use
+either a prebuilt, checked-in interpreter or an interpreter built from source.
+""",
+        "interpreter_path": """
+:type: str | None
+
+If this is a platform runtime, this field is the absolute filesystem path to the
+interpreter on the target platform. Otherwise, this is `None`.
+""",
+        "interpreter_version_info": """
+:type: struct
+
+Version information about the interpreter this runtime provides.
+It should match the format given by `sys.version_info`, however
+for simplicity, the micro, releaselevel, and serial values are
+optional.
+A struct with the following fields:
+* `major`: {type}`int`, the major version number
+* `minor`: {type}`int`, the minor version number
+* `micro`: {type}`int | None`, the micro version number
+* `releaselevel`: {type}`str | None`, the release level
+* `serial`: {type}`int | None`, the serial number of the release
+""",
+        "pyc_tag": """
+:type: str | None
+
+The tag portion of a pyc filename, e.g. the `cpython-39` infix
+of `foo.cpython-39.pyc`. See PEP 3147. If not specified, it will be computed
+from {obj}`implementation_name` and {obj}`interpreter_version_info`. If no
+pyc_tag is available, then only source-less pyc generation will function
+correctly.
+""",
+        "python_version": """
+:type: str
+
+Indicates whether this runtime uses Python major version 2 or 3. Valid values
+are (only) `"PY2"` and `"PY3"`.
+""",
+        "stage2_bootstrap_template": """
+:type: File
+
+A template of Python code that runs under the desired interpreter and is
+responsible for orchestrating calling the program's actual main code. This
+bootstrap is responsible for affecting the current runtime's state, such as
+import paths or enabling coverage, so that, when it runs the program's actual
+main code, it works properly under Bazel.
+
+The following substitutions are made during template expansion:
+* `%main%`: A runfiles-relative path to the program's actual main file. This
+  can be a `.py` or `.pyc` file, depending on precompile settings.
+* `%coverage_tool%`: Runfiles-relative path to the coverage library's entry point.
+  If coverage is not enabled or available, an empty string.
+* `%import_all%`: The string `True` if all repositories in the runfiles should
+  be added to sys.path. The string `False` otherwise.
+* `%imports%`: A colon-delimited string of runfiles-relative paths to add to
+  sys.path.
+* `%target%`: The name of the target this is for.
+* `%workspace_name%`: The name of the workspace the target belongs to.
+
+:::{versionadded} 0.33.0
+:::
+""",
+        "stub_shebang": """
+:type: str
+
+"Shebang" expression prepended to the bootstrapping Python stub
+script used when executing {obj}`py_binary` targets.  Does not
+apply to Windows.
+""",
+        "zip_main_template": """
+:type: File
+
+A template of Python code that becomes a zip file's top-level `__main__.py`
+file. The top-level `__main__.py` file is used when the zip file is explicitly
+passed to a Python interpreter. See PEP 441 for more information about zipapp
+support. Note that py_binary-generated zip files are self-executing and
+skip calling `__main__.py`.
+
+The following substitutions are made during template expansion:
+* `%stage2_bootstrap%`: A runfiles-relative string to the stage 2 bootstrap file.
+* `%python_binary%`: The path to the target Python interpreter. There are three
+  types of paths:
+  * An absolute path to a system interpreter (e.g. begins with `/`).
+  * A runfiles-relative path to an interpreter (e.g. `somerepo/bin/python3`)
+  * A program to search for on PATH, i.e. a word without spaces, e.g. `python3`.
+* `%workspace_name%`: The name of the workspace for the built target.
+
+:::{versionadded} 0.33.0
+:::
+""",
     },
 )
 
@@ -164,7 +329,9 @@
         uses_shared_libraries = False,
         imports = depset(),
         has_py2_only_sources = False,
-        has_py3_only_sources = False):
+        has_py3_only_sources = False,
+        direct_pyc_files = depset(),
+        transitive_pyc_files = depset()):
     _check_arg_type("transitive_sources", "depset", transitive_sources)
 
     # Verify it's postorder compatible, but retain is original ordering.
@@ -174,10 +341,14 @@
     _check_arg_type("imports", "depset", imports)
     _check_arg_type("has_py2_only_sources", "bool", has_py2_only_sources)
     _check_arg_type("has_py3_only_sources", "bool", has_py3_only_sources)
+    _check_arg_type("direct_pyc_files", "depset", direct_pyc_files)
+    _check_arg_type("transitive_pyc_files", "depset", transitive_pyc_files)
     return {
+        "direct_pyc_files": direct_pyc_files,
         "has_py2_only_sources": has_py2_only_sources,
         "has_py3_only_sources": has_py2_only_sources,
         "imports": imports,
+        "transitive_pyc_files": transitive_pyc_files,
         "transitive_sources": transitive_sources,
         "uses_shared_libraries": uses_shared_libraries,
     }
@@ -186,19 +357,44 @@
     doc = "Encapsulates information provided by the Python rules.",
     init = _PyInfo_init,
     fields = {
-        "has_py2_only_sources": "Whether any of this target's transitive sources requires a Python 2 runtime.",
-        "has_py3_only_sources": "Whether any of this target's transitive sources requires a Python 3 runtime.",
+        "direct_pyc_files": """
+:type: depset[File]
+
+Precompiled Python files that are considered directly provided
+by the target.
+""",
+        "has_py2_only_sources": """
+:type: bool
+
+Whether any of this target's transitive sources requires a Python 2 runtime.
+""",
+        "has_py3_only_sources": """
+:type: bool
+
+Whether any of this target's transitive sources requires a Python 3 runtime.
+""",
         "imports": """\
+:type: depset[str]
+
 A depset of import path strings to be added to the `PYTHONPATH` of executable
 Python targets. These are accumulated from the transitive `deps`.
 The order of the depset is not guaranteed and may be changed in the future. It
 is recommended to use `default` order (the default).
 """,
+        "transitive_pyc_files": """
+:type: depset[File]
+
+Direct and transitive precompiled Python files that are provided by the target.
+""",
         "transitive_sources": """\
+:type: depset[File]
+
 A (`postorder`-compatible) depset of `.py` files appearing in the target's
 `srcs` and the `srcs` of the target's transitive `deps`.
 """,
         "uses_shared_libraries": """
+:type: bool
+
 Whether any of this target's transitive `deps` has a shared library file (such
 as a `.so` file).
 
@@ -209,16 +405,20 @@
 
 def _PyCcLinkParamsProvider_init(cc_info):
     return {
-        "cc_info": _CcInfo(linking_context = cc_info.linking_context),
+        "cc_info": CcInfo(linking_context = cc_info.linking_context),
     }
 
 # buildifier: disable=name-conventions
 PyCcLinkParamsProvider, _unused_raw_py_cc_link_params_provider_ctor = _define_provider(
-    doc = ("Python-wrapper to forward CcInfo.linking_context. This is to " +
+    doc = ("Python-wrapper to forward {obj}`CcInfo.linking_context`. This is to " +
            "allow Python targets to propagate C++ linking information, but " +
            "without the Python target appearing to be a valid C++ rule dependency"),
     init = _PyCcLinkParamsProvider_init,
     fields = {
-        "cc_info": "A CcInfo instance; it has only linking_context set",
+        "cc_info": """
+:type: CcInfo
+
+Linking information; it has only {obj}`CcInfo.linking_context` set.
+""",
     },
 )
diff --git a/python/private/common/py_executable.bzl b/python/private/common/py_executable.bzl
index a24bad6..2b4a939 100644
--- a/python/private/common/py_executable.bzl
+++ b/python/private/common/py_executable.bzl
@@ -14,11 +14,21 @@
 """Common functionality between test/binary executables."""
 
 load("@bazel_skylib//lib:dicts.bzl", "dicts")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("@rules_cc//cc:defs.bzl", "cc_common")
+load("//python/private:flags.bzl", "PrecompileAddToRunfilesFlag")
+load("//python/private:reexports.bzl", "BuiltinPyRuntimeInfo")
+load(
+    "//python/private:toolchain_types.bzl",
+    "EXEC_TOOLS_TOOLCHAIN_TYPE",
+    TOOLCHAIN_TYPE = "TARGET_TOOLCHAIN_TYPE",
+)
 load(
     ":attributes.bzl",
     "AGNOSTIC_EXECUTABLE_ATTRS",
     "COMMON_ATTRS",
     "PY_SRCS_ATTRS",
+    "PycCollectionAttr",
     "SRCS_VERSION_ALL_VALUES",
     "create_srcs_attr",
     "create_srcs_version_attr",
@@ -26,7 +36,6 @@
 load(":cc_helper.bzl", "cc_helper")
 load(
     ":common.bzl",
-    "TOOLCHAIN_TYPE",
     "check_native_allowed",
     "collect_imports",
     "collect_runfiles",
@@ -41,6 +50,7 @@
 load(
     ":providers.bzl",
     "PyCcLinkParamsProvider",
+    "PyInfo",
     "PyRuntimeInfo",
 )
 load(":py_internal.bzl", "py_internal")
@@ -52,9 +62,6 @@
     "PY_RUNTIME_ATTR_NAME",
 )
 
-# TODO: Load cc_common from rules_cc
-_cc_common = cc_common
-
 _py_builtins = py_internal
 
 # Bazel 5.4 doesn't have config_common.toolchain_type
@@ -83,6 +90,23 @@
 filename in `srcs`, `main` must be specified.
 """,
         ),
+        "pyc_collection": attr.string(
+            default = PycCollectionAttr.INHERIT,
+            values = sorted(PycCollectionAttr.__members__.values()),
+            doc = """
+Determines whether pyc files from dependencies should be manually included.
+
+NOTE: This setting is only useful with `--precompile_add_to_runfiles=decided_elsewhere`.
+
+Valid values are:
+* `include_pyc`: Add pyc files from dependencies in the binary (from
+  `PyInfo.transitive_pyc_files`).
+* `disabled`: Don't explicitly add pyc files from dependencies. Note that
+  pyc files may still come from dependencies if a target includes them as
+  part of their runfiles (such as when `--precompile_add_to_runfiles=always`
+  is used).
+""",
+        ),
         # TODO(b/203567235): In Google, this attribute is deprecated, and can
         # only effectively be PY3. Externally, with Bazel, this attribute has
         # a separate story.
@@ -92,6 +116,15 @@
             default = "PY3",
             # NOTE: Some tests care about the order of these values.
             values = ["PY2", "PY3"],
+            doc = "Defunct, unused, does nothing.",
+        ),
+        "_bootstrap_impl_flag": attr.label(
+            default = "//python/config_settings:bootstrap_impl",
+            providers = [BuildSettingInfo],
+        ),
+        "_pyc_collection_flag": attr.label(
+            default = "//python/config_settings:pyc_collection",
+            providers = [BuildSettingInfo],
         ),
         "_windows_constraints": attr.label_list(
             default = [
@@ -125,9 +158,20 @@
 
     main_py = determine_main(ctx)
     direct_sources = filter_to_py_srcs(ctx.files.srcs)
-    output_sources = semantics.maybe_precompile(ctx, direct_sources)
+    precompile_result = semantics.maybe_precompile(ctx, direct_sources)
+
+    # Sourceless precompiled builds omit the main py file from outputs, so
+    # main has to be pointed to the precompiled main instead.
+    if main_py not in precompile_result.keep_srcs:
+        main_py = precompile_result.py_to_pyc_map[main_py]
+    direct_pyc_files = depset(precompile_result.pyc_files)
+
+    executable = _declare_executable_file(ctx)
+    default_outputs = [executable]
+    default_outputs.extend(precompile_result.keep_srcs)
+    default_outputs.extend(precompile_result.pyc_files)
+
     imports = collect_imports(ctx, semantics)
-    executable, files_to_build = _compute_outputs(ctx, output_sources)
 
     runtime_details = _get_runtime_details(ctx, semantics)
     if ctx.configuration.coverage_enabled:
@@ -151,7 +195,8 @@
         ctx,
         executable = executable,
         extra_deps = extra_deps,
-        files_to_build = files_to_build,
+        main_py_files = depset([main_py] + precompile_result.keep_srcs),
+        direct_pyc_files = direct_pyc_files,
         extra_common_runfiles = [
             runtime_details.runfiles,
             cc_details.extra_runfiles,
@@ -171,49 +216,53 @@
         native_deps_details = native_deps_details,
         runfiles_details = runfiles_details,
     )
-    files_to_build = depset(transitive = [
-        exec_result.extra_files_to_build,
-        files_to_build,
-    ])
-    extra_exec_runfiles = ctx.runfiles(transitive_files = files_to_build)
+
+    extra_exec_runfiles = exec_result.extra_runfiles.merge(
+        ctx.runfiles(transitive_files = exec_result.extra_files_to_build),
+    )
     runfiles_details = struct(
         default_runfiles = runfiles_details.default_runfiles.merge(extra_exec_runfiles),
         data_runfiles = runfiles_details.data_runfiles.merge(extra_exec_runfiles),
     )
 
-    legacy_providers, modern_providers = _create_providers(
+    return _create_providers(
         ctx = ctx,
         executable = executable,
         runfiles_details = runfiles_details,
         main_py = main_py,
         imports = imports,
         direct_sources = direct_sources,
-        files_to_build = files_to_build,
+        direct_pyc_files = direct_pyc_files,
+        default_outputs = depset(default_outputs, transitive = [exec_result.extra_files_to_build]),
         runtime_details = runtime_details,
         cc_info = cc_details.cc_info_for_propagating,
         inherited_environment = inherited_environment,
         semantics = semantics,
         output_groups = exec_result.output_groups,
     )
-    return struct(
-        legacy_providers = legacy_providers,
-        providers = modern_providers,
-    )
+
+def _get_build_info(ctx, cc_toolchain):
+    build_info_files = py_internal.cc_toolchain_build_info_files(cc_toolchain)
+    if cc_helper.is_stamping_enabled(ctx):
+        # Makes the target depend on BUILD_INFO_KEY, which helps to discover stamped targets
+        # See b/326620485 for more details.
+        ctx.version_file  # buildifier: disable=no-effect
+        return build_info_files.non_redacted_build_info_files.to_list()
+    else:
+        return build_info_files.redacted_build_info_files.to_list()
 
 def _validate_executable(ctx):
     if ctx.attr.python_version != "PY3":
         fail("It is not allowed to use Python 2")
     check_native_allowed(ctx)
 
-def _compute_outputs(ctx, output_sources):
+def _declare_executable_file(ctx):
     if target_platform_has_any_constraint(ctx, ctx.attr._windows_constraints):
         executable = ctx.actions.declare_file(ctx.label.name + ".exe")
     else:
         executable = ctx.actions.declare_file(ctx.label.name)
 
-    # TODO(b/208657718): Remove output_sources from the default outputs
-    # once the depot is cleaned up.
-    return executable, depset([executable] + output_sources)
+    return executable
 
 def _get_runtime_details(ctx, semantics):
     """Gets various information about the Python runtime to use.
@@ -344,7 +393,8 @@
         *,
         executable,
         extra_deps,
-        files_to_build,
+        main_py_files,
+        direct_pyc_files,
         extra_common_runfiles,
         semantics):
     """Returns the set of runfiles necessary prior to executable creation.
@@ -357,7 +407,8 @@
         executable: The main executable output.
         extra_deps: List of Targets; additional targets whose runfiles
             will be added to the common runfiles.
-        files_to_build: depset of File of the default outputs to add into runfiles.
+        main_py_files: depset of File of the default outputs to add into runfiles.
+        direct_pyc_files: depset of File of pyc files directly from this target.
         extra_common_runfiles: List of runfiles; additional runfiles that
             will be added to the common runfiles.
         semantics: A `BinarySemantics` struct; see `create_binary_semantics_struct`.
@@ -367,9 +418,20 @@
         * default_runfiles: The default runfiles
         * data_runfiles: The data runfiles
     """
+    common_runfiles_depsets = [main_py_files]
+
+    if ctx.attr._precompile_add_to_runfiles_flag[BuildSettingInfo].value == PrecompileAddToRunfilesFlag.ALWAYS:
+        common_runfiles_depsets.append(direct_pyc_files)
+    elif PycCollectionAttr.is_pyc_collection_enabled(ctx):
+        common_runfiles_depsets.append(direct_pyc_files)
+        for dep in (ctx.attr.deps + extra_deps):
+            if PyInfo not in dep:
+                continue
+            common_runfiles_depsets.append(dep[PyInfo].transitive_pyc_files)
+
     common_runfiles = collect_runfiles(ctx, depset(
         direct = [executable],
-        transitive = [files_to_build],
+        transitive = common_runfiles_depsets,
     ))
     if extra_deps:
         common_runfiles = common_runfiles.merge_all([
@@ -493,15 +555,7 @@
 
     dso = ctx.actions.declare_file(semantics.get_native_deps_dso_name(ctx))
     share_native_deps = py_internal.share_native_deps(ctx)
-    cc_feature_config = cc_configure_features(
-        ctx,
-        cc_toolchain = cc_details.cc_toolchain,
-        # See b/171276569#comment18: this feature string is just to allow
-        # Google's RBE to know the link action is for the Python case so it can
-        # take special actions (though as of Jun 2022, no special action is
-        # taken).
-        extra_features = ["native_deps_link"],
-    )
+    cc_feature_config = cc_details.feature_config
     if share_native_deps:
         linked_lib = _create_shared_native_deps_dso(
             ctx,
@@ -509,6 +563,7 @@
             is_test = is_test,
             requested_features = cc_feature_config.requested_features,
             feature_configuration = cc_feature_config.feature_configuration,
+            cc_toolchain = cc_details.cc_toolchain,
         )
         ctx.actions.symlink(
             output = dso,
@@ -517,19 +572,22 @@
         )
     else:
         linked_lib = dso
-    _cc_common.link(
+
+    # The regular cc_common.link API can't be used because several
+    # args are private-use only; see # private comments
+    py_internal.link(
         name = ctx.label.name,
         actions = ctx.actions,
         linking_contexts = [cc_info.linking_context],
         output_type = "dynamic_library",
-        never_link = True,
-        native_deps = True,
+        never_link = True,  # private
+        native_deps = True,  # private
         feature_configuration = cc_feature_config.feature_configuration,
         cc_toolchain = cc_details.cc_toolchain,
-        test_only_target = is_test,
+        test_only_target = is_test,  # private
         stamp = 1 if is_stamping_enabled(ctx, semantics) else 0,
-        main_output = linked_lib,
-        use_shareable_artifact_factory = True,
+        main_output = linked_lib,  # private
+        use_shareable_artifact_factory = True,  # private
         # NOTE: Only flags not captured by cc_info.linking_context need to
         # be manually passed
         user_link_flags = semantics.get_native_deps_user_link_flags(ctx),
@@ -545,14 +603,15 @@
         cc_info,
         is_test,
         feature_configuration,
-        requested_features):
-    linkstamps = cc_info.linking_context.linkstamps()
+        requested_features,
+        cc_toolchain):
+    linkstamps = py_internal.linking_context_linkstamps(cc_info.linking_context)
 
     partially_disabled_thin_lto = (
-        _cc_common.is_enabled(
+        cc_common.is_enabled(
             feature_name = "thin_lto_linkstatic_tests_use_shared_nonlto_backends",
             feature_configuration = feature_configuration,
-        ) and not _cc_common.is_enabled(
+        ) and not cc_common.is_enabled(
             feature_name = "thin_lto_all_linkstatic_use_shared_nonlto_backends",
             feature_configuration = feature_configuration,
         )
@@ -570,8 +629,11 @@
             for input in cc_info.linking_context.linker_inputs.to_list()
             for flag in input.user_link_flags
         ],
-        linkstamps = [linkstamp.file() for linkstamp in linkstamps.to_list()],
-        build_info_artifacts = _cc_common.get_build_info(ctx) if linkstamps else [],
+        linkstamps = [
+            py_internal.linkstamp_file(linkstamp)
+            for linkstamp in linkstamps.to_list()
+        ],
+        build_info_artifacts = _get_build_info(ctx, cc_toolchain) if linkstamps else [],
         features = requested_features,
         is_test_target_partially_disabled_thin_lto = is_test and partially_disabled_thin_lto,
     )
@@ -701,7 +763,8 @@
         executable,
         main_py,
         direct_sources,
-        files_to_build,
+        direct_pyc_files,
+        default_outputs,
         runfiles_details,
         imports,
         cc_info,
@@ -718,7 +781,8 @@
         direct_sources: list of Files; the direct, raw `.py` sources for the target.
             This should only be Python source files. It should not include pyc
             files.
-        files_to_build: depset of Files; the files for DefaultInfo.files
+        direct_pyc_files: depset of File; the direct pyc files for the target.
+        default_outputs: depset of Files; the files for DefaultInfo.files
         runfiles_details: runfiles that will become the default  and data runfiles.
         imports: depset of strings; the import paths to propagate
         cc_info: optional CcInfo; Linking information to propagate as
@@ -732,14 +796,12 @@
         semantics: BinarySemantics struct; see create_binary_semantics()
 
     Returns:
-        A two-tuple of:
-        1. A dict of legacy providers.
-        2. A list of modern providers.
+        A list of modern providers.
     """
     providers = [
         DefaultInfo(
             executable = executable,
-            files = files_to_build,
+            files = default_outputs,
             default_runfiles = _py_builtins.make_runfiles_respect_legacy_external_runfiles(
                 ctx,
                 runfiles_details.default_runfiles,
@@ -756,7 +818,28 @@
     # TODO(b/265840007): Make this non-conditional once Google enables
     # --incompatible_use_python_toolchains.
     if runtime_details.toolchain_runtime:
-        providers.append(runtime_details.toolchain_runtime)
+        py_runtime_info = runtime_details.toolchain_runtime
+        providers.append(py_runtime_info)
+
+        # Re-add the builtin PyRuntimeInfo for compatibility to make
+        # transitioning easier, but only if it isn't already added because
+        # returning the same provider type multiple times is an error.
+        # NOTE: The PyRuntimeInfo from the toolchain could be a rules_python
+        # PyRuntimeInfo or a builtin PyRuntimeInfo -- a user could have used the
+        # builtin py_runtime rule or defined their own. We can't directly detect
+        # the type of the provider object, but the rules_python PyRuntimeInfo
+        # object has an extra attribute that the builtin one doesn't.
+        if hasattr(py_runtime_info, "interpreter_version_info"):
+            providers.append(BuiltinPyRuntimeInfo(
+                interpreter_path = py_runtime_info.interpreter_path,
+                interpreter = py_runtime_info.interpreter,
+                files = py_runtime_info.files,
+                coverage_tool = py_runtime_info.coverage_tool,
+                coverage_files = py_runtime_info.coverage_files,
+                python_version = py_runtime_info.python_version,
+                stub_shebang = py_runtime_info.stub_shebang,
+                bootstrap_template = py_runtime_info.bootstrap_template,
+            ))
 
     # TODO(b/163083591): Remove the PyCcLinkParamsProvider once binaries-in-deps
     # are cleaned up.
@@ -768,6 +851,7 @@
     py_info, deps_transitive_sources, builtin_py_info = create_py_info(
         ctx,
         direct_sources = depset(direct_sources),
+        direct_pyc_files = direct_pyc_files,
         imports = imports,
     )
 
@@ -783,13 +867,13 @@
     providers.append(builtin_py_info)
     providers.append(create_output_group_info(py_info.transitive_sources, output_groups))
 
-    extra_legacy_providers, extra_providers = semantics.get_extra_providers(
+    extra_providers = semantics.get_extra_providers(
         ctx,
         main_py = main_py,
         runtime_details = runtime_details,
     )
     providers.extend(extra_providers)
-    return extra_legacy_providers, providers
+    return providers
 
 def _create_run_environment_info(ctx, inherited_environment):
     expanded_env = {}
@@ -822,12 +906,20 @@
     return rule(
         # TODO: add ability to remove attrs, i.e. for imports attr
         attrs = dicts.add(EXECUTABLE_ATTRS, attrs),
-        toolchains = [TOOLCHAIN_TYPE] + _CC_TOOLCHAINS,
+        toolchains = [
+            TOOLCHAIN_TYPE,
+            config_common.toolchain_type(EXEC_TOOLS_TOOLCHAIN_TYPE, mandatory = False),
+        ] + _CC_TOOLCHAINS,
         fragments = fragments,
         **kwargs
     )
 
-def cc_configure_features(ctx, *, cc_toolchain, extra_features):
+def cc_configure_features(
+        ctx,
+        *,
+        cc_toolchain,
+        extra_features,
+        linking_mode = "static_linking_mode"):
     """Configure C++ features for Python purposes.
 
     Args:
@@ -835,16 +927,19 @@
         cc_toolchain: The CcToolchain the target is using.
         extra_features: list of strings; additional features to request be
             enabled.
+        linking_mode: str; either "static_linking_mode" or
+            "dynamic_linking_mode". Specifies the linking mode feature for
+            C++ linking.
 
     Returns:
         struct of the feature configuration and all requested features.
     """
-    requested_features = ["static_linking_mode"]
+    requested_features = [linking_mode]
     requested_features.extend(extra_features)
     requested_features.extend(ctx.features)
     if "legacy_whole_archive" not in ctx.disabled_features:
         requested_features.append("legacy_whole_archive")
-    feature_configuration = _cc_common.configure_features(
+    feature_configuration = cc_common.configure_features(
         ctx = ctx,
         cc_toolchain = cc_toolchain,
         requested_features = requested_features,
diff --git a/python/private/common/py_executable_bazel.bzl b/python/private/common/py_executable_bazel.bzl
index ecdef9a..a0cfeba 100644
--- a/python/private/common/py_executable_bazel.bzl
+++ b/python/private/common/py_executable_bazel.bzl
@@ -15,6 +15,8 @@
 
 load("@bazel_skylib//lib:dicts.bzl", "dicts")
 load("@bazel_skylib//lib:paths.bzl", "paths")
+load("//python/private:flags.bzl", "BootstrapImplFlag")
+load("//python/private:toolchain_types.bzl", "TARGET_TOOLCHAIN_TYPE")
 load(":attributes_bazel.bzl", "IMPORTS_ATTRS")
 load(
     ":common.bzl",
@@ -59,8 +61,10 @@
         ),
         "_launcher": attr.label(
             cfg = "target",
-            default = "@bazel_tools//tools/launcher:launcher",
-            executable = True,
+            # NOTE: This is an executable, but is only used for Windows. It
+            # can't have executable=True because the backing target is an
+            # empty target for other platforms.
+            default = "//tools/launcher:launcher",
         ),
         "_py_interpreter": attr.label(
             # The configuration_field args are validated when called;
@@ -75,7 +79,7 @@
         # GraphlessQueryTest.testLabelsOperator relies on it to test for
         # query behavior of implicit dependencies.
         "_py_toolchain_type": attr.label(
-            default = "@bazel_tools//tools/python:toolchain_type",
+            default = TARGET_TOOLCHAIN_TYPE,
         ),
         "_windows_launcher_maker": attr.label(
             default = "@bazel_tools//tools/launcher:launcher_maker",
@@ -98,17 +102,13 @@
     )
 
 def py_executable_bazel_impl(ctx, *, is_test, inherited_environment):
-    """Common code for executables for Baze."""
-    result = py_executable_base_impl(
+    """Common code for executables for Bazel."""
+    return py_executable_base_impl(
         ctx = ctx,
         semantics = create_binary_semantics_bazel(),
         is_test = is_test,
         inherited_environment = inherited_environment,
     )
-    return struct(
-        providers = result.providers,
-        **result.legacy_providers
-    )
 
 def create_binary_semantics_bazel():
     return create_binary_semantics_struct(
@@ -143,7 +143,7 @@
 
 def _get_extra_providers(ctx, main_py, runtime_details):
     _ = ctx, main_py, runtime_details  # @unused
-    return {}, []
+    return []
 
 def _get_stamp_flag(ctx):
     # NOTE: Undocumented API; private to builtins
@@ -168,12 +168,6 @@
         runfiles_details):
     _ = is_test, cc_details, native_deps_details  # @unused
 
-    common_bootstrap_template_kwargs = dict(
-        main_py = main_py,
-        imports = imports,
-        runtime_details = runtime_details,
-    )
-
     is_windows = target_platform_has_any_constraint(ctx, ctx.attr._windows_constraints)
 
     if is_windows:
@@ -183,21 +177,47 @@
     else:
         base_executable_name = executable.basename
 
-    zip_bootstrap = ctx.actions.declare_file(base_executable_name + ".temp", sibling = executable)
-    zip_file = ctx.actions.declare_file(base_executable_name + ".zip", sibling = executable)
+    # The check for stage2_bootstrap_template is to support legacy
+    # BuiltinPyRuntimeInfo providers, which is likely to come from
+    # @bazel_tools//tools/python:autodetecting_toolchain, the toolchain used
+    # for workspace builds when no rules_python toolchain is configured.
+    if (BootstrapImplFlag.get_value(ctx) == BootstrapImplFlag.SCRIPT and
+        runtime_details.effective_runtime and
+        hasattr(runtime_details.effective_runtime, "stage2_bootstrap_template")):
+        stage2_bootstrap = _create_stage2_bootstrap(
+            ctx,
+            output_prefix = base_executable_name,
+            output_sibling = executable,
+            main_py = main_py,
+            imports = imports,
+            runtime_details = runtime_details,
+        )
+        extra_runfiles = ctx.runfiles([stage2_bootstrap])
+        zip_main = _create_zip_main(
+            ctx,
+            stage2_bootstrap = stage2_bootstrap,
+            runtime_details = runtime_details,
+        )
+    else:
+        stage2_bootstrap = None
+        extra_runfiles = ctx.runfiles()
+        zip_main = ctx.actions.declare_file(base_executable_name + ".temp", sibling = executable)
+        _create_stage1_bootstrap(
+            ctx,
+            output = zip_main,
+            main_py = main_py,
+            imports = imports,
+            is_for_zip = True,
+            runtime_details = runtime_details,
+        )
 
-    _expand_bootstrap_template(
-        ctx,
-        output = zip_bootstrap,
-        is_for_zip = True,
-        **common_bootstrap_template_kwargs
-    )
+    zip_file = ctx.actions.declare_file(base_executable_name + ".zip", sibling = executable)
     _create_zip_file(
         ctx,
         output = zip_file,
         original_nonzip_executable = executable,
-        executable_for_zip_file = zip_bootstrap,
-        runfiles = runfiles_details.default_runfiles,
+        zip_main = zip_main,
+        runfiles = runfiles_details.default_runfiles.merge(extra_runfiles),
     )
 
     extra_files_to_build = []
@@ -246,13 +266,22 @@
         if bootstrap_output != None:
             fail("Should not occur: bootstrap_output should not be used " +
                  "when creating an executable zip")
-        _create_executable_zip_file(ctx, output = executable, zip_file = zip_file)
+        _create_executable_zip_file(
+            ctx,
+            output = executable,
+            zip_file = zip_file,
+            stage2_bootstrap = stage2_bootstrap,
+            runtime_details = runtime_details,
+        )
     elif bootstrap_output:
-        _expand_bootstrap_template(
+        _create_stage1_bootstrap(
             ctx,
             output = bootstrap_output,
-            is_for_zip = build_zip_enabled,
-            **common_bootstrap_template_kwargs
+            stage2_bootstrap = stage2_bootstrap,
+            runtime_details = runtime_details,
+            is_for_zip = False,
+            imports = imports,
+            main_py = main_py,
         )
     else:
         # Otherwise, this should be the Windows case of launcher + zip.
@@ -270,16 +299,40 @@
     return create_executable_result_struct(
         extra_files_to_build = depset(extra_files_to_build),
         output_groups = {"python_zip_file": depset([zip_file])},
+        extra_runfiles = extra_runfiles,
     )
 
-def _expand_bootstrap_template(
+def _create_zip_main(ctx, *, stage2_bootstrap, runtime_details):
+    # The location of this file doesn't really matter. It's added to
+    # the zip file as the top-level __main__.py file and not included
+    # elsewhere.
+    output = ctx.actions.declare_file(ctx.label.name + "_zip__main__.py")
+    ctx.actions.expand_template(
+        template = runtime_details.effective_runtime.zip_main_template,
+        output = output,
+        substitutions = {
+            "%python_binary%": runtime_details.executable_interpreter_path,
+            "%stage2_bootstrap%": "{}/{}".format(
+                ctx.workspace_name,
+                stage2_bootstrap.short_path,
+            ),
+            "%workspace_name%": ctx.workspace_name,
+        },
+    )
+    return output
+
+def _create_stage2_bootstrap(
         ctx,
         *,
-        output,
+        output_prefix,
+        output_sibling,
         main_py,
         imports,
-        is_for_zip,
         runtime_details):
+    output = ctx.actions.declare_file(
+        "{}_stage2_bootstrap.py".format(output_prefix),
+        sibling = output_sibling,
+    )
     runtime = runtime_details.effective_runtime
     if (ctx.configuration.coverage_enabled and
         runtime and
@@ -291,12 +344,7 @@
     else:
         coverage_tool_runfiles_path = ""
 
-    if runtime:
-        shebang = runtime.stub_shebang
-        template = runtime.bootstrap_template
-    else:
-        shebang = DEFAULT_STUB_SHEBANG
-        template = ctx.file._bootstrap_template
+    template = runtime.stage2_bootstrap_template
 
     ctx.actions.expand_template(
         template = template,
@@ -305,18 +353,66 @@
             "%coverage_tool%": coverage_tool_runfiles_path,
             "%import_all%": "True" if ctx.fragments.bazel_py.python_import_all_repositories else "False",
             "%imports%": ":".join(imports.to_list()),
-            "%is_zipfile%": "True" if is_for_zip else "False",
-            "%main%": "{}/{}".format(
-                ctx.workspace_name,
-                main_py.short_path,
-            ),
-            "%python_binary%": runtime_details.executable_interpreter_path,
-            "%shebang%": shebang,
+            "%main%": "{}/{}".format(ctx.workspace_name, main_py.short_path),
             "%target%": str(ctx.label),
             "%workspace_name%": ctx.workspace_name,
         },
         is_executable = True,
     )
+    return output
+
+def _create_stage1_bootstrap(
+        ctx,
+        *,
+        output,
+        main_py = None,
+        stage2_bootstrap = None,
+        imports = None,
+        is_for_zip,
+        runtime_details):
+    runtime = runtime_details.effective_runtime
+
+    subs = {
+        "%is_zipfile%": "1" if is_for_zip else "0",
+        "%python_binary%": runtime_details.executable_interpreter_path,
+        "%target%": str(ctx.label),
+        "%workspace_name%": ctx.workspace_name,
+    }
+
+    if stage2_bootstrap:
+        subs["%stage2_bootstrap%"] = "{}/{}".format(
+            ctx.workspace_name,
+            stage2_bootstrap.short_path,
+        )
+        template = runtime.bootstrap_template
+        subs["%shebang%"] = runtime.stub_shebang
+    else:
+        if (ctx.configuration.coverage_enabled and
+            runtime and
+            runtime.coverage_tool):
+            coverage_tool_runfiles_path = "{}/{}".format(
+                ctx.workspace_name,
+                runtime.coverage_tool.short_path,
+            )
+        else:
+            coverage_tool_runfiles_path = ""
+        if runtime:
+            subs["%shebang%"] = runtime.stub_shebang
+            template = runtime.bootstrap_template
+        else:
+            subs["%shebang%"] = DEFAULT_STUB_SHEBANG
+            template = ctx.file._bootstrap_template
+
+        subs["%coverage_tool%"] = coverage_tool_runfiles_path
+        subs["%import_all%"] = ("True" if ctx.fragments.bazel_py.python_import_all_repositories else "False")
+        subs["%imports%"] = ":".join(imports.to_list())
+        subs["%main%"] = "{}/{}".format(ctx.workspace_name, main_py.short_path)
+
+    ctx.actions.expand_template(
+        template = template,
+        output = output,
+        substitutions = subs,
+    )
 
 def _create_windows_exe_launcher(
         ctx,
@@ -336,10 +432,11 @@
     launch_info.add(python_binary_path, format = "python_bin_path=%s")
     launch_info.add("1" if use_zip_file else "0", format = "use_zip_file=%s")
 
+    launcher = ctx.attr._launcher[DefaultInfo].files_to_run.executable
     ctx.actions.run(
         executable = ctx.executable._windows_launcher_maker,
-        arguments = [ctx.executable._launcher.path, launch_info, output.path],
-        inputs = [ctx.executable._launcher],
+        arguments = [launcher.path, launch_info, output.path],
+        inputs = [launcher],
         outputs = [output],
         mnemonic = "PyBuildLauncher",
         progress_message = "Creating launcher for %{label}",
@@ -347,7 +444,7 @@
         use_default_shell_env = True,
     )
 
-def _create_zip_file(ctx, *, output, original_nonzip_executable, executable_for_zip_file, runfiles):
+def _create_zip_file(ctx, *, output, original_nonzip_executable, zip_main, runfiles):
     workspace_name = ctx.workspace_name
     legacy_external_runfiles = _py_builtins.get_legacy_external_runfiles(ctx)
 
@@ -355,7 +452,7 @@
     manifest.use_param_file("@%s", use_always = True)
     manifest.set_param_file_format("multiline")
 
-    manifest.add("__main__.py={}".format(executable_for_zip_file.path))
+    manifest.add("__main__.py={}".format(zip_main.path))
     manifest.add("__init__.py=")
     manifest.add(
         "{}=".format(
@@ -376,7 +473,7 @@
 
     manifest.add_all(runfiles.files, map_each = map_zip_runfiles, allow_closure = True)
 
-    inputs = [executable_for_zip_file]
+    inputs = [zip_main]
     if _py_builtins.is_bzlmod_enabled(ctx):
         zip_repo_mapping_manifest = ctx.actions.declare_file(
             output.basename + ".repo_mapping",
@@ -425,17 +522,32 @@
         zip_runfiles_path = paths.normalize("{}/{}".format(workspace_name, path))
     return "{}/{}".format(_ZIP_RUNFILES_DIRECTORY_NAME, zip_runfiles_path)
 
-def _create_executable_zip_file(ctx, *, output, zip_file):
+def _create_executable_zip_file(ctx, *, output, zip_file, stage2_bootstrap, runtime_details):
+    prelude = ctx.actions.declare_file(
+        "{}_zip_prelude.sh".format(output.basename),
+        sibling = output,
+    )
+    if stage2_bootstrap:
+        _create_stage1_bootstrap(
+            ctx,
+            output = prelude,
+            stage2_bootstrap = stage2_bootstrap,
+            runtime_details = runtime_details,
+            is_for_zip = True,
+        )
+    else:
+        ctx.actions.write(prelude, "#!/usr/bin/env python3\n")
+
     ctx.actions.run_shell(
-        command = "echo '{shebang}' | cat - {zip} > {output}".format(
-            shebang = "#!/usr/bin/env python3",
+        command = "cat {prelude} {zip} > {output}".format(
+            prelude = prelude.path,
             zip = zip_file.path,
             output = output.path,
         ),
-        inputs = [zip_file],
+        inputs = [prelude, zip_file],
         outputs = [output],
         use_default_shell_env = True,
-        mnemonic = "BuildBinary",
+        mnemonic = "PyBuildExecutableZip",
         progress_message = "Build Python zip executable: %{label}",
     )
 
@@ -448,6 +560,7 @@
         extra_runfiles = ctx.runfiles(),
         # Though the rules require the CcToolchain, it isn't actually used.
         cc_toolchain = None,
+        feature_config = None,
     )
 
 def _get_interpreter_path(ctx, *, runtime, flag_interpreter_path):
diff --git a/python/private/common/py_library.bzl b/python/private/common/py_library.bzl
index 28ee7bf..673beed 100644
--- a/python/private/common/py_library.bzl
+++ b/python/private/common/py_library.bzl
@@ -14,6 +14,13 @@
 """Implementation of py_library rule."""
 
 load("@bazel_skylib//lib:dicts.bzl", "dicts")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("//python/private:flags.bzl", "PrecompileAddToRunfilesFlag")
+load(
+    "//python/private:toolchain_types.bzl",
+    "EXEC_TOOLS_TOOLCHAIN_TYPE",
+    TOOLCHAIN_TYPE = "TARGET_TOOLCHAIN_TYPE",
+)
 load(
     ":attributes.bzl",
     "COMMON_ATTRS",
@@ -57,14 +64,26 @@
     """
     check_native_allowed(ctx)
     direct_sources = filter_to_py_srcs(ctx.files.srcs)
-    output_sources = depset(semantics.maybe_precompile(ctx, direct_sources))
-    runfiles = collect_runfiles(ctx = ctx, files = output_sources)
+
+    precompile_result = semantics.maybe_precompile(ctx, direct_sources)
+    direct_pyc_files = depset(precompile_result.pyc_files)
+    default_outputs = depset(precompile_result.keep_srcs, transitive = [direct_pyc_files])
+
+    extra_runfiles_depsets = [depset(precompile_result.keep_srcs)]
+    if ctx.attr._precompile_add_to_runfiles_flag[BuildSettingInfo].value == PrecompileAddToRunfilesFlag.ALWAYS:
+        extra_runfiles_depsets.append(direct_pyc_files)
+
+    runfiles = collect_runfiles(
+        ctx = ctx,
+        files = depset(transitive = extra_runfiles_depsets),
+    )
 
     cc_info = semantics.get_cc_info_for_library(ctx)
     py_info, deps_transitive_sources, builtins_py_info = create_py_info(
         ctx,
         direct_sources = depset(direct_sources),
         imports = collect_imports(ctx, semantics),
+        direct_pyc_files = direct_pyc_files,
     )
 
     # TODO(b/253059598): Remove support for extra actions; https://github.com/bazelbuild/bazel/issues/16455
@@ -76,7 +95,7 @@
         )
 
     return [
-        DefaultInfo(files = output_sources, runfiles = runfiles),
+        DefaultInfo(files = default_outputs, runfiles = runfiles),
         py_info,
         builtins_py_info,
         create_instrumented_files_info(ctx),
@@ -84,6 +103,18 @@
         create_output_group_info(py_info.transitive_sources, extra_groups = {}),
     ]
 
+_DEFAULT_PY_LIBRARY_DOC = """
+A library of Python code that can be depended upon.
+
+Default outputs:
+* The input Python sources
+* The precompiled artifacts from the sources.
+
+NOTE: Precompilation affects which of the default outputs are included in the
+resulting runfiles. See the precompile-related attributes and flags for
+more information.
+"""
+
 def create_py_library_rule(*, attrs = {}, **kwargs):
     """Creates a py_library rule.
 
@@ -93,8 +124,15 @@
     Returns:
         A rule object
     """
+
+    # Within Google, the doc attribute is overridden
+    kwargs.setdefault("doc", _DEFAULT_PY_LIBRARY_DOC)
     return rule(
         attrs = dicts.add(LIBRARY_ATTRS, attrs),
+        toolchains = [
+            config_common.toolchain_type(TOOLCHAIN_TYPE, mandatory = False),
+            config_common.toolchain_type(EXEC_TOOLS_TOOLCHAIN_TYPE, mandatory = False),
+        ],
         # TODO(b/253818097): fragments=py is only necessary so that
         # RequiredConfigFragmentsTest passes
         fragments = ["py"],
diff --git a/python/private/common/py_runtime_rule.bzl b/python/private/common/py_runtime_rule.bzl
index 9d53543..e0b5fb2 100644
--- a/python/private/common/py_runtime_rule.bzl
+++ b/python/private/common/py_runtime_rule.bzl
@@ -79,11 +79,23 @@
 
     python_version = ctx.attr.python_version
 
+    interpreter_version_info = ctx.attr.interpreter_version_info
+
     # TODO: Uncomment this after --incompatible_python_disable_py2 defaults to true
     # if ctx.fragments.py.disable_py2 and python_version == "PY2":
     #     fail("Using Python 2 is not supported and disabled; see " +
     #          "https://github.com/bazelbuild/bazel/issues/15684")
 
+    pyc_tag = ctx.attr.pyc_tag
+    if not pyc_tag and (ctx.attr.implementation_name and
+                        interpreter_version_info.get("major") and
+                        interpreter_version_info.get("minor")):
+        pyc_tag = "{}-{}{}".format(
+            ctx.attr.implementation_name,
+            interpreter_version_info["major"],
+            interpreter_version_info["minor"],
+        )
+
     py_runtime_info_kwargs = dict(
         interpreter_path = interpreter_path or None,
         interpreter = interpreter,
@@ -95,8 +107,19 @@
         bootstrap_template = ctx.file.bootstrap_template,
     )
     builtin_py_runtime_info_kwargs = dict(py_runtime_info_kwargs)
+
+    # These are all args that BuiltinPyRuntimeInfo doesn't support
+    py_runtime_info_kwargs.update(dict(
+        implementation_name = ctx.attr.implementation_name,
+        interpreter_version_info = interpreter_version_info,
+        pyc_tag = pyc_tag,
+        stage2_bootstrap_template = ctx.file.stage2_bootstrap_template,
+        zip_main_template = ctx.file.zip_main_template,
+    ))
+
     if not IS_BAZEL_7_OR_HIGHER:
         builtin_py_runtime_info_kwargs.pop("bootstrap_template")
+
     return [
         PyRuntimeInfo(**py_runtime_info_kwargs),
         # Return the builtin provider for better compatibility.
@@ -137,7 +160,7 @@
 a checked-in interpreter or a wrapper script that accesses the system
 interpreter.
 
-# Example
+Example
 
 ```
 load("@rules_python//python:py_runtime.bzl", "py_runtime")
@@ -203,6 +226,9 @@
 runtime. For a platform runtime this attribute must not be set.
 """,
         ),
+        "implementation_name": attr.string(
+            doc = "The Python implementation name (`sys.implementation.name`)",
+        ),
         "interpreter": attr.label(
             # We set `allow_files = True` to allow specifying executable
             # targets from rules that have more than one default output,
@@ -232,6 +258,27 @@
 For a platform runtime, this is the absolute path of a Python interpreter on
 the target platform. For an in-build runtime this attribute must not be set.
 """),
+        "interpreter_version_info": attr.string_dict(
+            doc = """
+Version information about the interpreter this runtime provides. The
+supported keys match the names for `sys.version_info`. While the input
+values are strings, most are converted to ints. The supported keys are:
+  * major: int, the major version number
+  * minor: int, the minor version number
+  * micro: optional int, the micro version number
+  * releaselevel: optional str, the release level
+  * serial: optional int, the serial number of the release
+            """,
+            mandatory = False,
+        ),
+        "pyc_tag": attr.string(
+            doc = """
+Optional string; the tag portion of a pyc filename, e.g. the `cpython-39` infix
+of `foo.cpython-39.pyc`. See PEP 3147. If not specified, it will be computed
+from `implementation_name` and `interpreter_version_info`. If no pyc_tag is
+available, then only source-less pyc generation will function correctly.
+""",
+        ),
         "python_version": attr.string(
             default = "PY3",
             values = ["PY2", "PY3"],
@@ -244,6 +291,17 @@
 value.
             """,
         ),
+        "stage2_bootstrap_template": attr.label(
+            default = "//python/private:stage2_bootstrap_template",
+            allow_single_file = True,
+            doc = """
+The template to use when two stage bootstrapping is enabled
+
+:::{seealso}
+{obj}`PyRuntimeInfo.stage2_bootstrap_template` and {obj}`--bootstrap_impl`
+:::
+""",
+        ),
         "stub_shebang": attr.string(
             default = DEFAULT_STUB_SHEBANG,
             doc = """
@@ -256,5 +314,18 @@
 Does not apply to Windows.
 """,
         ),
+        "zip_main_template": attr.label(
+            default = "//python/private:zip_main_template",
+            allow_single_file = True,
+            doc = """
+The template to use for a zip's top-level `__main__.py` file.
+
+This becomes the entry point executed when `python foo.zip` is run.
+
+:::{seealso}
+The {obj}`PyRuntimeInfo.zip_main_template` field.
+:::
+""",
+        ),
     }),
 )
diff --git a/python/private/common/py_test_rule_bazel.bzl b/python/private/common/py_test_rule_bazel.bzl
index 3479d03..369360d 100644
--- a/python/private/common/py_test_rule_bazel.bzl
+++ b/python/private/common/py_test_rule_bazel.bzl
@@ -45,7 +45,7 @@
         is_test = True,
         inherited_environment = ctx.attr.env_inherit,
     )
-    maybe_add_test_execution_info(providers.providers, ctx)
+    maybe_add_test_execution_info(providers, ctx)
     return providers
 
 py_test = create_executable_rule(
diff --git a/python/private/config_settings.bzl b/python/private/config_settings.bzl
new file mode 100644
index 0000000..0537655
--- /dev/null
+++ b/python/private/config_settings.bzl
@@ -0,0 +1,202 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module is used to construct the config settings in the BUILD file in this same package.
+"""
+
+load("@bazel_skylib//lib:selects.bzl", "selects")
+load("@bazel_skylib//rules:common_settings.bzl", "string_flag")
+load("//python:versions.bzl", "MINOR_MAPPING", "TOOL_VERSIONS")
+
+_PYTHON_VERSION_FLAG = str(Label("//python/config_settings:python_version"))
+
+def _ver_key(s):
+    major, _, s = s.partition(".")
+    minor, _, s = s.partition(".")
+    micro, _, s = s.partition(".")
+    return (int(major), int(minor), int(micro))
+
+def _flag_values(python_versions):
+    """Construct a map of python_version to a list of toolchain values.
+
+    This mapping maps the concept of a config setting to a list of compatible toolchain versions.
+    For using this in the code, the VERSION_FLAG_VALUES should be used instead.
+
+    Args:
+        python_versions: list of strings; all X.Y.Z python versions
+
+    Returns:
+        A `map[str, list[str]]`. Each key is a python_version flag value. Each value
+        is a list of the python_version flag values that should match when for the
+        `key`. For example:
+        ```
+         "3.8" -> ["3.8", "3.8.1", "3.8.2", ..., "3.8.19"]  # All 3.8 versions
+         "3.8.2" -> ["3.8.2"]  # Only 3.8.2
+         "3.8.19" -> ["3.8.19", "3.8"]  # The latest version should also match 3.8 so
+             as when the `3.8` toolchain is used we just use the latest `3.8` toolchain.
+             this makes the `select("is_python_3.8.19")` work no matter how the user
+             specifies the latest python version to use.
+        ```
+    """
+    ret = {}
+
+    for micro_version in sorted(python_versions, key = _ver_key):
+        minor_version, _, _ = micro_version.rpartition(".")
+
+        # This matches the raw flag value, e.g. --//python/config_settings:python_version=3.8
+        # It's private because matching the concept of e.g. "3.8" value is done
+        # using the `is_python_X.Y` config setting group, which is aware of the
+        # minor versions that could match instead.
+        ret.setdefault(minor_version, [minor_version]).append(micro_version)
+
+        # Ensure that is_python_3.9.8 is matched if python_version is set
+        # to 3.9 if MINOR_MAPPING points to 3.9.8
+        default_micro_version = MINOR_MAPPING[minor_version]
+        ret[micro_version] = [micro_version, minor_version] if default_micro_version == micro_version else [micro_version]
+
+    return ret
+
+VERSION_FLAG_VALUES = _flag_values(TOOL_VERSIONS.keys())
+
+def is_python_config_setting(name, *, python_version, reuse_conditions = None, **kwargs):
+    """Create a config setting for matching 'python_version' configuration flag.
+
+    This function is mainly intended for internal use within the `whl_library` and `pip_parse`
+    machinery.
+
+    The matching of the 'python_version' flag depends on the value passed in
+    `python_version` and here is the example for `3.8` (but the same applies
+    to other python versions present in @//python:versions.bzl#TOOL_VERSIONS):
+     * "3.8" -> ["3.8", "3.8.1", "3.8.2", ..., "3.8.19"]  # All 3.8 versions
+     * "3.8.2" -> ["3.8.2"]  # Only 3.8.2
+     * "3.8.19" -> ["3.8.19", "3.8"]  # The latest version should also match 3.8 so
+         as when the `3.8` toolchain is used we just use the latest `3.8` toolchain.
+         this makes the `select("is_python_3.8.19")` work no matter how the user
+         specifies the latest python version to use.
+
+    Args:
+        name: name for the target that will be created to be used in select statements.
+        python_version: The python_version to be passed in the `flag_values` in the
+            `config_setting`. Depending on the version, the matching python version list
+            can be as described above.
+        reuse_conditions: A dict of version to version label for which we should
+            reuse config_setting targets instead of creating them from scratch. This
+            is useful when using is_python_config_setting multiple times in the
+            same package with the same `major.minor` python versions.
+        **kwargs: extra kwargs passed to the `config_setting`.
+    """
+    if python_version not in name:
+        fail("The name '{}' must have the python version '{}' in it".format(name, python_version))
+
+    if python_version not in VERSION_FLAG_VALUES:
+        fail("The 'python_version' must be known to 'rules_python', choose from the values: {}".format(VERSION_FLAG_VALUES.keys()))
+
+    python_versions = VERSION_FLAG_VALUES[python_version]
+    extra_flag_values = kwargs.pop("flag_values", {})
+    if _PYTHON_VERSION_FLAG in extra_flag_values:
+        fail("Cannot set '{}' in the flag values".format(_PYTHON_VERSION_FLAG))
+
+    if len(python_versions) == 1:
+        native.config_setting(
+            name = name,
+            flag_values = {
+                _PYTHON_VERSION_FLAG: python_version,
+            } | extra_flag_values,
+            **kwargs
+        )
+        return
+
+    reuse_conditions = reuse_conditions or {}
+    create_config_settings = {
+        "_{}".format(name).replace(python_version, version): {_PYTHON_VERSION_FLAG: version}
+        for version in python_versions
+        if not reuse_conditions or version not in reuse_conditions
+    }
+    match_any = list(create_config_settings.keys())
+    for version, condition in reuse_conditions.items():
+        if len(VERSION_FLAG_VALUES[version]) == 1:
+            match_any.append(condition)
+            continue
+
+        # Convert the name to an internal label that this function would create,
+        # so that we are hitting the config_setting and not the config_setting_group.
+        condition = Label(condition)
+        if hasattr(condition, "same_package_label"):
+            condition = condition.same_package_label("_" + condition.name)
+        else:
+            condition = condition.relative("_" + condition.name)
+
+        match_any.append(condition)
+
+    for name_, flag_values_ in create_config_settings.items():
+        native.config_setting(
+            name = name_,
+            flag_values = flag_values_ | extra_flag_values,
+            **kwargs
+        )
+
+    # An alias pointing to an underscore-prefixed config_setting_group
+    # is used because config_setting_group creates
+    # `is_{version}_N` targets, which are easily confused with the
+    # `is_{minor}.{micro}` (dot) targets.
+    selects.config_setting_group(
+        name = "_{}_group".format(name),
+        match_any = match_any,
+        visibility = ["//visibility:private"],
+    )
+    native.alias(
+        name = name,
+        actual = "_{}_group".format(name),
+        visibility = kwargs.get("visibility", []),
+    )
+
+def construct_config_settings(name = None):  # buildifier: disable=function-docstring
+    """Create a 'python_version' config flag and construct all config settings used in rules_python.
+
+    This mainly includes the targets that are used in the toolchain and pip hub
+    repositories that only match on the 'python_version' flag values.
+
+    Args:
+        name(str): A dummy name value that is no-op for now.
+    """
+    string_flag(
+        name = "python_version",
+        # TODO: The default here should somehow match the MODULE config. Until
+        # then, use the empty string to indicate an unknown version. This
+        # also prevents version-unaware targets from inadvertently matching
+        # a select condition when they shouldn't.
+        build_setting_default = "",
+        values = [""] + VERSION_FLAG_VALUES.keys(),
+        visibility = ["//visibility:public"],
+    )
+
+    native.config_setting(
+        name = "is_python_version_unset",
+        flag_values = {
+            Label("//python/config_settings:python_version"): "",
+        },
+        visibility = ["//visibility:public"],
+    )
+
+    for version, matching_versions in VERSION_FLAG_VALUES.items():
+        is_python_config_setting(
+            name = "is_python_{}".format(version),
+            python_version = version,
+            reuse_conditions = {
+                v: native.package_relative_label("is_python_{}".format(v))
+                for v in matching_versions
+                if v != version
+            },
+            visibility = ["//visibility:public"],
+        )
diff --git a/python/private/coverage.patch b/python/private/coverage.patch
index cb4402e..051f7fc 100644
--- a/python/private/coverage.patch
+++ b/python/private/coverage.patch
@@ -5,11 +5,11 @@
 # NOTE @aignas 2023-06-05: we have to do this before anything from coverage gets
 # imported.
 diff --git a/coverage/__main__.py b/coverage/__main__.py
-index 79aa4e2b..291fcff8 100644
+index ce2d8db..7d7d0a0 100644
 --- a/coverage/__main__.py
 +++ b/coverage/__main__.py
-@@ -4,5 +4,6 @@
- """Coverage.py's main entry point."""
+@@ -6,5 +6,6 @@
+ from __future__ import annotations
 
  import sys
 +sys.path.append(sys.path.pop(0))
diff --git a/python/private/coverage_deps.bzl b/python/private/coverage_deps.bzl
index 7980779..d69fab9 100644
--- a/python/private/coverage_deps.bzl
+++ b/python/private/coverage_deps.bzl
@@ -19,82 +19,100 @@
 load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
 load("//python/private:version_label.bzl", "version_label")
 
-# START: maintained by 'bazel run //tools/private:update_coverage_deps'
+# START: maintained by 'bazel run //tools/private/update_deps:update_coverage_deps <version>'
 _coverage_deps = {
     "cp310": {
         "aarch64-apple-darwin": (
-            "https://files.pythonhosted.org/packages/3d/80/7060a445e1d2c9744b683dc935248613355657809d6c6b2716cdf4ca4766/coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl",
-            "6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb",
+            "https://files.pythonhosted.org/packages/a3/36/b5ae380c05f58544a40ff36f87fa1d6e45f5c2f299335586aac140c341ce/coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl",
+            "718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4",
         ),
         "aarch64-unknown-linux-gnu": (
-            "https://files.pythonhosted.org/packages/b8/9d/926fce7e03dbfc653104c2d981c0fa71f0572a9ebd344d24c573bd6f7c4f/coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
-            "ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6",
+            "https://files.pythonhosted.org/packages/9e/48/5ae1ccf4601500af0ca36eba0a2c1f1796e58fb7495de6da55ed43e13e5f/coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+            "767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524",
         ),
         "x86_64-apple-darwin": (
-            "https://files.pythonhosted.org/packages/01/24/be01e62a7bce89bcffe04729c540382caa5a06bee45ae42136c93e2499f5/coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl",
-            "d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8",
+            "https://files.pythonhosted.org/packages/50/5a/d727fcd2e0fc3aba61591b6f0fe1e87865ea9b6275f58f35810d6f85b05b/coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl",
+            "8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6",
         ),
         "x86_64-unknown-linux-gnu": (
-            "https://files.pythonhosted.org/packages/b4/bd/1b2331e3a04f4cc9b7b332b1dd0f3a1261dfc4114f8479bebfcc2afee9e8/coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
-            "31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063",
+            "https://files.pythonhosted.org/packages/23/0a/ab5b0f6d6b24f7156624e7697ec7ab49f9d5cdac922da90d9927ae5de1cf/coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+            "ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb",
         ),
     },
     "cp311": {
         "aarch64-apple-darwin": (
-            "https://files.pythonhosted.org/packages/67/d7/cd8fe689b5743fffac516597a1222834c42b80686b99f5b44ef43ccc2a43/coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl",
-            "5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe",
+            "https://files.pythonhosted.org/packages/f8/a1/161102d2e26fde2d878d68cc1ed303758dc7b01ee14cc6aa70f5fd1b910d/coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl",
+            "489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113",
         ),
         "aarch64-unknown-linux-gnu": (
-            "https://files.pythonhosted.org/packages/8c/95/16eed713202406ca0a37f8ac259bbf144c9d24f9b8097a8e6ead61da2dbb/coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
-            "fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3",
+            "https://files.pythonhosted.org/packages/a7/af/1510df1132a68ca876013c0417ca46836252e43871d2623b489e4339c980/coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+            "451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe",
         ),
         "x86_64-apple-darwin": (
-            "https://files.pythonhosted.org/packages/c6/fa/529f55c9a1029c840bcc9109d5a15ff00478b7ff550a1ae361f8745f8ad5/coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl",
-            "06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f",
+            "https://files.pythonhosted.org/packages/ca/77/f17a5b199e8ca0443ace312f7e07ff3e4e7ba7d7c52847567d6f1edb22a7/coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl",
+            "cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47",
         ),
         "x86_64-unknown-linux-gnu": (
-            "https://files.pythonhosted.org/packages/a7/cd/3ce94ad9d407a052dc2a74fbeb1c7947f442155b28264eb467ee78dea812/coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
-            "63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb",
+            "https://files.pythonhosted.org/packages/a9/1a/e2120233177b3e2ea9dcfd49a050748060166c74792b2b1db4a803307da4/coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+            "b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3",
+        ),
+    },
+    "cp312": {
+        "aarch64-apple-darwin": (
+            "https://files.pythonhosted.org/packages/9d/d8/111ec1a65fef57ad2e31445af627d481f660d4a9218ee5c774b45187812a/coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl",
+            "d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328",
+        ),
+        "aarch64-unknown-linux-gnu": (
+            "https://files.pythonhosted.org/packages/8f/eb/28416f1721a3b7fa28ea499e8a6f867e28146ea2453839c2bca04a001eeb/coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+            "3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30",
+        ),
+        "x86_64-apple-darwin": (
+            "https://files.pythonhosted.org/packages/11/5c/2cf3e794fa5d1eb443aa8544e2ba3837d75073eaf25a1fda64d232065609/coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl",
+            "b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10",
+        ),
+        "x86_64-unknown-linux-gnu": (
+            "https://files.pythonhosted.org/packages/2f/db/70900f10b85a66f761a3a28950ccd07757d51548b1d10157adc4b9415f15/coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+            "b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e",
         ),
     },
     "cp38": {
         "aarch64-apple-darwin": (
-            "https://files.pythonhosted.org/packages/28/d7/9a8de57d87f4bbc6f9a6a5ded1eaac88a89bf71369bb935dac3c0cf2893e/coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl",
-            "3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5",
+            "https://files.pythonhosted.org/packages/96/71/1c299b12e80d231e04a2bfd695e761fb779af7ab66f8bd3cb15649be82b3/coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl",
+            "280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e",
         ),
         "aarch64-unknown-linux-gnu": (
-            "https://files.pythonhosted.org/packages/c8/e4/e6182e4697665fb594a7f4e4f27cb3a4dd00c2e3d35c5c706765de8c7866/coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
-            "5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9",
+            "https://files.pythonhosted.org/packages/c7/a7/b00eaa53d904193478eae01625d784b2af8b522a98028f47c831dcc95663/coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+            "6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2",
         ),
         "x86_64-apple-darwin": (
-            "https://files.pythonhosted.org/packages/c6/fc/be19131010930a6cf271da48202c8cc1d3f971f68c02fb2d3a78247f43dc/coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl",
-            "54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5",
+            "https://files.pythonhosted.org/packages/e2/bc/f54b24b476db0069ac04ff2cdeb28cd890654c8619761bf818726022c76a/coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl",
+            "28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454",
         ),
         "x86_64-unknown-linux-gnu": (
-            "https://files.pythonhosted.org/packages/44/55/49f65ccdd4dfd6d5528e966b28c37caec64170c725af32ab312889d2f857/coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
-            "8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e",
+            "https://files.pythonhosted.org/packages/d0/3a/e882caceca2c7d65791a4a759764a1bf803bbbd10caf38ec41d73a45219e/coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+            "dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6",
         ),
     },
     "cp39": {
         "aarch64-apple-darwin": (
-            "https://files.pythonhosted.org/packages/ca/0c/3dfeeb1006c44b911ee0ed915350db30325d01808525ae7cc8d57643a2ce/coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl",
-            "06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2",
+            "https://files.pythonhosted.org/packages/66/f2/57f5d3c9d2e78c088e4c8dbc933b85fa81c424f23641f10c1aa64052ee4f/coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl",
+            "77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c",
         ),
         "aarch64-unknown-linux-gnu": (
-            "https://files.pythonhosted.org/packages/61/af/5964b8d7d9a5c767785644d9a5a63cacba9a9c45cc42ba06d25895ec87be/coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
-            "201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7",
+            "https://files.pythonhosted.org/packages/ad/3f/cde6fd2e4cc447bd24e3dc2e79abd2e0fba67ac162996253d3505f8efef4/coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+            "6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e",
         ),
         "x86_64-apple-darwin": (
-            "https://files.pythonhosted.org/packages/88/da/495944ebf0ad246235a6bd523810d9f81981f9b81c6059ba1f56e943abe0/coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl",
-            "537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9",
+            "https://files.pythonhosted.org/packages/d6/cf/4094ac6410b680c91c5e55a56f25f4b3a878e2fcbf773c1cecfbdbaaec4f/coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl",
+            "3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f",
         ),
         "x86_64-unknown-linux-gnu": (
-            "https://files.pythonhosted.org/packages/fe/57/e4f8ad64d84ca9e759d783a052795f62a9f9111585e46068845b1cb52c2b/coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
-            "6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1",
+            "https://files.pythonhosted.org/packages/b5/ad/effc12b8f72321cb847c5ba7f4ea7ce3e5c19c641f6418131f8fb0ab2f61/coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+            "8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee",
         ),
     },
 }
-# END: maintained by 'bazel run //tools/private:update_coverage_deps'
+# END: maintained by 'bazel run //tools/private/update_deps:update_coverage_deps <version>'
 
 _coverage_patch = Label("//python/private:coverage.patch")
 
diff --git a/python/private/current_py_cc_headers.bzl b/python/private/current_py_cc_headers.bzl
index be7f8f8..e72199e 100644
--- a/python/private/current_py_cc_headers.bzl
+++ b/python/private/current_py_cc_headers.bzl
@@ -14,6 +14,8 @@
 
 """Implementation of current_py_cc_headers rule."""
 
+load("@rules_cc//cc:defs.bzl", "CcInfo")
+
 def _current_py_cc_headers_impl(ctx):
     py_cc_toolchain = ctx.toolchains["//python/cc:toolchain_type"].py_cc_toolchain
     return py_cc_toolchain.headers.providers_map.values()
diff --git a/python/private/current_py_cc_libs.bzl b/python/private/current_py_cc_libs.bzl
new file mode 100644
index 0000000..d66c401
--- /dev/null
+++ b/python/private/current_py_cc_libs.bzl
@@ -0,0 +1,43 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Implementation of current_py_cc_libs rule."""
+
+load("@rules_cc//cc:defs.bzl", "CcInfo")
+
+def _current_py_cc_libs_impl(ctx):
+    py_cc_toolchain = ctx.toolchains["//python/cc:toolchain_type"].py_cc_toolchain
+    return py_cc_toolchain.libs.providers_map.values()
+
+current_py_cc_libs = rule(
+    implementation = _current_py_cc_libs_impl,
+    toolchains = ["//python/cc:toolchain_type"],
+    provides = [CcInfo],
+    doc = """\
+Provides the currently active Python toolchain's C libraries.
+
+This is a wrapper around the underlying `cc_library()` for the
+C libraries for the consuming target's currently active Python toolchain.
+
+To use, simply depend on this target where you would have wanted the
+toolchain's underlying `:libpython` target:
+
+```starlark
+cc_library(
+    name = "foo",
+    deps = ["@rules_python//python/cc:current_py_cc_libs"]
+)
+```
+""",
+)
diff --git a/python/private/enum.bzl b/python/private/enum.bzl
new file mode 100644
index 0000000..011d9fb
--- /dev/null
+++ b/python/private/enum.bzl
@@ -0,0 +1,36 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Enum-like object utilities
+
+This is a separate file to minimize transitive loads.
+"""
+
+def enum(**kwargs):
+    """Creates a struct whose primary purpose is to be like an enum.
+
+    Args:
+        **kwargs: The fields of the returned struct. All uppercase names will
+            be treated as enum values and added to `__members__`.
+
+    Returns:
+        `struct` with the given values. It also has the field `__members__`,
+        which is a dict of the enum names and values.
+    """
+    members = {
+        key: value
+        for key, value in kwargs.items()
+        if key.upper() == key
+    }
+    return struct(__members__ = members, **kwargs)
diff --git a/python/private/envsubst.bzl b/python/private/envsubst.bzl
new file mode 100644
index 0000000..b2fdb99
--- /dev/null
+++ b/python/private/envsubst.bzl
@@ -0,0 +1,65 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Substitute environment variables in shell format strings."""
+
+def envsubst(template_string, varnames, getenv):
+    """Helper function to substitute environment variables.
+
+    Supports `$VARNAME`, `${VARNAME}` and `${VARNAME:-default}`
+    syntaxes in the `template_string`, looking up each `VARNAME`
+    listed in the `varnames` list in the environment defined by the
+    `getenv` function. Typically called with `getenv = rctx.getenv`
+    (if it is available) or `getenv = rctx.os.environ.get` (on e.g.
+    Bazel 6 or Bazel 7, which don't have `rctx.getenv` yet).
+
+    Limitations: Unlike the shell, we don't support `${VARNAME}` and
+    `${VARNAME:-default}` in the default expression for a different
+    environment variable expansion. We do support the braceless syntax
+    in the default, so an expression such as `${HOME:-/home/$USER}` is
+    valid.
+
+    Args:
+      template_string: String that may contain variables to be expanded.
+      varnames: List of variable names of variables to expand in
+        `template_string`.
+      getenv: Callable mapping variable names (in the first argument)
+        to their values, or returns the default (provided in the
+        second argument to `getenv`) if a value wasn't found.
+
+    Returns:
+      `template_string` with environment variables expanded according
+      to their values as determined by `getenv`.
+    """
+
+    if not varnames:
+        return template_string
+
+    for varname in varnames:
+        value = getenv(varname, "")
+        template_string = template_string.replace("$%s" % varname, value)
+        template_string = template_string.replace("${%s}" % varname, value)
+        segments = template_string.split("${%s:-" % varname)
+        template_string = segments.pop(0)
+        for segment in segments:
+            default_value, separator, rest = segment.partition("}")
+            if "{" in default_value:
+                fail("Environment substitution expression " +
+                     "\"${%s:-\" has an opening \"{\" " % varname +
+                     "in default value \"%s\"." % default_value)
+            if not separator:
+                fail("Environment substitution expression " +
+                     "\"${%s:-\" is missing the final \"}\"" % varname)
+            template_string += (value if value else default_value) + rest
+    return template_string
diff --git a/python/private/flags.bzl b/python/private/flags.bzl
new file mode 100644
index 0000000..fa31262
--- /dev/null
+++ b/python/private/flags.bzl
@@ -0,0 +1,112 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Values and helpers for flags.
+
+NOTE: The transitive loads of this should be kept minimal. This avoids loading
+unnecessary files when all that are needed are flag definitions.
+"""
+
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("//python/private:enum.bzl", "enum")
+
+def _bootstrap_impl_flag_get_value(ctx):
+    return ctx.attr._bootstrap_impl_flag[BuildSettingInfo].value
+
+# buildifier: disable=name-conventions
+BootstrapImplFlag = enum(
+    SYSTEM_PYTHON = "system_python",
+    SCRIPT = "script",
+    get_value = _bootstrap_impl_flag_get_value,
+)
+
+def _precompile_flag_get_effective_value(ctx):
+    value = ctx.attr._precompile_flag[BuildSettingInfo].value
+    if value == PrecompileFlag.AUTO:
+        value = PrecompileFlag.DISABLED
+    return value
+
+# Determines if the Python exec tools toolchain should be registered.
+# buildifier: disable=name-conventions
+ExecToolsToolchainFlag = enum(
+    # Enable registering the exec tools toolchain using the hermetic toolchain.
+    ENABLED = "enabled",
+    # Disable registering the exec tools toolchain using the hermetic toolchain.
+    DISABLED = "disabled",
+)
+
+# Determines if Python source files should be compiled at build time.
+#
+# NOTE: The flag value is overridden by the target-level attribute, except
+# for the case of `force_enabled` and `force_disabled`.
+# buildifier: disable=name-conventions
+PrecompileFlag = enum(
+    # Automatically decide the effective value based on environment,
+    # target platform, etc.
+    AUTO = "auto",
+    # Compile Python source files at build time. Note that
+    # --precompile_add_to_runfiles affects how the compiled files are included
+    # into a downstream binary.
+    ENABLED = "enabled",
+    # Don't compile Python source files at build time.
+    DISABLED = "disabled",
+    # Compile Python source files, but only if they're a generated file.
+    IF_GENERATED_SOURCE = "if_generated_source",
+    # Like `enabled`, except overrides target-level setting. This is mostly
+    # useful for development, testing enabling precompilation more broadly, or
+    # as an escape hatch if build-time compiling is not available.
+    FORCE_ENABLED = "force_enabled",
+    # Like `disabled`, except overrides target-level setting. This is useful
+    # for development, testing enabling precompilation more broadly, or
+    # as an escape hatch if build-time compiling is not available.
+    FORCE_DISABLED = "force_disabled",
+    get_effective_value = _precompile_flag_get_effective_value,
+)
+
+# Determines if, when a source file is compiled, if the source file is kept
+# in the resulting output or not.
+# buildifier: disable=name-conventions
+PrecompileSourceRetentionFlag = enum(
+    # Include the original py source in the output.
+    KEEP_SOURCE = "keep_source",
+    # Don't include the original py source.
+    OMIT_SOURCE = "omit_source",
+    # Keep the original py source if it's a regular source file, but omit it
+    # if it's a generated file.
+    OMIT_IF_GENERATED_SOURCE = "omit_if_generated_source",
+)
+
+# Determines if a target adds its compiled files to its runfiles. When a target
+# compiles its files, but doesn't add them to its own runfiles, it relies on
+# a downstream target to retrieve them from `PyInfo.transitive_pyc_files`
+# buildifier: disable=name-conventions
+PrecompileAddToRunfilesFlag = enum(
+    # Always include the compiled files in the target's runfiles.
+    ALWAYS = "always",
+    # Don't include the compiled files in the target's runfiles; they are
+    # still added to `PyInfo.transitive_pyc_files`. See also:
+    # `py_binary.pyc_collection` attribute. This is useful for allowing
+    # incrementally enabling precompilation on a per-binary basis.
+    DECIDED_ELSEWHERE = "decided_elsewhere",
+)
+
+# Determines if `py_binary` collects transitive pyc files.
+# NOTE: This flag is only respected if `py_binary.pyc_collection` is `inherit`.
+# buildifier: disable=name-conventions
+PycCollectionFlag = enum(
+    # Include `PyInfo.transitive_pyc_files` as part of the binary.
+    INCLUDE_PYC = "include_pyc",
+    # Don't include `PyInfo.transitive_pyc_files` as part of the binary.
+    DISABLED = "disabled",
+)
diff --git a/python/private/full_version.bzl b/python/private/full_version.bzl
index 68c9694..98eeee5 100644
--- a/python/private/full_version.bzl
+++ b/python/private/full_version.bzl
@@ -40,4 +40,4 @@
             ),
         )
     else:
-        fail("Unknown version format: {}".format(version))
+        fail("Unknown version format: '{}'".format(version))
diff --git a/python/private/get_local_runtime_info.py b/python/private/get_local_runtime_info.py
new file mode 100644
index 0000000..0207f56
--- /dev/null
+++ b/python/private/get_local_runtime_info.py
@@ -0,0 +1,49 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import sys
+import sysconfig
+
+data = {
+    "major": sys.version_info.major,
+    "minor": sys.version_info.minor,
+    "micro": sys.version_info.micro,
+    "include": sysconfig.get_path("include"),
+    "implementation_name": sys.implementation.name,
+}
+
+config_vars = [
+    # The libpythonX.Y.so file. Usually?
+    # It might be a static archive (.a) file instead.
+    "LDLIBRARY",
+    # The directory with library files. Supposedly.
+    # It's not entirely clear how to get the directory with libraries.
+    # There's several types of libraries with different names and a plethora
+    # of settings.
+    # https://stackoverflow.com/questions/47423246/get-pythons-lib-path
+    # For now, it seems LIBDIR has what is needed, so just use that.
+    "LIBDIR",
+    # The versioned libpythonX.Y.so.N file. Usually?
+    # It might be a static archive (.a) file instead.
+    "INSTSONAME",
+    # The libpythonX.so file. Usually?
+    # It might be a static archive (.a) file instead.
+    "PY3LIBRARY",
+    # The platform-specific filename suffix for library files.
+    # Includes the dot, e.g. `.so`
+    "SHLIB_SUFFIX",
+]
+data.update(zip(config_vars, sysconfig.get_config_vars(*config_vars)))
+print(json.dumps(data))
diff --git a/python/private/internal_config_repo.bzl b/python/private/internal_config_repo.bzl
index cfc7616..c37bc35 100644
--- a/python/private/internal_config_repo.bzl
+++ b/python/private/internal_config_repo.bzl
@@ -18,10 +18,8 @@
 settings for rules to later use.
 """
 
-load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
-
 _ENABLE_PYSTAR_ENVVAR_NAME = "RULES_PYTHON_ENABLE_PYSTAR"
-_ENABLE_PYSTAR_DEFAULT = "0"
+_ENABLE_PYSTAR_DEFAULT = "1"
 
 _CONFIG_TEMPLATE = """\
 config = struct(
@@ -58,18 +56,20 @@
 """
 
 def _internal_config_repo_impl(rctx):
-    enable_pystar = _bool_from_environ(rctx, _ENABLE_PYSTAR_ENVVAR_NAME, _ENABLE_PYSTAR_DEFAULT)
+    pystar_requested = _bool_from_environ(rctx, _ENABLE_PYSTAR_ENVVAR_NAME, _ENABLE_PYSTAR_DEFAULT)
+
+    # Bazel 7+ (dev and later) has native.starlark_doc_extract, and thus the
+    # py_internal global, which are necessary for the pystar implementation.
+    if pystar_requested and hasattr(native, "starlark_doc_extract"):
+        enable_pystar = pystar_requested
+    else:
+        enable_pystar = False
+
     rctx.file("rules_python_config.bzl", _CONFIG_TEMPLATE.format(
         enable_pystar = enable_pystar,
     ))
 
-    if enable_pystar or (
-        # Bazel 7+ (dev and later) has native.starlark_doc_extract, and thus the py_internal global
-        hasattr(native, "starlark_doc_extract") and
-        # The logic to allow the symbol doesn't work properly under bzlmod,
-        # even if the symbol is otherwise functional.
-        not BZLMOD_ENABLED
-    ):
+    if enable_pystar:
         shim_content = _PY_INTERNAL_SHIM
         py_internal_dep = '"@rules_python//tools/build_defs/python/private:py_internal_renamed_bzl"'
     else:
@@ -92,6 +92,7 @@
 
 internal_config_repo = repository_rule(
     implementation = _internal_config_repo_impl,
+    configure = True,
     environ = [_ENABLE_PYSTAR_ENVVAR_NAME],
 )
 
diff --git a/python/private/internal_deps.bzl b/python/private/internal_deps.bzl
new file mode 100644
index 0000000..6ea3fa4
--- /dev/null
+++ b/python/private/internal_deps.bzl
@@ -0,0 +1,22 @@
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"Python toolchain module extension for internal rule use"
+
+load("@bazel_skylib//lib:modules.bzl", "modules")
+load("//python/private/pypi:deps.bzl", "pypi_deps")
+load(":internal_config_repo.bzl", "internal_config_repo")
+
+def _internal_deps():
+    internal_config_repo(name = "rules_python_internal")
+    pypi_deps()
+
+internal_deps = modules.as_extension(
+    _internal_deps,
+    doc = "This extension registers internal rules_python dependencies.",
+)
diff --git a/python/private/local_runtime_repo.bzl b/python/private/local_runtime_repo.bzl
new file mode 100644
index 0000000..4e7edde
--- /dev/null
+++ b/python/private/local_runtime_repo.bzl
@@ -0,0 +1,237 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create a repository for a locally installed Python runtime."""
+
+load("//python/private:enum.bzl", "enum")
+load(":repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "repo_utils")
+
+# buildifier: disable=name-conventions
+_OnFailure = enum(
+    SKIP = "skip",
+    WARN = "warn",
+    FAIL = "fail",
+)
+
+_TOOLCHAIN_IMPL_TEMPLATE = """\
+# Generated by python/private/local_runtime_repo.bzl
+
+load("@rules_python//python/private:local_runtime_repo_setup.bzl", "define_local_runtime_toolchain_impl")
+
+define_local_runtime_toolchain_impl(
+    name = "local_runtime",
+    lib_ext = "{lib_ext}",
+    major = "{major}",
+    minor = "{minor}",
+    micro = "{micro}",
+    interpreter_path = "{interpreter_path}",
+    implementation_name = "{implementation_name}",
+    os = "{os}",
+)
+"""
+
+def _local_runtime_repo_impl(rctx):
+    logger = repo_utils.logger(rctx)
+    on_failure = rctx.attr.on_failure
+
+    result = _resolve_interpreter_path(rctx)
+    if not result.resolved_path:
+        if on_failure == "fail":
+            fail("interpreter not found: {}".format(result.describe_failure()))
+
+        if on_failure == "warn":
+            logger.warn(lambda: "interpreter not found: {}".format(result.describe_failure()))
+
+        # else, on_failure must be skip
+        rctx.file("BUILD.bazel", _expand_incompatible_template())
+        return
+    else:
+        interpreter_path = result.resolved_path
+
+    logger.info(lambda: "resolved interpreter {} to {}".format(rctx.attr.interpreter_path, interpreter_path))
+
+    exec_result = repo_utils.execute_unchecked(
+        rctx,
+        op = "local_runtime_repo.GetPythonInfo({})".format(rctx.name),
+        arguments = [
+            interpreter_path,
+            rctx.path(rctx.attr._get_local_runtime_info),
+        ],
+        quiet = True,
+        logger = logger,
+    )
+    if exec_result.return_code != 0:
+        if on_failure == "fail":
+            fail("GetPythonInfo failed: {}".format(exec_result.describe_failure()))
+        if on_failure == "warn":
+            logger.warn(lambda: "GetPythonInfo failed: {}".format(exec_result.describe_failure()))
+
+        # else, on_failure must be skip
+        rctx.file("BUILD.bazel", _expand_incompatible_template())
+        return
+
+    info = json.decode(exec_result.stdout)
+    logger.info(lambda: _format_get_info_result(info))
+
+    # NOTE: Keep in sync with recursive glob in define_local_runtime_toolchain_impl
+    repo_utils.watch_tree(rctx, rctx.path(info["include"]))
+
+    # The cc_library.includes values have to be non-absolute paths, otherwise
+    # the toolchain will give an error. Work around this error by making them
+    # appear as part of this repo.
+    rctx.symlink(info["include"], "include")
+
+    shared_lib_names = [
+        info["PY3LIBRARY"],
+        info["LDLIBRARY"],
+        info["INSTSONAME"],
+    ]
+
+    # In some cases, the value may be empty. Not clear why.
+    shared_lib_names = [v for v in shared_lib_names if v]
+
+    # In some cases, the same value is returned for multiple keys. Not clear why.
+    shared_lib_names = {v: None for v in shared_lib_names}.keys()
+    shared_lib_dir = info["LIBDIR"]
+
+    # The specific files are symlinked instead of the whole directory
+    # because it can point to a directory that has more than just
+    # the Python runtime shared libraries, e.g. /usr/lib, or a Python
+    # specific directory with pip-installed shared libraries.
+    rctx.report_progress("Symlinking external Python shared libraries")
+    for name in shared_lib_names:
+        origin = rctx.path("{}/{}".format(shared_lib_dir, name))
+
+        # The reported names don't always exist; it depends on the particulars
+        # of the runtime installation.
+        if origin.exists:
+            repo_utils.watch(rctx, origin)
+            rctx.symlink(origin, "lib/" + name)
+
+    rctx.file("WORKSPACE", "")
+    rctx.file("MODULE.bazel", "")
+    rctx.file("REPO.bazel", "")
+    rctx.file("BUILD.bazel", _TOOLCHAIN_IMPL_TEMPLATE.format(
+        major = info["major"],
+        minor = info["minor"],
+        micro = info["micro"],
+        interpreter_path = interpreter_path,
+        lib_ext = info["SHLIB_SUFFIX"],
+        implementation_name = info["implementation_name"],
+        os = "@platforms//os:{}".format(repo_utils.get_platforms_os_name(rctx)),
+    ))
+
+local_runtime_repo = repository_rule(
+    implementation = _local_runtime_repo_impl,
+    doc = """
+Use a locally installed Python runtime as a toolchain implementation.
+
+Note this uses the runtime as a *platform runtime*. A platform runtime means
+means targets don't include the runtime itself as part of their runfiles or
+inputs. Instead, users must assure that where the targets run have the runtime
+pre-installed or otherwise available.
+
+This results in lighter weight binaries (in particular, Bazel doesn't have to
+create thousands of files for every `py_test`), at the risk of having to rely on
+a system having the necessary Python installed.
+""",
+    attrs = {
+        "interpreter_path": attr.string(
+            doc = """
+An absolute path or program name on the `PATH` env var.
+
+Values with slashes are assumed to be the path to a program. Otherwise, it is
+treated as something to search for on `PATH`
+
+Note that, when a plain program name is used, the path to the interpreter is
+resolved at repository evalution time, not runtime of any resulting binaries.
+""",
+            default = "python3",
+        ),
+        "on_failure": attr.string(
+            default = _OnFailure.SKIP,
+            values = sorted(_OnFailure.__members__.values()),
+            doc = """
+How to handle errors when trying to automatically determine settings.
+
+* `skip` will silently skip creating a runtime. Instead, a non-functional
+  runtime will be generated and marked as incompatible so it cannot be used.
+  This is best if a local runtime is known not to work or be available
+  in certain cases and that's OK. e.g., one use windows paths when there
+  are people running on linux.
+* `warn` will print a warning message. This is useful when you expect
+  a runtime to be available, but are OK with it missing and falling back
+  to some other runtime.
+* `fail` will result in a failure. This is only recommended if you must
+  ensure the runtime is available.
+""",
+        ),
+        "_get_local_runtime_info": attr.label(
+            allow_single_file = True,
+            default = "//python/private:get_local_runtime_info.py",
+        ),
+        "_rule_name": attr.string(default = "local_runtime_repo"),
+    },
+    environ = ["PATH", REPO_DEBUG_ENV_VAR],
+)
+
+def _expand_incompatible_template():
+    return _TOOLCHAIN_IMPL_TEMPLATE.format(
+        interpreter_path = "/incompatible",
+        implementation_name = "incompatible",
+        lib_ext = "incompatible",
+        major = "0",
+        minor = "0",
+        micro = "0",
+        os = "@platforms//:incompatible",
+    )
+
+def _resolve_interpreter_path(rctx):
+    """Find the absolute path for an interpreter.
+
+    Args:
+        rctx: A repository_ctx object
+
+    Returns:
+        `struct` with the following fields:
+        * `resolved_path`: `path` object of a path that exists
+        * `describe_failure`: `Callable | None`. If a path that doesn't exist,
+          returns a description of why it couldn't be resolved
+        A path object or None. The path may not exist.
+    """
+    if "/" not in rctx.attr.interpreter_path and "\\" not in rctx.attr.interpreter_path:
+        # Provide a bit nicer integration with pyenv: recalculate the runtime if the
+        # user changes the python version using e.g. `pyenv shell`
+        repo_utils.getenv(rctx, "PYENV_VERSION")
+        result = repo_utils.which_unchecked(rctx, rctx.attr.interpreter_path)
+        resolved_path = result.binary
+        describe_failure = result.describe_failure
+    else:
+        repo_utils.watch(rctx, rctx.attr.interpreter_path)
+        resolved_path = rctx.path(rctx.attr.interpreter_path)
+        if not resolved_path.exists:
+            describe_failure = lambda: "Path not found: {}".format(repr(rctx.attr.interpreter_path))
+        else:
+            describe_failure = None
+
+    return struct(
+        resolved_path = resolved_path,
+        describe_failure = describe_failure,
+    )
+
+def _format_get_info_result(info):
+    lines = ["GetPythonInfo result:"]
+    for key, value in sorted(info.items()):
+        lines.append("  {}: {}".format(key, value if value != "" else "<empty string>"))
+    return "\n".join(lines)
diff --git a/python/private/local_runtime_repo_setup.bzl b/python/private/local_runtime_repo_setup.bzl
new file mode 100644
index 0000000..23fa99d
--- /dev/null
+++ b/python/private/local_runtime_repo_setup.bzl
@@ -0,0 +1,141 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Setup code called by the code generated by `local_runtime_repo`."""
+
+load("@bazel_skylib//lib:selects.bzl", "selects")
+load("@rules_cc//cc:defs.bzl", "cc_library")
+load("@rules_python//python:py_runtime.bzl", "py_runtime")
+load("@rules_python//python:py_runtime_pair.bzl", "py_runtime_pair")
+load("@rules_python//python/cc:py_cc_toolchain.bzl", "py_cc_toolchain")
+load("@rules_python//python/private:py_exec_tools_toolchain.bzl", "py_exec_tools_toolchain")
+
+_PYTHON_VERSION_FLAG = Label("@rules_python//python/config_settings:python_version")
+
+def define_local_runtime_toolchain_impl(
+        name,
+        lib_ext,
+        major,
+        minor,
+        micro,
+        interpreter_path,
+        implementation_name,
+        os):
+    """Defines a toolchain implementation for a local Python runtime.
+
+    Generates public targets:
+    * `python_runtimes`: The target toolchain type implementation
+    * `py_exec_tools_toolchain`: The exec tools toolchain type implementation
+    * `py_cc_toolchain`: The py cc toolchain type implementation
+    * `os`: A constraint (or alias to one) for the `target_compatible_with` this
+      toolchain is compatible with.
+    * `is_matching_python_version`: A `config_setting` for `target_settings`
+      this toolchain is compatible with.
+
+    Args:
+        name: `str` Only present to satisfy tooling
+        lib_ext: `str` The file extension for the `libpython` shared libraries
+        major: `str` The major Python version, e.g. `3` of `3.9.1`.
+        minor: `str` The minor Python version, e.g. `9` of `3.9.1`.
+        micro: `str` The micro Python version, e.g. "1" of `3.9.1`.
+        interpreter_path: `str` Absolute path to the interpreter.
+        implementation_name: `str` The implementation name, as returned by
+            `sys.implementation.name`.
+        os: `str` A label to the OS constraint (e.g. `@platforms//os:linux`) for
+            this runtime.
+    """
+    major_minor = "{}.{}".format(major, minor)
+    major_minor_micro = "{}.{}".format(major_minor, micro)
+
+    cc_library(
+        name = "_python_headers",
+        # NOTE: Keep in sync with watch_tree() called in local_runtime_repo
+        srcs = native.glob(["include/**/*.h"]),
+        includes = ["include"],
+    )
+
+    cc_library(
+        name = "_libpython",
+        # Don't use a recursive glob because the lib/ directory usually contains
+        # a subdirectory of the stdlib -- lots of unrelated files
+        srcs = native.glob([
+            "lib/*{}".format(lib_ext),  # Match libpython*.so
+            "lib/*{}*".format(lib_ext),  # Also match libpython*.so.1.0
+        ]),
+        hdrs = [":_python_headers"],
+    )
+
+    py_runtime(
+        name = "_py3_runtime",
+        interpreter_path = interpreter_path,
+        python_version = "PY3",
+        interpreter_version_info = {
+            "major": major,
+            "micro": micro,
+            "minor": minor,
+        },
+        implementation_name = implementation_name,
+    )
+
+    py_runtime_pair(
+        name = "python_runtimes",
+        py2_runtime = None,
+        py3_runtime = ":_py3_runtime",
+        visibility = ["//visibility:public"],
+    )
+
+    py_exec_tools_toolchain(
+        name = "py_exec_tools_toolchain",
+        visibility = ["//visibility:public"],
+        precompiler = "@rules_python//tools/precompiler:precompiler",
+    )
+
+    py_cc_toolchain(
+        name = "py_cc_toolchain",
+        headers = ":_python_headers",
+        libs = ":_libpython",
+        python_version = major_minor_micro,
+        visibility = ["//visibility:public"],
+    )
+
+    native.alias(
+        name = "os",
+        # Call Label() to force the string to evaluate in the context of
+        # rules_python, not the calling BUILD-file code. This is because
+        # the value is an `@platforms//foo` string, which @rules_python has
+        # visibility to, but the calling repo may not.
+        actual = Label(os),
+        visibility = ["//visibility:public"],
+    )
+
+    native.config_setting(
+        name = "_is_major_minor",
+        flag_values = {
+            _PYTHON_VERSION_FLAG: major_minor,
+        },
+    )
+    native.config_setting(
+        name = "_is_major_minor_micro",
+        flag_values = {
+            _PYTHON_VERSION_FLAG: major_minor_micro,
+        },
+    )
+    selects.config_setting_group(
+        name = "is_matching_python_version",
+        match_any = [
+            ":_is_major_minor",
+            ":_is_major_minor_micro",
+        ],
+        visibility = ["//visibility:public"],
+    )
diff --git a/python/private/local_runtime_toolchains_repo.bzl b/python/private/local_runtime_toolchains_repo.bzl
new file mode 100644
index 0000000..880fbfe
--- /dev/null
+++ b/python/private/local_runtime_toolchains_repo.bzl
@@ -0,0 +1,93 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create a repository to hold a local Python toolchain definitions."""
+
+load("//python/private:text_util.bzl", "render")
+load(":repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "repo_utils")
+
+_TOOLCHAIN_TEMPLATE = """
+# Generated by local_runtime_toolchains_repo.bzl
+
+load("@rules_python//python/private:py_toolchain_suite.bzl", "define_local_toolchain_suites")
+
+define_local_toolchain_suites(
+    name = "toolchains",
+    version_aware_repo_names = {version_aware_names},
+    version_unaware_repo_names = {version_unaware_names},
+)
+"""
+
+def _local_runtime_toolchains_repo(rctx):
+    logger = repo_utils.logger(rctx)
+    rctx.file("WORKSPACE", "")
+    rctx.file("MODULE.bazel", "")
+    rctx.file("REPO.bazel", "")
+
+    logger.info(lambda: _format_toolchains_for_logging(rctx))
+
+    rctx.file("BUILD.bazel", _TOOLCHAIN_TEMPLATE.format(
+        version_aware_names = render.list(rctx.attr.runtimes),
+        version_unaware_names = render.list(rctx.attr.default_runtimes or rctx.attr.runtimes),
+    ))
+
+local_runtime_toolchains_repo = repository_rule(
+    implementation = _local_runtime_toolchains_repo,
+    doc = """
+Create a repo of toolchains definitions for local runtimes.
+
+This is intended to be used on the toolchain implemenations generated by
+`local_runtime_repo`.
+
+NOTE: This does not call `native.register_toolchains` -- the caller is
+responsible for registering the toolchains this defines.
+""",
+    attrs = {
+        "default_runtimes": attr.string_list(
+            doc = """
+The repo names of `local_runtime_repo` repos to define as toolchains.
+
+These will be defined as *version-unaware* toolchains. This means they will
+match any Python version. As such, they are registered after the version-aware
+toolchains defined by the `runtimes` attribute.
+
+Note that order matters: it determines the toolchain priority within the
+package.
+""",
+        ),
+        "runtimes": attr.string_list(
+            doc = """
+The repo names of `local_runtime_repo` repos to define as toolchains.
+
+These will be defined as *version-aware* toolchains. This means they require the
+`--//python/config_settings:python_version` to be set in order to match. These
+are registered before `default_runtimes`.
+
+Note that order matters: it determines the toolchain priority within the
+package.
+""",
+        ),
+        "_rule_name": attr.string(default = "local_toolchains_repo"),
+    },
+    environ = [REPO_DEBUG_ENV_VAR],
+)
+
+def _format_toolchains_for_logging(rctx):
+    lines = ["Local toolchain priority order:"]
+    i = 0
+    for i, name in enumerate(rctx.attr.runtimes, start = i):
+        lines.append("  {}: {} (version aware)".format(i, name))
+    for i, name in enumerate(rctx.attr.default_runtimes, start = i):
+        lines.append("  {}: {} (version unaware)".format(i, name))
+    return "\n".join(lines)
diff --git a/python/private/normalize_name.bzl b/python/private/normalize_name.bzl
index aaeca80..7898222 100644
--- a/python/private/normalize_name.bzl
+++ b/python/private/normalize_name.bzl
@@ -38,7 +38,6 @@
 https://packaging.python.org/en/latest/specifications/name-normalization/
 """
 
-# Keep in sync with ../pip_install/tools/lib/bazel.py
 def normalize_name(name):
     """normalize a PyPI package name and return a valid bazel label.
 
diff --git a/python/private/proto/py_proto_library.bzl b/python/private/proto/py_proto_library.bzl
index 91faa2d..e123ff8 100644
--- a/python/private/proto/py_proto_library.bzl
+++ b/python/private/proto/py_proto_library.bzl
@@ -17,7 +17,7 @@
 load("@rules_proto//proto:defs.bzl", "ProtoInfo", "proto_common")
 load("//python:defs.bzl", "PyInfo")
 
-ProtoLangToolchainInfo = proto_common.ProtoLangToolchainInfo
+PY_PROTO_TOOLCHAIN = "@rules_python//python/proto:toolchain_type"
 
 _PyProtoInfo = provider(
     doc = "Encapsulates information needed by the Python proto rules.",
@@ -35,6 +35,9 @@
 def _filter_provider(provider, *attrs):
     return [dep[provider] for attr in attrs for dep in attr if provider in dep]
 
+def _incompatible_toolchains_enabled():
+    return getattr(proto_common, "INCOMPATIBLE_ENABLE_PROTO_TOOLCHAIN_RESOLUTION", False)
+
 def _py_proto_aspect_impl(target, ctx):
     """Generates and compiles Python code for a proto_library.
 
@@ -51,7 +54,6 @@
       ([_PyProtoInfo]) Providers collecting transitive information about
       generated files.
     """
-
     _proto_library = ctx.rule.attr
 
     # Check Proto file names
@@ -61,7 +63,14 @@
                 proto.path,
             ))
 
-    proto_lang_toolchain_info = ctx.attr._aspect_proto_toolchain[ProtoLangToolchainInfo]
+    if _incompatible_toolchains_enabled():
+        toolchain = ctx.toolchains[PY_PROTO_TOOLCHAIN]
+        if not toolchain:
+            fail("No toolchains registered for '%s'." % PY_PROTO_TOOLCHAIN)
+        proto_lang_toolchain_info = toolchain.proto
+    else:
+        proto_lang_toolchain_info = getattr(ctx.attr, "_aspect_proto_toolchain")[proto_common.ProtoLangToolchainInfo]
+
     api_deps = [proto_lang_toolchain_info.runtime]
 
     generated_sources = []
@@ -123,7 +132,7 @@
 
 _py_proto_aspect = aspect(
     implementation = _py_proto_aspect_impl,
-    attrs = {
+    attrs = {} if _incompatible_toolchains_enabled() else {
         "_aspect_proto_toolchain": attr.label(
             default = ":python_toolchain",
         ),
@@ -131,6 +140,7 @@
     attr_aspects = ["deps"],
     required_providers = [ProtoInfo],
     provides = [_PyProtoInfo],
+    toolchains = [PY_PROTO_TOOLCHAIN] if _incompatible_toolchains_enabled() else [],
 )
 
 def _py_proto_library_rule(ctx):
diff --git a/python/private/py_cc_toolchain_info.bzl b/python/private/py_cc_toolchain_info.bzl
index a2e62a8..ae46bf4 100644
--- a/python/private/py_cc_toolchain_info.bzl
+++ b/python/private/py_cc_toolchain_info.bzl
@@ -18,7 +18,9 @@
     doc = "C/C++ information about the Python runtime.",
     fields = {
         "headers": """\
-(struct) Information about the header files, with fields:
+:type: struct
+
+Information about the header files, struct with fields:
   * providers_map: a dict of string to provider instances. The key should be
     a fully qualified name (e.g. `@rules_foo//bar:baz.bzl#MyInfo`) of the
     provider to uniquely identify its type.
@@ -38,6 +40,33 @@
     e.g. `:current_py_cc_headers` to act as the underlying headers target it
     represents).
 """,
-        "python_version": "(str) The Python Major.Minor version.",
+        "libs": """\
+:type: struct
+
+Information about C libraries, struct with fields:
+  * providers_map: A dict of string to provider instances. The key should be
+    a fully qualified name (e.g. `@rules_foo//bar:baz.bzl#MyInfo`) of the
+    provider to uniquely identify its type.
+
+    The following keys are always present:
+      * CcInfo: the CcInfo provider instance for the libraries.
+      * DefaultInfo: the DefaultInfo provider instance for the headers.
+
+    A map is used to allow additional providers from the originating libraries
+    target (typically a `cc_library`) to be propagated to consumers (directly
+    exposing a Target object can cause memory issues and is an anti-pattern).
+
+    When consuming this map, it's suggested to use `providers_map.values()` to
+    return all providers; or copy the map and filter out or replace keys as
+    appropriate. Note that any keys beginning with `_` (underscore) are
+    considered private and should be forward along as-is (this better allows
+    e.g. `:current_py_cc_headers` to act as the underlying headers target it
+    represents).
+""",
+        "python_version": """
+:type: str
+
+The Python Major.Minor version.
+""",
     },
 )
diff --git a/python/private/py_cc_toolchain_rule.bzl b/python/private/py_cc_toolchain_rule.bzl
index c80f845..1599415 100644
--- a/python/private/py_cc_toolchain_rule.bzl
+++ b/python/private/py_cc_toolchain_rule.bzl
@@ -18,6 +18,8 @@
 https://github.com/bazelbuild/rules_python/issues/824 is considered done.
 """
 
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("@rules_cc//cc:defs.bzl", "CcInfo")
 load(":py_cc_toolchain_info.bzl", "PyCcToolchainInfo")
 
 def _py_cc_toolchain_impl(ctx):
@@ -28,10 +30,20 @@
                 "DefaultInfo": ctx.attr.headers[DefaultInfo],
             },
         ),
+        libs = struct(
+            providers_map = {
+                "CcInfo": ctx.attr.libs[CcInfo],
+                "DefaultInfo": ctx.attr.libs[DefaultInfo],
+            },
+        ),
         python_version = ctx.attr.python_version,
     )
+    extra_kwargs = {}
+    if ctx.attr._visible_for_testing[BuildSettingInfo].value:
+        extra_kwargs["toolchain_label"] = ctx.label
     return [platform_common.ToolchainInfo(
         py_cc_toolchain = py_cc_toolchain,
+        **extra_kwargs
     )]
 
 py_cc_toolchain = rule(
@@ -43,10 +55,19 @@
             providers = [CcInfo],
             mandatory = True,
         ),
+        "libs": attr.label(
+            doc = ("Target that provides the Python runtime libraries for linking. " +
+                   "Typically this is a cc_library target of `.so` files."),
+            providers = [CcInfo],
+            mandatory = True,
+        ),
         "python_version": attr.string(
             doc = "The Major.minor Python version, e.g. 3.11",
             mandatory = True,
         ),
+        "_visible_for_testing": attr.label(
+            default = "//python/private:visible_for_testing",
+        ),
     },
     doc = """\
 A toolchain for a Python runtime's C/C++ information (e.g. headers)
diff --git a/python/private/py_console_script_binary.bzl b/python/private/py_console_script_binary.bzl
index deeded2..7347ebe 100644
--- a/python/private/py_console_script_binary.bzl
+++ b/python/private/py_console_script_binary.bzl
@@ -27,16 +27,23 @@
     rules_python does not know anything about the hub repos that the user has
     available.
 
-    NOTE: Works with `incompatible_generate_aliases` and without by assuming the
-    following formats:
-        * @pypi_pylint//:pkg
+    NOTE: Works with assuming the following label formats:
         * @pypi//pylint
         * @pypi//pylint:pkg
         * Label("@pypi//pylint:pkg")
+        * Label("@pypi//pylint")
     """
 
-    # str() is called to convert Label objects
-    return str(pkg).replace(":pkg", "") + ":dist_info"
+    if type(pkg) == type(""):
+        label = native.package_relative_label(pkg)
+    else:
+        label = pkg
+
+    if hasattr(label, "same_package_label"):
+        return label.same_package_label("dist_info")
+    else:
+        # NOTE @aignas 2024-03-25: this is deprecated but kept for compatibility
+        return label.relative("dist_info")
 
 def py_console_script_binary(
         *,
@@ -70,7 +77,6 @@
 
     py_console_script_gen(
         name = "_{}_gen".format(name),
-        # NOTE @aignas 2023-08-05: Works with `incompatible_generate_aliases` and without.
         entry_points_txt = entry_points_txt or _dist_info(pkg),
         out = main,
         console_script = script,
diff --git a/python/private/py_exec_tools_info.bzl b/python/private/py_exec_tools_info.bzl
new file mode 100644
index 0000000..2998543
--- /dev/null
+++ b/python/private/py_exec_tools_info.bzl
@@ -0,0 +1,71 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Implementation of the exec tools toolchain provider."""
+
+PyExecToolsInfo = provider(
+    doc = "Build tools used as part of building Python programs.",
+    fields = {
+        "exec_interpreter": """
+Optional Target; an interpreter valid for running in the exec configuration.
+When running it in an action, use `DefaultInfo.files_to_run` to ensure all its
+files are appropriately available. An exec interpreter may not be available,
+e.g. if all the exec tools are prebuilt binaries.
+
+NOTE: this interpreter is really only for use when a build tool cannot use
+the Python toolchain itself. When possible, prefeer to define a `py_binary`
+instead and use it via a `cfg=exec` attribute; this makes it much easier
+to setup the runtime environment for the binary. See also:
+`py_interpreter_program` rule.
+
+NOTE: What interpreter is used depends on the toolchain constraints. Ensure
+the proper target constraints are being applied when obtaining this from
+the toolchain.
+""",
+        "precompiler": """
+Optional Target. The tool to use for generating pyc files. If not available,
+precompiling will not be available.
+
+Must provide one of the following:
+  * PyInterpreterProgramInfo
+  * DefaultInfo.files_to_run
+
+This target provides either the `PyInterpreterProgramInfo` provider or is a
+regular executable binary (provides DefaultInfo.files_to_run). When the
+`PyInterpreterProgramInfo` provider is present, it means the precompiler program
+doesn't know how to find the interpreter itself, so the caller must provide it
+when constructing the action invocation for running the precompiler program
+(typically `exec_interpreter`). See the `PyInterpreterProgramInfo` provider docs
+for details on how to construct an invocation.
+
+If `testing.ExecutionInfo` is provided, it will be used to set execution
+requirements. This can be used to control persistent worker settings.
+
+The precompiler command line API is:
+* `--invalidation_mode`: The type of pyc invalidation mode to use. Should be
+  one of `unchecked_hash` or `checked_hash`.
+* `--optimize`: The optimization level as an integer.
+* `--python_version`: The Python version, in `Major.Minor` format, e.g. `3.12`
+
+The following args are repeated and form a list of 3-tuples of their values. At
+least one 3-tuple will be passed.
+* `--src`: Path to the source `.py` file to precompile.
+* `--src_name`: The human-friendly file name to record in the pyc output.
+* `--pyc`: Path to where pyc output should be written.
+
+NOTE: These arguments _may_ be stored in a file instead, in which case, the
+path to that file will be a positional arg starting with `@`, e.g. `@foo/bar`.
+The format of the file is one arg per line.
+""",
+    },
+)
diff --git a/python/private/py_exec_tools_toolchain.bzl b/python/private/py_exec_tools_toolchain.bzl
new file mode 100644
index 0000000..a4516d8
--- /dev/null
+++ b/python/private/py_exec_tools_toolchain.bzl
@@ -0,0 +1,79 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Rule that defines a toolchain for build tools."""
+
+load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+load("//python/private:toolchain_types.bzl", "TARGET_TOOLCHAIN_TYPE")
+load(":py_exec_tools_info.bzl", "PyExecToolsInfo")
+
+def _py_exec_tools_toolchain_impl(ctx):
+    extra_kwargs = {}
+    if ctx.attr._visible_for_testing[BuildSettingInfo].value:
+        extra_kwargs["toolchain_label"] = ctx.label
+
+    return [platform_common.ToolchainInfo(
+        exec_tools = PyExecToolsInfo(
+            exec_interpreter = ctx.attr.exec_interpreter,
+            precompiler = ctx.attr.precompiler,
+        ),
+        **extra_kwargs
+    )]
+
+py_exec_tools_toolchain = rule(
+    implementation = _py_exec_tools_toolchain_impl,
+    attrs = {
+        "exec_interpreter": attr.label(
+            default = "//python/private:current_interpreter_executable",
+            cfg = "exec",
+            doc = "See PyExecToolsInfo.exec_interpreter.",
+        ),
+        "precompiler": attr.label(
+            allow_files = True,
+            cfg = "exec",
+            doc = "See PyExecToolsInfo.precompiler.",
+        ),
+        "_visible_for_testing": attr.label(
+            default = "//python/private:visible_for_testing",
+        ),
+    },
+)
+
+def _current_interpreter_executable_impl(ctx):
+    toolchain = ctx.toolchains[TARGET_TOOLCHAIN_TYPE]
+    runtime = toolchain.py3_runtime
+
+    # NOTE: We name the output filename after the underlying file name
+    # because of things like pyenv: they use $0 to determine what to
+    # re-exec. If it's not a recognized name, then they fail.
+    if runtime.interpreter:
+        executable = ctx.actions.declare_file(runtime.interpreter.basename)
+        ctx.actions.symlink(output = executable, target_file = runtime.interpreter, is_executable = True)
+    else:
+        executable = ctx.actions.declare_symlink(paths.basename(runtime.interpreter_path))
+        ctx.actions.symlink(output = executable, target_path = runtime.interpreter_path)
+    return [
+        toolchain,
+        DefaultInfo(
+            executable = executable,
+            runfiles = ctx.runfiles([executable], transitive_files = runtime.files),
+        ),
+    ]
+
+current_interpreter_executable = rule(
+    implementation = _current_interpreter_executable_impl,
+    toolchains = [TARGET_TOOLCHAIN_TYPE],
+    executable = True,
+)
diff --git a/python/private/py_interpreter_program.bzl b/python/private/py_interpreter_program.bzl
new file mode 100644
index 0000000..cd62a71
--- /dev/null
+++ b/python/private/py_interpreter_program.bzl
@@ -0,0 +1,103 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Internal only bootstrap level binary-like rule."""
+
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
+
+PyInterpreterProgramInfo = provider(
+    doc = "Information about how to run a program with an external interpreter.",
+    fields = {
+        "env": "dict[str, str] of environment variables to set prior to execution.",
+        "interpreter_args": "List of strings; additional args to pass " +
+                            "to the interpreter before the main program.",
+        "main": "File; the .py file that is the entry point.",
+    },
+)
+
+def _py_interpreter_program_impl(ctx):
+    # Bazel requires the executable file to be an output created by this target.
+    executable = ctx.actions.declare_file(ctx.label.name)
+    ctx.actions.symlink(output = executable, target_file = ctx.file.main)
+    execution_requirements = {}
+    execution_requirements.update([
+        value.split("=", 1)
+        for value in ctx.attr.execution_requirements[BuildSettingInfo].value
+        if value.strip()
+    ])
+
+    return [
+        DefaultInfo(
+            executable = executable,
+            files = depset([executable]),
+            runfiles = ctx.runfiles(files = [
+                executable,
+            ]),
+        ),
+        PyInterpreterProgramInfo(
+            env = ctx.attr.env,
+            interpreter_args = ctx.attr.interpreter_args,
+            main = ctx.file.main,
+        ),
+        testing.ExecutionInfo(
+            requirements = execution_requirements,
+        ),
+    ]
+
+py_interpreter_program = rule(
+    doc = """
+Binary-like rule that doesn't require a toolchain because it's part of
+implementing build tools for the toolchain. This rule expects the Python
+interpreter to be externally provided.
+
+To run a `py_interpreter_program` as an action, pass it as a tool that is
+used by the actual interpreter executable. This ensures its runfiles are
+setup. Also pass along any interpreter args, environment, and requirements.
+
+```starlark
+ctx.actions.run(
+    executable = <python interpreter executable>,
+    args = (
+        target[PyInterpreterProgramInfo].interpreter_args +
+        [target[DefaultInfo].files_to_run.executable]
+    ),
+    tools = target[DefaultInfo].files_to_run,
+    env = target[PyInterpreterProgramInfo].env,
+    execution_requirements = target[testing.ExecutionInfo].requirements,
+)
+```
+
+""",
+    implementation = _py_interpreter_program_impl,
+    attrs = {
+        "env": attr.string_dict(
+            doc = "Environment variables that should be set prior to running.",
+        ),
+        "execution_requirements": attr.label(
+            doc = "Execution requirements to set when running it as an action",
+            providers = [BuildSettingInfo],
+        ),
+        "interpreter_args": attr.string_list(
+            doc = "Args that should be passed to the interpreter.",
+        ),
+        "main": attr.label(
+            doc = "The entry point Python file.",
+            allow_single_file = True,
+        ),
+    },
+    # This is set to False because this isn't a binary/executable in the usual
+    # Bazel sense (even though it sets DefaultInfo.files_to_run). It just holds
+    # information so that a caller can construct how to execute it correctly.
+    executable = False,
+)
diff --git a/python/private/py_runtime_pair_rule.bzl b/python/private/py_runtime_pair_rule.bzl
index d17b008..eb91413 100644
--- a/python/private/py_runtime_pair_rule.bzl
+++ b/python/private/py_runtime_pair_rule.bzl
@@ -14,6 +14,7 @@
 
 """Implementation of py_runtime_pair."""
 
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
 load("//python:py_runtime_info.bzl", "PyRuntimeInfo")
 load("//python/private:reexports.bzl", "BuiltinPyRuntimeInfo")
 load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")
@@ -40,9 +41,14 @@
     #     fail("Using Python 2 is not supported and disabled; see " +
     #          "https://github.com/bazelbuild/bazel/issues/15684")
 
+    extra_kwargs = {}
+    if ctx.attr._visible_for_testing[BuildSettingInfo].value:
+        extra_kwargs["toolchain_label"] = ctx.label
+
     return [platform_common.ToolchainInfo(
         py2_runtime = py2_runtime,
         py3_runtime = py3_runtime,
+        **extra_kwargs
     )]
 
 def _get_py_runtime_info(target):
@@ -85,6 +91,9 @@
 `PY3`.
 """,
         ),
+        "_visible_for_testing": attr.label(
+            default = "//python/private:visible_for_testing",
+        ),
     },
     fragments = ["py"],
     doc = """\
@@ -98,15 +107,12 @@
 Usually the wrapped runtimes are declared using the `py_runtime` rule, but any
 rule returning a `PyRuntimeInfo` provider may be used.
 
-This rule returns a `platform_common.ToolchainInfo` provider with the following
-schema:
+This rule returns a {obj}`ToolchainInfo` provider with fields:
 
-```python
-platform_common.ToolchainInfo(
-    py2_runtime = <PyRuntimeInfo or None>,
-    py3_runtime = <PyRuntimeInfo or None>,
-)
-```
+* `py2_runtime`: {type}`PyRuntimeInfo | None`, runtime information for a
+  Python 2 runtime.
+* `py3_runtime`: {type}`PyRuntimeInfo | None`. runtime information for a
+  Python 3 runtime.
 
 Example usage:
 
diff --git a/python/private/py_toolchain_suite.bzl b/python/private/py_toolchain_suite.bzl
new file mode 100644
index 0000000..3fead95
--- /dev/null
+++ b/python/private/py_toolchain_suite.bzl
@@ -0,0 +1,179 @@
+# Copyright 2022 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Create the toolchain defs in a BUILD.bazel file."""
+
+load("@bazel_skylib//lib:selects.bzl", "selects")
+load("//python/private:text_util.bzl", "render")
+load(
+    ":toolchain_types.bzl",
+    "EXEC_TOOLS_TOOLCHAIN_TYPE",
+    "PY_CC_TOOLCHAIN_TYPE",
+    "TARGET_TOOLCHAIN_TYPE",
+)
+
+_IS_EXEC_TOOLCHAIN_ENABLED = Label("//python/config_settings:is_exec_tools_toolchain_enabled")
+
+# buildifier: disable=unnamed-macro
+def py_toolchain_suite(
+        *,
+        prefix,
+        user_repository_name,
+        python_version,
+        set_python_version_constraint,
+        flag_values,
+        target_compatible_with = []):
+    """For internal use only.
+
+    Args:
+        prefix: Prefix for toolchain target names.
+        user_repository_name: The name of the user repository.
+        python_version: The full (X.Y.Z) version of the interpreter.
+        set_python_version_constraint: True or False as a string.
+        flag_values: Extra flag values to match for this toolchain.
+        target_compatible_with: list of constraints the toolchains are compatible with.
+    """
+
+    # We have to use a String value here because bzlmod is passing in a
+    # string as we cannot have list of bools in build rule attributes.
+    # This if statement does not appear to work unless it is in the
+    # toolchain file.
+    if set_python_version_constraint in ["True", "False"]:
+        major_minor, _, _ = python_version.rpartition(".")
+        python_versions = [major_minor, python_version]
+        if set_python_version_constraint == "False":
+            python_versions.append("")
+
+        match_any = []
+        for i, v in enumerate(python_versions):
+            name = "{prefix}_{python_version}_{i}".format(
+                prefix = prefix,
+                python_version = python_version,
+                i = i,
+            )
+            match_any.append(name)
+            native.config_setting(
+                name = name,
+                flag_values = flag_values | {
+                    Label("@rules_python//python/config_settings:python_version"): v,
+                },
+                visibility = ["//visibility:private"],
+            )
+
+        name = "{prefix}_version_setting_{python_version}".format(
+            prefix = prefix,
+            python_version = python_version,
+            visibility = ["//visibility:private"],
+        )
+        selects.config_setting_group(
+            name = name,
+            match_any = match_any,
+            visibility = ["//visibility:private"],
+        )
+        target_settings = [name]
+    else:
+        fail(("Invalid set_python_version_constraint value: got {} {}, wanted " +
+              "either the string 'True' or the string 'False'; " +
+              "(did you convert bool to string?)").format(
+            type(set_python_version_constraint),
+            repr(set_python_version_constraint),
+        ))
+
+    _internal_toolchain_suite(
+        prefix = prefix,
+        runtime_repo_name = user_repository_name,
+        target_settings = target_settings,
+        target_compatible_with = target_compatible_with,
+    )
+
+def _internal_toolchain_suite(prefix, runtime_repo_name, target_compatible_with, target_settings):
+    native.toolchain(
+        name = "{prefix}_toolchain".format(prefix = prefix),
+        toolchain = "@{runtime_repo_name}//:python_runtimes".format(
+            runtime_repo_name = runtime_repo_name,
+        ),
+        toolchain_type = TARGET_TOOLCHAIN_TYPE,
+        target_settings = target_settings,
+        target_compatible_with = target_compatible_with,
+    )
+
+    native.toolchain(
+        name = "{prefix}_py_cc_toolchain".format(prefix = prefix),
+        toolchain = "@{runtime_repo_name}//:py_cc_toolchain".format(
+            runtime_repo_name = runtime_repo_name,
+        ),
+        toolchain_type = PY_CC_TOOLCHAIN_TYPE,
+        target_settings = target_settings,
+        target_compatible_with = target_compatible_with,
+    )
+
+    native.toolchain(
+        name = "{prefix}_py_exec_tools_toolchain".format(prefix = prefix),
+        toolchain = "@{runtime_repo_name}//:py_exec_tools_toolchain".format(
+            runtime_repo_name = runtime_repo_name,
+        ),
+        toolchain_type = EXEC_TOOLS_TOOLCHAIN_TYPE,
+        target_settings = select({
+            _IS_EXEC_TOOLCHAIN_ENABLED: target_settings,
+            # Whatever the default is, it has to map to a `config_setting`
+            # that will never match. Since the default branch is only taken if
+            # _IS_EXEC_TOOLCHAIN_ENABLED is false, then it will never match
+            # when later evaluated during toolchain resolution.
+            # Note that @platforms//:incompatible can't be used here because
+            # the RHS must be a `config_setting`.
+            "//conditions:default": [_IS_EXEC_TOOLCHAIN_ENABLED],
+        }),
+        exec_compatible_with = target_compatible_with,
+    )
+
+    # NOTE: When adding a new toolchain, for WORKSPACE builds to see the
+    # toolchain, the name must be added to the native.register_toolchains()
+    # call in python/repositories.bzl. Bzlmod doesn't need anything; it will
+    # register `:all`.
+
+def define_local_toolchain_suites(name, version_aware_repo_names, version_unaware_repo_names):
+    """Define toolchains for `local_runtime_repo` backed toolchains.
+
+    This generates `toolchain` targets that can be registered using `:all`. The
+    specific names of the toolchain targets are not defined. The priority order
+    of the toolchains is the order that is passed in, with version-aware having
+    higher priority than version-unaware.
+
+    Args:
+        name: `str` Unused; only present to satisfy tooling.
+        version_aware_repo_names: `list[str]` of the repo names that will have
+            version-aware toolchains defined.
+        version_unaware_repo_names: `list[str]` of the repo names that will have
+            version-unaware toolchains defined.
+    """
+    i = 0
+    for i, repo in enumerate(version_aware_repo_names, start = i):
+        prefix = render.left_pad_zero(i, 4)
+        _internal_toolchain_suite(
+            prefix = prefix,
+            runtime_repo_name = repo,
+            target_compatible_with = ["@{}//:os".format(repo)],
+            target_settings = ["@{}//:is_matching_python_version".format(repo)],
+        )
+
+    # The version unaware entries must go last because they will match any Python
+    # version.
+    for i, repo in enumerate(version_unaware_repo_names, start = i + 1):
+        prefix = render.left_pad_zero(i, 4)
+        _internal_toolchain_suite(
+            prefix = prefix,
+            runtime_repo_name = repo,
+            target_settings = [],
+            target_compatible_with = ["@{}//:os".format(repo)],
+        )
diff --git a/python/private/py_wheel.bzl b/python/private/py_wheel.bzl
index f451389..ef9e6f2 100644
--- a/python/private/py_wheel.bzl
+++ b/python/private/py_wheel.bzl
@@ -39,7 +39,7 @@
         doc = """\
 Name of the distribution.
 
-This should match the project name onm PyPI. It's also the name that is used to
+This should match the project name on PyPI. It's also the name that is used to
 refer to the package in other packages' dependencies.
 
 Workspace status keys are expanded using `{NAME}` format, for example:
@@ -118,37 +118,31 @@
     ),
 }
 
-_feature_flags = {
-    "incompatible_normalize_name": attr.bool(
-        default = True,
-        doc = """\
-Normalize the package distribution name according to latest
-Python packaging standards.
+_feature_flags = {}
 
-See https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode
-and https://packaging.python.org/en/latest/specifications/name-normalization/.
-
-Apart from the valid names according to the above, we also accept
-'{' and '}', which may be used as placeholders for stamping.
-""",
-    ),
-    "incompatible_normalize_version": attr.bool(
-        default = True,
-        doc = "Normalize the package version according to PEP440 standard. " +
-              "With this option set to True, if the user wants to pass any " +
-              "stamp variables, they have to be enclosed in '{}', e.g. " +
-              "'{BUILD_TIMESTAMP}'.",
-    ),
-}
-
+ALLOWED_DATA_FILE_PREFIX = ("purelib", "platlib", "headers", "scripts", "data")
 _requirement_attrs = {
     "extra_requires": attr.string_list_dict(
-        doc = "List of optional requirements for this package",
+        doc = ("A mapping of [extras](https://peps.python.org/pep-0508/#extras) options to lists of requirements (similar to `requires`). This attribute " +
+               "is mutually exclusive with `extra_requires_files`."),
+    ),
+    "extra_requires_files": attr.label_keyed_string_dict(
+        doc = ("A mapping of requirements files (similar to `requires_file`) to the name of an [extras](https://peps.python.org/pep-0508/#extras) option. " +
+               "This attribute is mutually exclusive with `extra_requires`."),
+        allow_files = True,
     ),
     "requires": attr.string_list(
         doc = ("List of requirements for this package. See the section on " +
                "[Declaring required dependency](https://setuptools.readthedocs.io/en/latest/userguide/dependency_management.html#declaring-dependencies) " +
-               "for details and examples of the format of this argument."),
+               "for details and examples of the format of this argument. This " +
+               "attribute is mutually exclusive with `requires_file`."),
+    ),
+    "requires_file": attr.label(
+        doc = ("A file containing a list of requirements for this package. See the section on " +
+               "[Declaring required dependency](https://setuptools.readthedocs.io/en/latest/userguide/dependency_management.html#declaring-dependencies) " +
+               "for details and examples of the format of this argument. This " +
+               "attribute is mutually exclusive with `requires`."),
+        allow_single_file = True,
     ),
 }
 
@@ -179,6 +173,11 @@
     "classifiers": attr.string_list(
         doc = "A list of strings describing the categories for the package. For valid classifiers see https://pypi.org/classifiers",
     ),
+    "data_files": attr.label_keyed_string_dict(
+        doc = ("Any file that is not normally installed inside site-packages goes into the .data directory, named " +
+               "as the .dist-info directory but with the .data/ extension.  Allowed paths: {prefixes}".format(prefixes = ALLOWED_DATA_FILE_PREFIX)),
+        allow_files = True,
+    ),
     "description_content_type": attr.string(
         doc = ("The type of contents in description_file. " +
                "If not provided, the type will be inferred from the extension of description_file. " +
@@ -237,9 +236,16 @@
     '{' and '}', which may be used as placeholders for stamping.
     """
     escaped = ""
+    _inside_stamp_var = False
     for character in name.elems():
-        if character.isalnum() or character in ["{", "}"]:
-            escaped += character.lower()
+        if character == "{":
+            _inside_stamp_var = True
+            escaped += character
+        elif character == "}":
+            _inside_stamp_var = False
+            escaped += character
+        elif character.isalnum():
+            escaped += character if _inside_stamp_var else character.lower()
         elif character in ["-", "_", "."]:
             if escaped == "":
                 fail(
@@ -297,23 +303,13 @@
     python_tag = _replace_make_variables(ctx.attr.python_tag, ctx)
     version = _replace_make_variables(ctx.attr.version, ctx)
 
-    filename_segments = []
-
-    if ctx.attr.incompatible_normalize_name:
-        filename_segments.append(_escape_filename_distribution_name(ctx.attr.distribution))
-    else:
-        filename_segments.append(_escape_filename_segment(ctx.attr.distribution))
-
-    if ctx.attr.incompatible_normalize_version:
-        filename_segments.append(normalize_pep440(version))
-    else:
-        filename_segments.append(_escape_filename_segment(version))
-
-    filename_segments.extend([
+    filename_segments = [
+        _escape_filename_distribution_name(ctx.attr.distribution),
+        normalize_pep440(version),
         _escape_filename_segment(python_tag),
         _escape_filename_segment(abi),
         _escape_filename_segment(ctx.attr.platform),
-    ])
+    ]
 
     outfile = ctx.actions.declare_file("-".join(filename_segments) + ".whl")
 
@@ -344,10 +340,6 @@
     args.add("--out", outfile)
     args.add("--name_file", name_file)
     args.add_all(ctx.attr.strip_path_prefixes, format_each = "--strip_path_prefix=%s")
-    if not ctx.attr.incompatible_normalize_name:
-        args.add("--noincompatible_normalize_name")
-    if not ctx.attr.incompatible_normalize_version:
-        args.add("--noincompatible_normalize_version")
 
     # Pass workspace status files if stamping is enabled
     if is_stamping_enabled(ctx.attr):
@@ -393,15 +385,50 @@
 
     if ctx.attr.python_requires:
         metadata_contents.append("Requires-Python: %s" % ctx.attr.python_requires)
-    for requirement in ctx.attr.requires:
-        metadata_contents.append("Requires-Dist: %s" % requirement)
 
+    if ctx.attr.requires and ctx.attr.requires_file:
+        fail("`requires` and `requires_file` are mutually exclusive. Please update {}".format(ctx.label))
+
+    for requires in ctx.attr.requires:
+        metadata_contents.append("Requires-Dist: %s" % requires)
+    if ctx.attr.requires_file:
+        # The @ prefixed paths will be resolved by the PyWheel action.
+        # Expanding each line containing a constraint in place of this
+        # directive.
+        metadata_contents.append("Requires-Dist: @%s" % ctx.file.requires_file.path)
+        other_inputs.append(ctx.file.requires_file)
+
+    if ctx.attr.extra_requires and ctx.attr.extra_requires_files:
+        fail("`extra_requires` and `extra_requires_files` are mutually exclusive. Please update {}".format(ctx.label))
     for option, option_requirements in sorted(ctx.attr.extra_requires.items()):
         metadata_contents.append("Provides-Extra: %s" % option)
         for requirement in option_requirements:
             metadata_contents.append(
                 "Requires-Dist: %s; extra == '%s'" % (requirement, option),
             )
+    extra_requires_files = {}
+    for option_requires_target, option in ctx.attr.extra_requires_files.items():
+        if option in extra_requires_files:
+            fail("Duplicate `extra_requires_files` option '{}' found on target {}".format(option, ctx.label))
+        option_requires_files = option_requires_target[DefaultInfo].files.to_list()
+        if len(option_requires_files) != 1:
+            fail("Labels in `extra_requires_files` must result in a single file, but {label} provides {files} from {owner}".format(
+                label = ctx.label,
+                files = option_requires_files,
+                owner = option_requires_target.label,
+            ))
+        extra_requires_files.update({option: option_requires_files[0]})
+
+    for option, option_requires_file in sorted(extra_requires_files.items()):
+        metadata_contents.append("Provides-Extra: %s" % option)
+        metadata_contents.append(
+            # The @ prefixed paths will be resolved by the PyWheel action.
+            # Expanding each line containing a constraint in place of this
+            # directive and appending the extra option.
+            "Requires-Dist: @%s; extra == '%s'" % (option_requires_file.path, option),
+        )
+        other_inputs.append(option_requires_file)
+
     ctx.actions.write(
         output = metadata_file,
         content = "\n".join(metadata_contents) + "\n",
@@ -452,7 +479,30 @@
             filename + ";" + target_files[0].path,
         )
 
+    for target, filename in ctx.attr.data_files.items():
+        target_files = target.files.to_list()
+        if len(target_files) != 1:
+            fail(
+                "Multi-file target listed in data_files %s",
+                filename,
+            )
+
+        if filename.partition("/")[0] not in ALLOWED_DATA_FILE_PREFIX:
+            fail(
+                "The target data file must start with one of these prefixes: '%s'.  Target filepath: '%s'" %
+                (
+                    ",".join(ALLOWED_DATA_FILE_PREFIX),
+                    filename,
+                ),
+            )
+        other_inputs.extend(target_files)
+        args.add(
+            "--data_files",
+            filename + ";" + target_files[0].path,
+        )
+
     ctx.actions.run(
+        mnemonic = "PyWheel",
         inputs = depset(direct = other_inputs, transitive = [inputs_to_package]),
         outputs = [outfile, name_file],
         arguments = [args],
diff --git a/python/private/pypi/BUILD.bazel b/python/private/pypi/BUILD.bazel
new file mode 100644
index 0000000..3b11dbe
--- /dev/null
+++ b/python/private/pypi/BUILD.bazel
@@ -0,0 +1,325 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
+
+package(default_visibility = ["//:__subpackages__"])
+
+licenses(["notice"])
+
+filegroup(
+    name = "distribution",
+    srcs = glob(
+        ["**"],
+        exclude = ["requirements.txt"],
+    ) + [
+        "//python/private/pypi/dependency_resolver:distribution",
+        "//python/private/pypi/whl_installer:distribution",
+    ],
+    visibility = ["//python/private:__pkg__"],
+)
+
+# Filegroup of bzl files that can be used by downstream rules for documentation generation
+filegroup(
+    name = "bzl",
+    srcs = glob(["**/*.bzl"]),
+    visibility = [
+        "//python/private:__pkg__",
+        "//tools/private:__pkg__",
+    ],
+)
+
+filegroup(
+    name = "requirements_txt",
+    srcs = ["requirements.txt"],
+    visibility = ["//tools/private/update_deps:__pkg__"],
+)
+
+# Keep sorted by library name and keep the files named by the main symbol they export
+
+bzl_library(
+    name = "attrs_bzl",
+    srcs = ["attrs.bzl"],
+)
+
+bzl_library(
+    name = "extension_bzl",
+    srcs = ["extension.bzl"],
+    deps = [
+        ":attrs_bzl",
+        ":hub_repository_bzl",
+        ":parse_requirements_bzl",
+        ":evaluate_markers_bzl",
+        ":parse_whl_name_bzl",
+        ":pip_repository_attrs_bzl",
+        ":simpleapi_download_bzl",
+        ":whl_library_bzl",
+        ":whl_repo_name_bzl",
+        "//python/private:full_version_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private:version_label_bzl",
+        "@bazel_features//:features",
+    ] + [
+        "@pythons_hub//:interpreters_bzl",
+    ] if BZLMOD_ENABLED else [],
+)
+
+bzl_library(
+    name = "config_settings_bzl",
+    srcs = ["config_settings.bzl"],
+    deps = ["flags_bzl"],
+)
+
+bzl_library(
+    name = "deps_bzl",
+    srcs = ["deps.bzl"],
+    deps = [
+        "//python/private:bazel_tools_bzl",
+    ],
+)
+
+bzl_library(
+    name = "evaluate_markers_bzl",
+    srcs = ["evaluate_markers.bzl"],
+    deps = [
+        ":pypi_repo_utils_bzl",
+    ],
+)
+
+bzl_library(
+    name = "flags_bzl",
+    srcs = ["flags.bzl"],
+    deps = [
+        "//python/private:enum_bzl",
+        "@bazel_skylib//rules:common_settings",
+    ],
+)
+
+bzl_library(
+    name = "generate_whl_library_build_bazel_bzl",
+    srcs = ["generate_whl_library_build_bazel.bzl"],
+    deps = [
+        ":labels_bzl",
+        "//python/private:normalize_name_bzl",
+    ],
+)
+
+bzl_library(
+    name = "generate_group_library_build_bazel_bzl",
+    srcs = ["generate_group_library_build_bazel.bzl"],
+    deps = [
+        ":labels_bzl",
+        "//python/private:normalize_name_bzl",
+    ],
+)
+
+bzl_library(
+    name = "group_library_bzl",
+    srcs = ["group_library.bzl"],
+    deps = [
+        ":generate_group_library_build_bazel_bzl",
+    ],
+)
+
+bzl_library(
+    name = "hub_repository_bzl",
+    srcs = ["hub_repository.bzl"],
+    visibility = ["//:__subpackages__"],
+    deps = [
+        ":render_pkg_aliases_bzl",
+        "//python/private:text_util_bzl",
+    ],
+)
+
+bzl_library(
+    name = "index_sources_bzl",
+    srcs = ["index_sources.bzl"],
+)
+
+bzl_library(
+    name = "labels_bzl",
+    srcs = ["labels.bzl"],
+)
+
+bzl_library(
+    name = "multi_pip_parse_bzl",
+    srcs = ["multi_pip_parse.bzl"],
+    deps = ["pip_repository_bzl"],
+)
+
+bzl_library(
+    name = "package_annotation_bzl",
+    srcs = ["package_annotation.bzl"],
+)
+
+bzl_library(
+    name = "parse_requirements_bzl",
+    srcs = ["parse_requirements.bzl"],
+    deps = [
+        ":index_sources_bzl",
+        ":parse_requirements_txt_bzl",
+        ":pypi_repo_utils_bzl",
+        ":requirements_files_by_platform_bzl",
+        ":whl_target_platforms_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private:repo_utils_bzl",
+    ],
+)
+
+bzl_library(
+    name = "parse_requirements_txt_bzl",
+    srcs = ["parse_requirements_txt.bzl"],
+)
+
+bzl_library(
+    name = "parse_simpleapi_html_bzl",
+    srcs = ["parse_simpleapi_html.bzl"],
+)
+
+bzl_library(
+    name = "parse_whl_name_bzl",
+    srcs = ["parse_whl_name.bzl"],
+)
+
+bzl_library(
+    name = "patch_whl_bzl",
+    srcs = ["patch_whl.bzl"],
+    deps = [
+        ":parse_whl_name_bzl",
+        "//python/private:repo_utils_bzl",
+    ],
+)
+
+bzl_library(
+    name = "pip_bzl",
+    srcs = ["pip.bzl"],
+    deps = [
+        ":extension_bzl",
+    ],
+)
+
+bzl_library(
+    name = "pip_compile_bzl",
+    srcs = ["pip_compile.bzl"],
+    deps = [
+        ":deps_bzl",
+        "//python:defs_bzl",
+    ],
+)
+
+bzl_library(
+    name = "pip_repository_bzl",
+    srcs = ["pip_repository.bzl"],
+    deps = [
+        ":attrs_bzl",
+        ":evaluate_markers_bzl",
+        ":parse_requirements_bzl",
+        ":pip_repository_attrs_bzl",
+        ":render_pkg_aliases_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private:repo_utils_bzl",
+        "//python/private:text_util_bzl",
+        "@bazel_skylib//lib:sets",
+    ],
+)
+
+bzl_library(
+    name = "pip_repository_attrs_bzl",
+    srcs = ["pip_repository_attrs.bzl"],
+)
+
+bzl_library(
+    name = "pypi_repo_utils_bzl",
+    srcs = ["pypi_repo_utils.bzl"],
+    deps = [
+        "//python/private:repo_utils_bzl",
+        "@bazel_skylib//lib:types",
+    ],
+)
+
+bzl_library(
+    name = "render_pkg_aliases_bzl",
+    srcs = ["render_pkg_aliases.bzl"],
+    deps = [
+        ":generate_group_library_build_bazel_bzl",
+        ":labels_bzl",
+        ":parse_whl_name_bzl",
+        ":whl_target_platforms_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private:text_util_bzl",
+    ],
+)
+
+bzl_library(
+    name = "requirements_files_by_platform_bzl",
+    srcs = ["requirements_files_by_platform.bzl"],
+    deps = [
+        ":whl_target_platforms_bzl",
+    ],
+)
+
+bzl_library(
+    name = "simpleapi_download_bzl",
+    srcs = ["simpleapi_download.bzl"],
+    deps = [
+        ":parse_simpleapi_html_bzl",
+        "//python/private:auth_bzl",
+        "//python/private:normalize_name_bzl",
+        "//python/private:text_util_bzl",
+        "@bazel_features//:features",
+    ],
+)
+
+bzl_library(
+    name = "whl_library_alias_bzl",
+    srcs = ["whl_library_alias.bzl"],
+    deps = [
+        ":render_pkg_aliases_bzl",
+        "//python/private:full_version_bzl",
+    ],
+)
+
+bzl_library(
+    name = "whl_library_bzl",
+    srcs = ["whl_library.bzl"],
+    deps = [
+        ":attrs_bzl",
+        ":deps_bzl",
+        ":generate_whl_library_build_bazel_bzl",
+        ":parse_whl_name_bzl",
+        ":patch_whl_bzl",
+        ":pypi_repo_utils_bzl",
+        ":whl_target_platforms_bzl",
+        "//python:repositories_bzl",
+        "//python/private:auth_bzl",
+        "//python/private:envsubst_bzl",
+        "//python/private:repo_utils_bzl",
+    ],
+)
+
+bzl_library(
+    name = "whl_repo_name_bzl",
+    srcs = ["whl_repo_name.bzl"],
+    deps = [
+        ":parse_whl_name_bzl",
+        "//python/private:normalize_name_bzl",
+    ],
+)
+
+bzl_library(
+    name = "whl_target_platforms_bzl",
+    srcs = ["whl_target_platforms.bzl"],
+    deps = [":parse_whl_name_bzl"],
+)
diff --git a/python/private/pypi/README.md b/python/private/pypi/README.md
new file mode 100644
index 0000000..6be5703
--- /dev/null
+++ b/python/private/pypi/README.md
@@ -0,0 +1,9 @@
+# PyPI integration code
+
+This code is for integrating with PyPI and other compatible indexes. At the
+moment we have code for:
+* Downloading packages using `pip` or `repository_ctx.download`.
+* Interacting with PyPI compatible indexes via the [SimpleAPI](https://packaging.python.org/en/latest/specifications/simple-repository-api/) spec.
+* Locking a `requirements.in` or [PEP621] compliant `pyproject.toml`.
+
+[PEP621]: https://peps.python.org/pep-0621/
diff --git a/python/private/pypi/attrs.bzl b/python/private/pypi/attrs.bzl
new file mode 100644
index 0000000..c6132cb
--- /dev/null
+++ b/python/private/pypi/attrs.bzl
@@ -0,0 +1,221 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"common attributes for whl_library and pip_repository"
+
+ATTRS = {
+    "download_only": attr.bool(
+        doc = """
+Whether to use "pip download" instead of "pip wheel". Disables building wheels from source, but allows use of
+--platform, --python-version, --implementation, and --abi in --extra_pip_args to download wheels for a different
+platform from the host platform.
+        """,
+    ),
+    "enable_implicit_namespace_pkgs": attr.bool(
+        default = False,
+        doc = """
+If true, disables conversion of native namespace packages into pkg-util style namespace packages. When set all py_binary
+and py_test targets must specify either `legacy_create_init=False` or the global Bazel option
+`--incompatible_default_to_explicit_init_py` to prevent `__init__.py` being automatically generated in every directory.
+
+This option is required to support some packages which cannot handle the conversion to pkg-util style.
+            """,
+    ),
+    "environment": attr.string_dict(
+        doc = """
+Environment variables to set in the pip subprocess.
+Can be used to set common variables such as `http_proxy`, `https_proxy` and `no_proxy`
+Note that pip is run with "--isolated" on the CLI so `PIP_<VAR>_<NAME>`
+style env vars are ignored, but env vars that control requests and urllib3
+can be passed. If you need `PIP_<VAR>_<NAME>`, take a look at `extra_pip_args`
+and `envsubst`.
+        """,
+        default = {},
+    ),
+    "envsubst": attr.string_list(
+        mandatory = False,
+        doc = """\
+A list of environment variables to substitute (e.g. `["PIP_INDEX_URL",
+"PIP_RETRIES"]`). The corresponding variables are expanded in `extra_pip_args`
+using the syntax `$VARNAME` or `${VARNAME}` (expanding to empty string if unset)
+or `${VARNAME:-default}` (expanding to default if the variable is unset or empty
+in the environment). Note: On Bazel 6 and Bazel 7.0 changes to the variables named
+here do not cause packages to be re-fetched. Don't fetch different things based
+on the value of these variables.
+""",
+    ),
+    "experimental_requirement_cycles": attr.string_list_dict(
+        default = {},
+        doc = """\
+A mapping of dependency cycle names to a list of requirements which form that cycle.
+
+Requirements which form cycles will be installed together and taken as
+dependencies together in order to ensure that the cycle is always satisfied.
+
+Example:
+  `sphinx` depends on `sphinxcontrib-serializinghtml`
+  When listing both as requirements, as in
+
+  ```
+  py_binary(
+    name = "doctool",
+    ...
+    deps = [
+      "@pypi//sphinx:pkg",
+      "@pypi//sphinxcontrib_serializinghtml",
+     ]
+  )
+  ```
+
+  Will produce a Bazel error such as
+
+  ```
+  ERROR: .../external/pypi_sphinxcontrib_serializinghtml/BUILD.bazel:44:6: in alias rule @pypi_sphinxcontrib_serializinghtml//:pkg: cycle in dependency graph:
+      //:doctool (...)
+      @pypi//sphinxcontrib_serializinghtml:pkg (...)
+  .-> @pypi_sphinxcontrib_serializinghtml//:pkg (...)
+  |   @pypi_sphinxcontrib_serializinghtml//:_pkg (...)
+  |   @pypi_sphinx//:pkg (...)
+  |   @pypi_sphinx//:_pkg (...)
+  `-- @pypi_sphinxcontrib_serializinghtml//:pkg (...)
+  ```
+
+  Which we can resolve by configuring these two requirements to be installed together as a cycle
+
+  ```
+  pip_parse(
+    ...
+    experimental_requirement_cycles = {
+      "sphinx": [
+        "sphinx",
+        "sphinxcontrib-serializinghtml",
+      ]
+    },
+  )
+  ```
+
+Warning:
+  If a dependency participates in multiple cycles, all of those cycles must be
+  collapsed down to one. For instance `a <-> b` and `a <-> c` cannot be listed
+  as two separate cycles.
+""",
+    ),
+    "experimental_target_platforms": attr.string_list(
+        default = [],
+        doc = """\
+A list of platforms that we will generate the conditional dependency graph for
+cross platform wheels by parsing the wheel metadata. This will generate the
+correct dependencies for packages like `sphinx` or `pylint`, which include
+`colorama` when installed and used on Windows platforms.
+
+An empty list means falling back to the legacy behaviour where the host
+platform is the target platform.
+
+WARNING: It may not work as expected in cases where the python interpreter
+implementation used at runtime differs between platforms.
+This has been tested for CPython only.
+
+For specific target platforms use values of the form `<os>_<arch>` where `<os>`
+is one of `linux`, `osx`, `windows` and arch is one of `x86_64`, `x86_32`,
+`aarch64`, `s390x` and `ppc64le`.
+
+You can also target a specific Python version by using `cp3<minor_version>_<os>_<arch>`.
+If multiple python versions are specified as target platforms, then select statements
+of the `lib` and `whl` targets will include usage of version aware toolchain config
+settings like `@rules_python//python/config_settings:is_python_3.y`.
+
+Special values: `host` (for generating deps for the host platform only) and
+`<prefix>_*` values. For example, `cp39_*`, `linux_*`, `cp39_linux_*`.
+
+NOTE: this is not for cross-compiling Python wheels but rather for parsing the `whl` METADATA correctly.
+""",
+    ),
+    "extra_pip_args": attr.string_list(
+        doc = """Extra arguments to pass on to pip. Must not contain spaces.
+
+Supports environment variables using the syntax `$VARNAME` or
+`${VARNAME}` (expanding to empty string if unset) or
+`${VARNAME:-default}` (expanding to default if the variable is unset
+or empty in the environment), if `"VARNAME"` is listed in the
+`envsubst` attribute. See also `envsubst`.
+""",
+    ),
+    "isolated": attr.bool(
+        doc = """\
+Whether or not to pass the [--isolated](https://pip.pypa.io/en/stable/cli/pip/#cmdoption-isolated) flag to
+the underlying pip command. Alternatively, the {envvar}`RULES_PYTHON_PIP_ISOLATED` environment variable can be used
+to control this flag.
+""",
+        default = True,
+    ),
+    "pip_data_exclude": attr.string_list(
+        doc = "Additional data exclusion parameters to add to the pip packages BUILD file.",
+    ),
+    "python_interpreter": attr.string(
+        doc = """\
+The python interpreter to use. This can either be an absolute path or the name
+of a binary found on the host's `PATH` environment variable. If no value is set
+`python3` is defaulted for Unix systems and `python.exe` for Windows.
+""",
+        # NOTE: This attribute should not have a default. See `_get_python_interpreter_attr`
+        # default = "python3"
+    ),
+    "python_interpreter_target": attr.label(
+        allow_single_file = True,
+        doc = """
+If you are using a custom python interpreter built by another repository rule,
+use this attribute to specify its BUILD target. This allows pip_repository to invoke
+pip using the same interpreter as your toolchain. If set, takes precedence over
+python_interpreter. An example value: "@python3_x86_64-unknown-linux-gnu//:python".
+""",
+    ),
+    "quiet": attr.bool(
+        default = True,
+        doc = """\
+If True, suppress printing stdout and stderr output to the terminal.
+
+If you would like to get more diagnostic output, set
+{envvar}`RULES_PYTHON_REPO_DEBUG=1 <RULES_PYTHON_REPO_DEBUG>`
+or
+{envvar}`RULES_PYTHON_REPO_DEBUG_VERBOSITY=<INFO|DEBUG|TRACE> <RULES_PYTHON_REPO_DEBUG_VERBOSITY>`
+""",
+    ),
+    # 600 is documented as default here: https://docs.bazel.build/versions/master/skylark/lib/repository_ctx.html#execute
+    "timeout": attr.int(
+        default = 600,
+        doc = "Timeout (in seconds) on the rule's execution duration.",
+    ),
+}
+
+def use_isolated(ctx, attr):
+    """Determine whether or not to pass the pip `--isolated` flag to the pip invocation.
+
+    Args:
+        ctx: repository or module context
+        attr: attributes for the repo rule or tag extension
+
+    Returns:
+        True if --isolated should be passed
+    """
+    use_isolated = attr.isolated
+
+    # The environment variable will take precedence over the attribute
+    isolated_env = ctx.os.environ.get("RULES_PYTHON_PIP_ISOLATED", None)
+    if isolated_env != None:
+        if isolated_env.lower() in ("0", "false"):
+            use_isolated = False
+        else:
+            use_isolated = True
+
+    return use_isolated
diff --git a/python/private/pypi/config_settings.bzl b/python/private/pypi/config_settings.bzl
new file mode 100644
index 0000000..9741217
--- /dev/null
+++ b/python/private/pypi/config_settings.bzl
@@ -0,0 +1,320 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module is used to construct the config settings for selecting which distribution is used in the pip hub repository.
+
+Bazel's selects work by selecting the most-specialized configuration setting
+that matches the target platform. We can leverage this fact to ensure that the
+most specialized wheels are used by default with the users being able to
+configure string_flag values to select the less specialized ones.
+
+The list of specialization of the dists goes like follows:
+* sdist
+* py*-none-any.whl
+* py*-abi3-any.whl
+* py*-cpxy-any.whl
+* cp*-none-any.whl
+* cp*-abi3-any.whl
+* cp*-cpxy-plat.whl
+* py*-none-plat.whl
+* py*-abi3-plat.whl
+* py*-cpxy-plat.whl
+* cp*-none-plat.whl
+* cp*-abi3-plat.whl
+* cp*-cpxy-plat.whl
+
+Note that here the specialization of musl vs manylinux wheels is the same in
+order to ensure that the matching fails if the user requests `musl` and we don't have it, or vice versa.
+"""
+
+load(":flags.bzl", "INTERNAL_FLAGS", "UniversalWhlFlag", "WhlLibcFlag")
+
+FLAGS = struct(
+    **{
+        f: str(Label("//python/config_settings:" + f))
+        for f in [
+            "python_version",
+            "pip_whl_glibc_version",
+            "pip_whl_muslc_version",
+            "pip_whl_osx_arch",
+            "pip_whl_osx_version",
+            "py_linux_libc",
+            "is_pip_whl_no",
+            "is_pip_whl_only",
+            "is_pip_whl_auto",
+        ]
+    }
+)
+
+# Here we create extra string flags that are just to work with the select
+# selecting the most specialized match. We don't allow the user to change
+# them.
+_flags = struct(
+    **{
+        f: str(Label("//python/config_settings:_internal_pip_" + f))
+        for f in INTERNAL_FLAGS
+    }
+)
+
+def config_settings(
+        *,
+        python_versions = [],
+        glibc_versions = [],
+        muslc_versions = [],
+        osx_versions = [],
+        target_platforms = [],
+        name = None,
+        visibility = None,
+        native = native):
+    """Generate all of the pip config settings.
+
+    Args:
+        name (str): Currently unused.
+        python_versions (list[str]): The list of python versions to configure
+            config settings for.
+        glibc_versions (list[str]): The list of glibc version of the wheels to
+            configure config settings for.
+        muslc_versions (list[str]): The list of musl version of the wheels to
+            configure config settings for.
+        osx_versions (list[str]): The list of OSX OS versions to configure
+            config settings for.
+        target_platforms (list[str]): The list of "{os}_{cpu}" for deriving
+            constraint values for each condition.
+        visibility (list[str], optional): The visibility to be passed to the
+            exposed labels. All other labels will be private.
+        native (struct): The struct containing alias and config_setting rules
+            to use for creating the objects. Can be overridden for unit tests
+            reasons.
+    """
+
+    glibc_versions = [""] + glibc_versions
+    muslc_versions = [""] + muslc_versions
+    osx_versions = [""] + osx_versions
+    target_platforms = [("", "")] + [
+        t.split("_", 1)
+        for t in target_platforms
+    ]
+
+    for python_version in [""] + python_versions:
+        is_python = "is_python_{}".format(python_version or "version_unset")
+        native.alias(
+            name = is_python,
+            actual = Label("//python/config_settings:" + is_python),
+            visibility = visibility,
+        )
+
+        for os, cpu in target_platforms:
+            constraint_values = []
+            suffix = ""
+            if os:
+                constraint_values.append("@platforms//os:" + os)
+                suffix += "_" + os
+            if cpu:
+                constraint_values.append("@platforms//cpu:" + cpu)
+                suffix += "_" + cpu
+
+            _dist_config_settings(
+                suffix = suffix,
+                plat_flag_values = _plat_flag_values(
+                    os = os,
+                    cpu = cpu,
+                    osx_versions = osx_versions,
+                    glibc_versions = glibc_versions,
+                    muslc_versions = muslc_versions,
+                ),
+                constraint_values = constraint_values,
+                python_version = python_version,
+                is_python = is_python,
+                visibility = visibility,
+                native = native,
+            )
+
+def _dist_config_settings(*, suffix, plat_flag_values, **kwargs):
+    flag_values = {_flags.dist: ""}
+
+    # First create an sdist, we will be building upon the flag values, which
+    # will ensure that each sdist config setting is the least specialized of
+    # all. However, we need at least one flag value to cover the case where we
+    # have `sdist` for any platform, hence we have a non-empty `flag_values`
+    # here.
+    _dist_config_setting(
+        name = "sdist{}".format(suffix),
+        flag_values = flag_values,
+        is_pip_whl = FLAGS.is_pip_whl_no,
+        **kwargs
+    )
+
+    for name, f in [
+        ("py_none", _flags.whl_py2_py3),
+        ("py3_none", _flags.whl_py3),
+        ("py3_abi3", _flags.whl_py3_abi3),
+        ("cp3x_none", _flags.whl_pycp3x),
+        ("cp3x_abi3", _flags.whl_pycp3x_abi3),
+        ("cp3x_cp", _flags.whl_pycp3x_abicp),
+    ]:
+        if f in flag_values:
+            # This should never happen as all of the different whls should have
+            # unique flag values.
+            fail("BUG: the flag {} is attempted to be added twice to the list".format(f))
+        else:
+            flag_values[f] = ""
+
+        _dist_config_setting(
+            name = "{}_any{}".format(name, suffix),
+            flag_values = flag_values,
+            is_pip_whl = FLAGS.is_pip_whl_only,
+            **kwargs
+        )
+
+    generic_flag_values = flag_values
+
+    for (suffix, flag_values) in plat_flag_values:
+        flag_values = flag_values | generic_flag_values
+
+        for name, f in [
+            ("py_none", _flags.whl_plat),
+            ("py3_none", _flags.whl_plat_py3),
+            ("py3_abi3", _flags.whl_plat_py3_abi3),
+            ("cp3x_none", _flags.whl_plat_pycp3x),
+            ("cp3x_abi3", _flags.whl_plat_pycp3x_abi3),
+            ("cp3x_cp", _flags.whl_plat_pycp3x_abicp),
+        ]:
+            if f in flag_values:
+                # This should never happen as all of the different whls should have
+                # unique flag values.
+                fail("BUG: the flag {} is attempted to be added twice to the list".format(f))
+            else:
+                flag_values[f] = ""
+
+            _dist_config_setting(
+                name = "{}_{}".format(name, suffix),
+                flag_values = flag_values,
+                is_pip_whl = FLAGS.is_pip_whl_only,
+                **kwargs
+            )
+
+def _to_version_string(version, sep = "."):
+    if not version:
+        return ""
+
+    return "{}{}{}".format(version[0], sep, version[1])
+
+def _plat_flag_values(os, cpu, osx_versions, glibc_versions, muslc_versions):
+    ret = []
+    if os == "":
+        return []
+    elif os == "windows":
+        ret.append(("{}_{}".format(os, cpu), {}))
+    elif os == "osx":
+        for cpu_, arch in {
+            cpu: UniversalWhlFlag.ARCH,
+            cpu + "_universal2": UniversalWhlFlag.UNIVERSAL,
+        }.items():
+            for osx_version in osx_versions:
+                flags = {
+                    FLAGS.pip_whl_osx_version: _to_version_string(osx_version),
+                }
+                if arch == UniversalWhlFlag.ARCH:
+                    flags[FLAGS.pip_whl_osx_arch] = arch
+
+                if not osx_version:
+                    suffix = "{}_{}".format(os, cpu_)
+                else:
+                    suffix = "{}_{}_{}".format(os, _to_version_string(osx_version, "_"), cpu_)
+
+                ret.append((suffix, flags))
+
+    elif os == "linux":
+        for os_prefix, linux_libc in {
+            os: WhlLibcFlag.GLIBC,
+            "many" + os: WhlLibcFlag.GLIBC,
+            "musl" + os: WhlLibcFlag.MUSL,
+        }.items():
+            if linux_libc == WhlLibcFlag.GLIBC:
+                libc_versions = glibc_versions
+                libc_flag = FLAGS.pip_whl_glibc_version
+            elif linux_libc == WhlLibcFlag.MUSL:
+                libc_versions = muslc_versions
+                libc_flag = FLAGS.pip_whl_muslc_version
+            else:
+                fail("Unsupported libc type: {}".format(linux_libc))
+
+            for libc_version in libc_versions:
+                if libc_version and os_prefix == os:
+                    continue
+                elif libc_version:
+                    suffix = "{}_{}_{}".format(os_prefix, _to_version_string(libc_version, "_"), cpu)
+                else:
+                    suffix = "{}_{}".format(os_prefix, cpu)
+
+                ret.append((
+                    suffix,
+                    {
+                        FLAGS.py_linux_libc: linux_libc,
+                        libc_flag: _to_version_string(libc_version),
+                    },
+                ))
+    else:
+        fail("Unsupported os: {}".format(os))
+
+    return ret
+
+def _dist_config_setting(*, name, is_pip_whl, is_python, python_version, native = native, **kwargs):
+    """A macro to create a target that matches is_pip_whl_auto and one more value.
+
+    Args:
+        name: The name of the public target.
+        is_pip_whl: The config setting to match in addition to
+            `is_pip_whl_auto` when evaluating the config setting.
+        is_python: The python version config_setting to match.
+        python_version: The python version name.
+        native (struct): The struct containing alias and config_setting rules
+            to use for creating the objects. Can be overridden for unit tests
+            reasons.
+        **kwargs: The kwargs passed to the config_setting rule. Visibility of
+            the main alias target is also taken from the kwargs.
+    """
+    _name = "_is_" + name
+
+    visibility = kwargs.get("visibility")
+    native.alias(
+        name = "is_cp{}_{}".format(python_version, name) if python_version else "is_{}".format(name),
+        actual = select({
+            # First match by the python version
+            is_python: _name,
+            "//conditions:default": is_python,
+        }),
+        visibility = visibility,
+    )
+
+    if python_version:
+        # Reuse the config_setting targets that we use with the default
+        # `python_version` setting.
+        return
+
+    config_setting_name = _name + "_setting"
+    native.config_setting(name = config_setting_name, **kwargs)
+
+    # Next match by the `pip_whl` flag value and then match by the flags that
+    # are intrinsic to the distribution.
+    native.alias(
+        name = _name,
+        actual = select({
+            "//conditions:default": FLAGS.is_pip_whl_auto,
+            FLAGS.is_pip_whl_auto: config_setting_name,
+            is_pip_whl: config_setting_name,
+        }),
+        visibility = visibility,
+    )
diff --git a/python/private/pypi/dependency_resolver/BUILD.bazel b/python/private/pypi/dependency_resolver/BUILD.bazel
new file mode 100644
index 0000000..9531b55
--- /dev/null
+++ b/python/private/pypi/dependency_resolver/BUILD.bazel
@@ -0,0 +1,7 @@
+exports_files(["dependency_resolver.py"])
+
+filegroup(
+    name = "distribution",
+    srcs = glob(["**"]),
+    visibility = ["//python/private/pypi:__subpackages__"],
+)
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/python/private/pypi/dependency_resolver/__init__.py
similarity index 99%
rename from python/pip_install/tools/dependency_resolver/__init__.py
rename to python/private/pypi/dependency_resolver/__init__.py
index bbdfb4c..4101095 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/python/private/pypi/dependency_resolver/__init__.py
@@ -11,4 +11,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
diff --git a/python/pip_install/tools/dependency_resolver/dependency_resolver.py b/python/private/pypi/dependency_resolver/dependency_resolver.py
similarity index 90%
rename from python/pip_install/tools/dependency_resolver/dependency_resolver.py
rename to python/private/pypi/dependency_resolver/dependency_resolver.py
index 5e914bc..0ff9b2f 100644
--- a/python/pip_install/tools/dependency_resolver/dependency_resolver.py
+++ b/python/private/pypi/dependency_resolver/dependency_resolver.py
@@ -80,7 +80,7 @@
 
 
 @click.command(context_settings={"ignore_unknown_options": True})
[email protected]("requirements_in")
[email protected]("--src", "srcs", multiple=True, required=True)
 @click.argument("requirements_txt")
 @click.argument("update_target_label")
 @click.option("--requirements-linux")
@@ -88,7 +88,7 @@
 @click.option("--requirements-windows")
 @click.argument("extra_args", nargs=-1, type=click.UNPROCESSED)
 def main(
-    requirements_in: str,
+    srcs: Tuple[str, ...],
     requirements_txt: str,
     update_target_label: str,
     requirements_linux: Optional[str],
@@ -99,11 +99,13 @@
     bazel_runfiles = runfiles.Create()
 
     requirements_file = _select_golden_requirements_file(
-        requirements_txt=requirements_txt, requirements_linux=requirements_linux,
-        requirements_darwin=requirements_darwin, requirements_windows=requirements_windows
+        requirements_txt=requirements_txt,
+        requirements_linux=requirements_linux,
+        requirements_darwin=requirements_darwin,
+        requirements_windows=requirements_windows,
     )
 
-    resolved_requirements_in = _locate(bazel_runfiles, requirements_in)
+    resolved_srcs = [_locate(bazel_runfiles, src) for src in srcs]
     resolved_requirements_file = _locate(bazel_runfiles, requirements_file)
 
     # Files in the runfiles directory has the following naming schema:
@@ -116,12 +118,12 @@
         : -(len(requirements_file) - len(repository_prefix))
     ]
 
-    # As requirements_in might contain references to generated files we want to
+    # As srcs might contain references to generated files we want to
     # use the runfiles file first. Thus, we need to compute the relative path
     # from the execution root.
     # Note: Windows cannot reference generated files without runfiles support enabled.
-    requirements_in_relative = requirements_in[len(repository_prefix):]
-    requirements_file_relative = requirements_file[len(repository_prefix):]
+    srcs_relative = [src[len(repository_prefix) :] for src in srcs]
+    requirements_file_relative = requirements_file[len(repository_prefix) :]
 
     # Before loading click, set the locale for its parser.
     # If it leaks through to the system setting, it may fail:
@@ -157,11 +159,12 @@
     os.environ["CUSTOM_COMPILE_COMMAND"] = update_command
     os.environ["PIP_CONFIG_FILE"] = os.getenv("PIP_CONFIG_FILE") or os.devnull
 
-    argv.append(f"--output-file={requirements_file_relative if UPDATE else requirements_out}")
     argv.append(
-        requirements_in_relative
-        if Path(requirements_in_relative).exists()
-        else resolved_requirements_in
+        f"--output-file={requirements_file_relative if UPDATE else requirements_out}"
+    )
+    argv.extend(
+        (src_relative if Path(src_relative).exists() else resolved_src)
+        for src_relative, resolved_src in zip(srcs_relative, resolved_srcs)
     )
     argv.extend(extra_args)
 
@@ -196,7 +199,7 @@
                 print(
                     "pip-compile exited with code 2. This means that pip-compile found "
                     "incompatible requirements or could not find a version that matches "
-                    f"the install requirement in {requirements_in_relative}.",
+                    f"the install requirement in one of {srcs_relative}.",
                     file=sys.stderr,
                 )
                 sys.exit(1)
diff --git a/python/private/pypi/deps.bzl b/python/private/pypi/deps.bzl
new file mode 100644
index 0000000..e07d9aa
--- /dev/null
+++ b/python/private/pypi/deps.bzl
@@ -0,0 +1,140 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+
+_RULE_DEPS = [
+    # START: maintained by 'bazel run //tools/private/update_deps:update_pip_deps'
+    (
+        "pypi__build",
+        "https://files.pythonhosted.org/packages/e2/03/f3c8ba0a6b6e30d7d18c40faab90807c9bb5e9a1e3b2fe2008af624a9c97/build-1.2.1-py3-none-any.whl",
+        "75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4",
+    ),
+    (
+        "pypi__click",
+        "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl",
+        "ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28",
+    ),
+    (
+        "pypi__colorama",
+        "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl",
+        "4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6",
+    ),
+    (
+        "pypi__importlib_metadata",
+        "https://files.pythonhosted.org/packages/2d/0a/679461c511447ffaf176567d5c496d1de27cbe34a87df6677d7171b2fbd4/importlib_metadata-7.1.0-py3-none-any.whl",
+        "30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570",
+    ),
+    (
+        "pypi__installer",
+        "https://files.pythonhosted.org/packages/e5/ca/1172b6638d52f2d6caa2dd262ec4c811ba59eee96d54a7701930726bce18/installer-0.7.0-py3-none-any.whl",
+        "05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53",
+    ),
+    (
+        "pypi__more_itertools",
+        "https://files.pythonhosted.org/packages/50/e2/8e10e465ee3987bb7c9ab69efb91d867d93959095f4807db102d07995d94/more_itertools-10.2.0-py3-none-any.whl",
+        "686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684",
+    ),
+    (
+        "pypi__packaging",
+        "https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl",
+        "2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5",
+    ),
+    (
+        "pypi__pep517",
+        "https://files.pythonhosted.org/packages/25/6e/ca4a5434eb0e502210f591b97537d322546e4833dcb4d470a48c375c5540/pep517-0.13.1-py3-none-any.whl",
+        "31b206f67165b3536dd577c5c3f1518e8fbaf38cbc57efff8369a392feff1721",
+    ),
+    (
+        "pypi__pip",
+        "https://files.pythonhosted.org/packages/8a/6a/19e9fe04fca059ccf770861c7d5721ab4c2aebc539889e97c7977528a53b/pip-24.0-py3-none-any.whl",
+        "ba0d021a166865d2265246961bec0152ff124de910c5cc39f1156ce3fa7c69dc",
+    ),
+    (
+        "pypi__pip_tools",
+        "https://files.pythonhosted.org/packages/0d/dc/38f4ce065e92c66f058ea7a368a9c5de4e702272b479c0992059f7693941/pip_tools-7.4.1-py3-none-any.whl",
+        "4c690e5fbae2f21e87843e89c26191f0d9454f362d8acdbd695716493ec8b3a9",
+    ),
+    (
+        "pypi__pyproject_hooks",
+        "https://files.pythonhosted.org/packages/ae/f3/431b9d5fe7d14af7a32340792ef43b8a714e7726f1d7b69cc4e8e7a3f1d7/pyproject_hooks-1.1.0-py3-none-any.whl",
+        "7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2",
+    ),
+    (
+        "pypi__setuptools",
+        "https://files.pythonhosted.org/packages/de/88/70c5767a0e43eb4451c2200f07d042a4bcd7639276003a9c54a68cfcc1f8/setuptools-70.0.0-py3-none-any.whl",
+        "54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4",
+    ),
+    (
+        "pypi__tomli",
+        "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl",
+        "939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
+    ),
+    (
+        "pypi__wheel",
+        "https://files.pythonhosted.org/packages/7d/cd/d7460c9a869b16c3dd4e1e403cce337df165368c71d6af229a74699622ce/wheel-0.43.0-py3-none-any.whl",
+        "55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81",
+    ),
+    (
+        "pypi__zipp",
+        "https://files.pythonhosted.org/packages/da/55/a03fd7240714916507e1fcf7ae355bd9d9ed2e6db492595f1a67f61681be/zipp-3.18.2-py3-none-any.whl",
+        "dce197b859eb796242b0622af1b8beb0a722d52aa2f57133ead08edd5bf5374e",
+    ),
+    # END: maintained by 'bazel run //tools/private/update_deps:update_pip_deps'
+]
+
+_GENERIC_WHEEL = """\
+package(default_visibility = ["//visibility:public"])
+
+load("@rules_python//python:defs.bzl", "py_library")
+
+py_library(
+    name = "lib",
+    srcs = glob(["**/*.py"]),
+    data = glob(["**/*"], exclude=[
+        # These entries include those put into user-installed dependencies by
+        # data_exclude to avoid non-determinism.
+        "**/*.py",
+        "**/*.pyc",
+        "**/*.pyc.*",  # During pyc creation, temp files named *.pyc.NNN are created
+        "**/* *",
+        "**/*.dist-info/RECORD",
+        "BUILD",
+        "WORKSPACE",
+    ]),
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["."],
+)
+"""
+
+# Collate all the repository names so they can be easily consumed
+all_repo_names = [name for (name, _, _) in _RULE_DEPS]
+
+def pypi_deps():
+    """
+    Fetch dependencies these rules depend on. Workspaces that use the pip_parse rule can call this.
+    """
+    for (name, url, sha256) in _RULE_DEPS:
+        maybe(
+            http_archive,
+            name,
+            url = url,
+            sha256 = sha256,
+            type = "zip",
+            build_file_content = _GENERIC_WHEEL,
+        )
diff --git a/python/private/pypi/evaluate_markers.bzl b/python/private/pypi/evaluate_markers.bzl
new file mode 100644
index 0000000..c805fd7
--- /dev/null
+++ b/python/private/pypi/evaluate_markers.bzl
@@ -0,0 +1,77 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A simple function that evaluates markers using a python interpreter."""
+
+load(":pypi_repo_utils.bzl", "pypi_repo_utils")
+
+# Used as a default value in a rule to ensure we fetch the dependencies.
+# Rules that list these labels as inputs are re-evaluated whenever any of the
+# files change.
+SRCS = [
+    # When the version, or any of the files in `packaging` package changes,
+    # this file will change as well.
+    Label("@pypi__packaging//:packaging-24.0.dist-info/RECORD"),
+    Label("//python/private/pypi/requirements_parser:resolve_target_platforms.py"),
+    Label("//python/private/pypi/whl_installer:platform.py"),
+]
+
+def evaluate_markers(mrctx, *, requirements, python_interpreter, python_interpreter_target, srcs, logger = None):
+    """Return the list of supported platforms per requirements line.
+
+    Args:
+        mrctx: repository_ctx or module_ctx.
+        requirements: list[str] of the requirement file lines to evaluate.
+        python_interpreter: str, path to the python_interpreter to use to
+            evaluate the env markers in the given requirements files. It will
+            be only called if the requirements files have env markers. This
+            should be something that is in your PATH or an absolute path.
+        python_interpreter_target: Label, same as python_interpreter, but in a
+            label format.
+        srcs: list[Label], the value of SRCS passed from the `rctx` or `mctx` to this function.
+        logger: repo_utils.logger or None, a simple struct to log diagnostic
+            messages. Defaults to None.
+
+    Returns:
+        dict of string lists with target platforms
+    """
+    if not requirements:
+        return {}
+
+    # Hand-off to the helper script happens via JSON files on disk: the raw
+    # requirement lines go in, the resolved platform mapping comes out.
+    in_file = mrctx.path("requirements_with_markers.in.json")
+    out_file = mrctx.path("requirements_with_markers.out.json")
+    mrctx.file(in_file, json.encode(requirements))
+
+    pypi_repo_utils.execute_checked(
+        mrctx,
+        op = "ResolveRequirementEnvMarkers({})".format(in_file),
+        arguments = [
+            pypi_repo_utils.resolve_python_interpreter(
+                mrctx,
+                python_interpreter = python_interpreter,
+                python_interpreter_target = python_interpreter_target,
+            ),
+            # Run the parser as a module so its package-relative imports work.
+            "-m",
+            "python.private.pypi.requirements_parser.resolve_target_platforms",
+            in_file,
+            out_file,
+        ],
+        # Listing the helper sources ensures re-evaluation when they change.
+        srcs = srcs,
+        environment = {
+            # NOTE(review): these labels presumably resolve to the directories
+            # containing the `packaging` dep and the rules_python `python`
+            # package — confirm against pypi_repo_utils.execute_checked.
+            "PYTHONPATH": [
+                Label("@pypi__packaging//:BUILD.bazel"),
+                Label("//:BUILD.bazel"),
+            ],
+        },
+        logger = logger,
+    )
+    return json.decode(mrctx.read(out_file))
diff --git a/python/private/pypi/extension.bzl b/python/private/pypi/extension.bzl
new file mode 100644
index 0000000..1bc8f15
--- /dev/null
+++ b/python/private/pypi/extension.bzl
@@ -0,0 +1,858 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"pip module extension for use with bzlmod"
+
+load("@bazel_features//:features.bzl", "bazel_features")
+load("@pythons_hub//:interpreters.bzl", "DEFAULT_PYTHON_VERSION", "INTERPRETER_LABELS")
+load("//python/private:auth.bzl", "AUTH_ATTRS")
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:repo_utils.bzl", "repo_utils")
+load("//python/private:version_label.bzl", "version_label")
+load(":attrs.bzl", "use_isolated")
+load(":evaluate_markers.bzl", "evaluate_markers", EVALUATE_MARKERS_SRCS = "SRCS")
+load(":hub_repository.bzl", "hub_repository")
+load(":parse_requirements.bzl", "host_platform", "parse_requirements", "select_requirement")
+load(":parse_whl_name.bzl", "parse_whl_name")
+load(":pip_repository_attrs.bzl", "ATTRS")
+load(":render_pkg_aliases.bzl", "whl_alias")
+load(":requirements_files_by_platform.bzl", "requirements_files_by_platform")
+load(":simpleapi_download.bzl", "simpleapi_download")
+load(":whl_library.bzl", "whl_library")
+load(":whl_repo_name.bzl", "whl_repo_name")
+
+def _parse_version(version):
+    """Split a dotted version string into its first four components.
+
+    Args:
+        version: str, e.g. "3.11.1". Components that are absent from the
+            input are returned as empty strings.
+
+    Returns:
+        struct with string fields `major`, `minor`, `patch` and `build`.
+    """
+    major, _, version = version.partition(".")
+    minor, _, version = version.partition(".")
+    patch, _, version = version.partition(".")
+    build, _, version = version.partition(".")
+
+    return struct(
+        # use semver vocabulary here
+        major = major,
+        minor = minor,
+        patch = patch,  # this is called `micro` in the Python interpreter versioning scheme
+        build = build,
+    )
+
+def _major_minor_version(version):
+    """Return the "X.Y" (major.minor) prefix of a dotted version string."""
+    version = _parse_version(version)
+    return "{}.{}".format(version.major, version.minor)
+
+def _whl_mods_impl(mctx):
+    """Implementation of the pip.whl_mods tag class.
+
+    This creates the JSON files used to modify the creation of different wheels.
+
+    Args:
+        mctx: module_ctx.
+    """
+    # dict[hub_name, dict[whl_name, whl_mods tag]] collected across modules.
+    whl_mods_dict = {}
+    for mod in mctx.modules:
+        for whl_mod_attr in mod.tags.whl_mods:
+            if whl_mod_attr.hub_name not in whl_mods_dict.keys():
+                whl_mods_dict[whl_mod_attr.hub_name] = {whl_mod_attr.whl_name: whl_mod_attr}
+            elif whl_mod_attr.whl_name in whl_mods_dict[whl_mod_attr.hub_name].keys():
+                # We cannot have the same wheel name in the same hub, as we
+                # will create the same JSON file name.
+                fail("""\
+Found same whl_name '{}' in the same hub '{}', please use a different hub_name.""".format(
+                    whl_mod_attr.whl_name,
+                    whl_mod_attr.hub_name,
+                ))
+            else:
+                whl_mods_dict[whl_mod_attr.hub_name][whl_mod_attr.whl_name] = whl_mod_attr
+
+    for hub_name, whl_maps in whl_mods_dict.items():
+        whl_mods = {}
+
+        # create a struct that we can pass to the _whl_mods_repo rule
+        # to create the different JSON files.
+        for whl_name, mods in whl_maps.items():
+            build_content = mods.additive_build_content
+            if mods.additive_build_content_file != None and mods.additive_build_content != "":
+                fail("""\
+You cannot use both the additive_build_content and additive_build_content_file arguments at the same time.
+""")
+            elif mods.additive_build_content_file != None:
+                # Read the file now so that the generated JSON is
+                # self-contained.
+                build_content = mctx.read(mods.additive_build_content_file)
+
+            whl_mods[whl_name] = json.encode(struct(
+                additive_build_content = build_content,
+                copy_files = mods.copy_files,
+                copy_executables = mods.copy_executables,
+                data = mods.data,
+                data_exclude_glob = mods.data_exclude_glob,
+                srcs_exclude_glob = mods.srcs_exclude_glob,
+            ))
+
+        _whl_mods_repo(
+            name = hub_name,
+            whl_mods = whl_mods,
+        )
+
+def _create_whl_repos(module_ctx, pip_attr, whl_map, whl_overrides, group_map, simpleapi_cache, exposed_packages):
+    """Create whl_library spoke repos for a single `pip.parse` tag instance.
+
+    Args:
+        module_ctx: module_ctx.
+        pip_attr: a single `pip.parse` tag attribute struct.
+        whl_map: dict[hub_name, dict[whl_name, list[whl_alias]]]; mutated in place.
+        whl_overrides: dict[whl_name, dict[patch, struct]] of wheel patches.
+        group_map: dict[hub_name, requirement cycle groups]; mutated in place.
+        simpleapi_cache: dict caching SimpleAPI responses across invocations.
+        exposed_packages: dict[hub_name, dict[whl_name, None]]; mutated in place.
+
+    Returns:
+        bool: True when no distribution metadata was fetched from an index,
+        i.e. the hub creation is fully reproducible without network access.
+    """
+    logger = repo_utils.logger(module_ctx, "pypi:create_whl_repos")
+    python_interpreter_target = pip_attr.python_interpreter_target
+    is_hub_reproducible = True
+
+    # if we do not have the python_interpreter set in the attributes
+    # we programmatically find it.
+    hub_name = pip_attr.hub_name
+    if python_interpreter_target == None and not pip_attr.python_interpreter:
+        python_name = "python_{}_host".format(
+            pip_attr.python_version.replace(".", "_"),
+        )
+        if python_name not in INTERPRETER_LABELS:
+            fail((
+                "Unable to find interpreter for pip hub '{hub_name}' for " +
+                "python_version={version}: Make sure a corresponding " +
+                '`python.toolchain(python_version="{version}")` call exists.' +
+                "Expected to find {python_name} among registered versions:\n  {labels}"
+            ).format(
+                hub_name = hub_name,
+                version = pip_attr.python_version,
+                python_name = python_name,
+                labels = "  \n".join(INTERPRETER_LABELS),
+            ))
+        python_interpreter_target = INTERPRETER_LABELS[python_name]
+
+    pip_name = "{}_{}".format(
+        hub_name,
+        version_label(pip_attr.python_version),
+    )
+    major_minor = _major_minor_version(pip_attr.python_version)
+
+    if hub_name not in whl_map:
+        whl_map[hub_name] = {}
+
+    # Invert the label->whl_name mapping so we can look up by wheel name.
+    whl_modifications = {}
+    if pip_attr.whl_modifications != None:
+        for mod, whl_name in pip_attr.whl_modifications.items():
+            whl_modifications[whl_name] = mod
+
+    if pip_attr.experimental_requirement_cycles:
+        requirement_cycles = {
+            name: [normalize_name(whl_name) for whl_name in whls]
+            for name, whls in pip_attr.experimental_requirement_cycles.items()
+        }
+
+        whl_group_mapping = {
+            whl_name: group_name
+            for group_name, group_whls in requirement_cycles.items()
+            for whl_name in group_whls
+        }
+
+        # TODO @aignas 2024-04-05: how do we support different requirement
+        # cycles for different abis/oses? For now we will need the users to
+        # assume the same groups across all versions/platforms until we start
+        # using an alternative cycle resolution strategy.
+        group_map[hub_name] = pip_attr.experimental_requirement_cycles
+    else:
+        whl_group_mapping = {}
+        requirement_cycles = {}
+
+    # Create a new wheel library for each of the different whls
+
+    get_index_urls = None
+    if pip_attr.experimental_index_url:
+        if pip_attr.download_only:
+            fail("Currently unsupported to use `download_only` and `experimental_index_url`")
+
+        get_index_urls = lambda ctx, distributions: simpleapi_download(
+            ctx,
+            attr = struct(
+                index_url = pip_attr.experimental_index_url,
+                extra_index_urls = pip_attr.experimental_extra_index_urls or [],
+                index_url_overrides = pip_attr.experimental_index_url_overrides or {},
+                sources = distributions,
+                envsubst = pip_attr.envsubst,
+                # Auth related info
+                netrc = pip_attr.netrc,
+                auth_patterns = pip_attr.auth_patterns,
+            ),
+            cache = simpleapi_cache,
+            parallel_download = pip_attr.parallel_download,
+        )
+
+    requirements_by_platform = parse_requirements(
+        module_ctx,
+        requirements_by_platform = requirements_files_by_platform(
+            requirements_by_platform = pip_attr.requirements_by_platform,
+            requirements_linux = pip_attr.requirements_linux,
+            requirements_lock = pip_attr.requirements_lock,
+            requirements_osx = pip_attr.requirements_darwin,
+            requirements_windows = pip_attr.requirements_windows,
+            extra_pip_args = pip_attr.extra_pip_args,
+            python_version = major_minor,
+            logger = logger,
+        ),
+        get_index_urls = get_index_urls,
+        # NOTE @aignas 2024-08-02: , we will execute any interpreter that we find either
+        # in the PATH or if specified as a label. We will configure the env
+        # markers when evaluating the requirement lines based on the output
+        # from the `requirements_files_by_platform` which should have something
+        # similar to:
+        # {
+        #    "//:requirements.txt": ["cp311_linux_x86_64", ...]
+        # }
+        #
+        # We know the target python versions that we need to evaluate the
+        # markers for and thus we don't need to use multiple python interpreter
+        # instances to perform this manipulation. This function should be executed
+        # only once by the underlying code to minimize the overhead needed to
+        # spin up a Python interpreter.
+        evaluate_markers = lambda module_ctx, requirements: evaluate_markers(
+            module_ctx,
+            requirements = requirements,
+            python_interpreter = pip_attr.python_interpreter,
+            python_interpreter_target = python_interpreter_target,
+            srcs = pip_attr._evaluate_markers_srcs,
+            logger = logger,
+        ),
+        logger = logger,
+    )
+
+    repository_platform = host_platform(module_ctx)
+    for whl_name, requirements in requirements_by_platform.items():
+        # We are not using the "sanitized name" because the user
+        # would need to guess what name we modified the whl name
+        # to.
+        annotation = whl_modifications.get(whl_name)
+        whl_name = normalize_name(whl_name)
+
+        group_name = whl_group_mapping.get(whl_name)
+        group_deps = requirement_cycles.get(group_name, [])
+
+        # Construct args separately so that the lock file can be smaller and does not include unused
+        # attrs.
+        whl_library_args = dict(
+            repo = pip_name,
+            dep_template = "@{}//{{name}}:{{target}}".format(hub_name),
+        )
+        maybe_args = dict(
+            # The following values are safe to omit if they have false like values
+            annotation = annotation,
+            download_only = pip_attr.download_only,
+            enable_implicit_namespace_pkgs = pip_attr.enable_implicit_namespace_pkgs,
+            environment = pip_attr.environment,
+            envsubst = pip_attr.envsubst,
+            experimental_target_platforms = pip_attr.experimental_target_platforms,
+            group_deps = group_deps,
+            group_name = group_name,
+            pip_data_exclude = pip_attr.pip_data_exclude,
+            python_interpreter = pip_attr.python_interpreter,
+            python_interpreter_target = python_interpreter_target,
+            whl_patches = {
+                p: json.encode(args)
+                for p, args in whl_overrides.get(whl_name, {}).items()
+            },
+        )
+        whl_library_args.update({k: v for k, v in maybe_args.items() if v})
+        maybe_args_with_default = dict(
+            # The following values have defaults next to them
+            isolated = (use_isolated(module_ctx, pip_attr), True),
+            quiet = (pip_attr.quiet, True),
+            timeout = (pip_attr.timeout, 600),
+        )
+        whl_library_args.update({
+            k: v
+            for k, (v, default) in maybe_args_with_default.items()
+            if v != default
+        })
+
+        if get_index_urls:
+            # TODO @aignas 2024-05-26: move to a separate function
+            found_something = False
+            is_exposed = False
+            for requirement in requirements:
+                is_exposed = is_exposed or requirement.is_exposed
+                for distribution in requirement.whls + [requirement.sdist]:
+                    if not distribution:
+                        # sdist may be None
+                        continue
+
+                    found_something = True
+                    # Index data was used, so the result depends on network
+                    # state and the hub is no longer reproducible.
+                    is_hub_reproducible = False
+
+                    if pip_attr.netrc:
+                        whl_library_args["netrc"] = pip_attr.netrc
+                    if pip_attr.auth_patterns:
+                        whl_library_args["auth_patterns"] = pip_attr.auth_patterns
+
+                    # pip is not used to download wheels and the python `whl_library` helpers are only extracting things
+                    whl_library_args.pop("extra_pip_args", None)
+
+                    # This is no-op because pip is not used to download the wheel.
+                    whl_library_args.pop("download_only", None)
+
+                    repo_name = whl_repo_name(pip_name, distribution.filename, distribution.sha256)
+                    whl_library_args["requirement"] = requirement.srcs.requirement
+                    whl_library_args["urls"] = [distribution.url]
+                    whl_library_args["sha256"] = distribution.sha256
+                    whl_library_args["filename"] = distribution.filename
+                    whl_library_args["experimental_target_platforms"] = requirement.target_platforms
+
+                    # Pure python wheels or sdists may need to have a platform here
+                    target_platforms = None
+                    if distribution.filename.endswith("-any.whl") or not distribution.filename.endswith(".whl"):
+                        if len(requirements) > 1:
+                            target_platforms = requirement.target_platforms
+
+                    whl_library(name = repo_name, **dict(sorted(whl_library_args.items())))
+
+                    whl_map[hub_name].setdefault(whl_name, []).append(
+                        whl_alias(
+                            repo = repo_name,
+                            version = major_minor,
+                            filename = distribution.filename,
+                            target_platforms = target_platforms,
+                        ),
+                    )
+
+            if found_something:
+                if is_exposed:
+                    exposed_packages.setdefault(hub_name, {})[whl_name] = None
+                continue
+
+        # Fall back to pip-based resolution for the host platform.
+        requirement = select_requirement(
+            requirements,
+            platform = None if pip_attr.download_only else repository_platform,
+        )
+        if not requirement:
+            # Sometimes the package is not present for host platform if there
+            # are whls specified only in particular requirements files, in that
+            # case just continue, however, if the download_only flag is set up,
+            # then the user can also specify the target platform of the wheel
+            # packages they want to download, in that case there will be always
+            # a requirement here, so we will not be in this code branch.
+            continue
+        elif get_index_urls:
+            logger.warn(lambda: "falling back to pip for installing the right file for {}".format(requirement.requirement_line))
+
+        whl_library_args["requirement"] = requirement.requirement_line
+        if requirement.extra_pip_args:
+            whl_library_args["extra_pip_args"] = requirement.extra_pip_args
+
+        # We sort so that the lock-file remains the same no matter the order of how the
+        # args are manipulated in the code going before.
+        repo_name = "{}_{}".format(pip_name, whl_name)
+        whl_library(name = repo_name, **dict(sorted(whl_library_args.items())))
+        whl_map[hub_name].setdefault(whl_name, []).append(
+            whl_alias(
+                repo = repo_name,
+                version = major_minor,
+            ),
+        )
+
+    return is_hub_reproducible
+
+def _pip_impl(module_ctx):
+    """Implementation of a class tag that creates the pip hub and corresponding pip spoke whl repositories.
+
+    This implementation iterates through all of the `pip.parse` calls and creates
+    different pip hub repositories based on the "hub_name".  Each of the
+    pip calls create spoke repos that uses a specific Python interpreter.
+
+    In a MODULE.bazel file we have:
+
+    pip.parse(
+        hub_name = "pip",
+        python_version = 3.9,
+        requirements_lock = "//:requirements_lock_3_9.txt",
+        requirements_windows = "//:requirements_windows_3_9.txt",
+    )
+    pip.parse(
+        hub_name = "pip",
+        python_version = 3.10,
+        requirements_lock = "//:requirements_lock_3_10.txt",
+        requirements_windows = "//:requirements_windows_3_10.txt",
+    )
+
+    For instance, we have a hub with the name of "pip".
+    A repository named the following is created. It is actually called last when
+    all of the pip spokes are collected.
+
+    - @@rules_python~override~pip~pip
+
+    As shown in the example code above we have the following.
+    Two different pip.parse statements exist in MODULE.bazel and provide the hub_name "pip".
+    These definitions create two different pip spoke repositories that are
+    related to the hub "pip".
+    One spoke uses Python 3.9 and the other uses Python 3.10. This code automatically
+    determines the Python version and the interpreter.
+    Both of these pip spokes contain requirements files that includes websocket
+    and its dependencies.
+
+    We also need repositories for the wheels that the different pip spokes contain.
+    For each Python version a different wheel repository is created. In our example
+    each pip spoke had a requirements file that contained websockets. We
+    then create two different wheel repositories that are named the following.
+
+    - @@rules_python~override~pip~pip_39_websockets
+    - @@rules_python~override~pip~pip_310_websockets
+
+    And if the wheel has any other dependencies subsequent wheels are created in the same fashion.
+
+    The hub repository has aliases for `pkg`, `data`, etc, which have a select that resolves to
+    a spoke repository depending on the Python version.
+
+    Also we may have more than one hub as defined in a MODULE.bazel file.  So we could have multiple
+    hubs pointing to various different pip spokes.
+
+    Some other business rules notes. A hub can only have one spoke per Python version.  We cannot
+    have a hub named "pip" that has two spokes that use the Python 3.9 interpreter.  Second
+    we cannot have the same hub name used in sub-modules.  The hub name has to be globally
+    unique.
+
+    This implementation also handles the creation of whl_modification JSON files that are used
+    during the creation of wheel libraries. These JSON files used via the annotations argument
+    when calling wheel_installer.py.
+
+    Args:
+        module_ctx: module contents
+    """
+
+    # Build all of the wheel modifications if the tag class is called.
+    _whl_mods_impl(module_ctx)
+
+    # Tracks whl files that already carry an override, to reject duplicates.
+    _overriden_whl_set = {}
+    whl_overrides = {}
+
+    for module in module_ctx.modules:
+        for attr in module.tags.override:
+            if not module.is_root:
+                fail("overrides are only supported in root modules")
+
+            if not attr.file.endswith(".whl"):
+                fail("Only whl overrides are supported at this time")
+
+            whl_name = normalize_name(parse_whl_name(attr.file).distribution)
+
+            if attr.file in _overriden_whl_set:
+                fail("Duplicate module overrides for '{}'".format(attr.file))
+            _overriden_whl_set[attr.file] = None
+
+            for patch in attr.patches:
+                if whl_name not in whl_overrides:
+                    whl_overrides[whl_name] = {}
+
+                if patch not in whl_overrides[whl_name]:
+                    whl_overrides[whl_name][patch] = struct(
+                        patch_strip = attr.patch_strip,
+                        whls = [],
+                    )
+
+                whl_overrides[whl_name][patch].whls.append(attr.file)
+
+    # Used to track all the different pip hubs and the spoke pip Python
+    # versions.
+    pip_hub_map = {}
+
+    # Keeps track of all the hub's whl repos across the different versions.
+    # dict[hub, dict[whl, dict[version, str pip]]]
+    # Where hub, whl, and pip are the repo names
+    hub_whl_map = {}
+    hub_group_map = {}
+    exposed_packages = {}
+
+    simpleapi_cache = {}
+    is_extension_reproducible = True
+
+    for mod in module_ctx.modules:
+        for pip_attr in mod.tags.parse:
+            hub_name = pip_attr.hub_name
+            if hub_name not in pip_hub_map:
+                pip_hub_map[pip_attr.hub_name] = struct(
+                    module_name = mod.name,
+                    python_versions = [pip_attr.python_version],
+                )
+            elif pip_hub_map[hub_name].module_name != mod.name:
+                # We cannot have two hubs with the same name in different
+                # modules.
+                fail((
+                    "Duplicate cross-module pip hub named '{hub}': pip hub " +
+                    "names must be unique across modules. First defined " +
+                    "by module '{first_module}', second attempted by " +
+                    "module '{second_module}'"
+                ).format(
+                    hub = hub_name,
+                    first_module = pip_hub_map[hub_name].module_name,
+                    second_module = mod.name,
+                ))
+
+            elif pip_attr.python_version in pip_hub_map[hub_name].python_versions:
+                fail((
+                    "Duplicate pip python version '{version}' for hub " +
+                    "'{hub}' in module '{module}': the Python versions " +
+                    "used for a hub must be unique"
+                ).format(
+                    hub = hub_name,
+                    module = mod.name,
+                    version = pip_attr.python_version,
+                ))
+            else:
+                pip_hub_map[pip_attr.hub_name].python_versions.append(pip_attr.python_version)
+
+            is_hub_reproducible = _create_whl_repos(module_ctx, pip_attr, hub_whl_map, whl_overrides, hub_group_map, simpleapi_cache, exposed_packages)
+            is_extension_reproducible = is_extension_reproducible and is_hub_reproducible
+
+    for hub_name, whl_map in hub_whl_map.items():
+        hub_repository(
+            name = hub_name,
+            repo_name = hub_name,
+            whl_map = {
+                key: json.encode(value)
+                for key, value in whl_map.items()
+            },
+            default_version = _major_minor_version(DEFAULT_PYTHON_VERSION),
+            packages = sorted(exposed_packages.get(hub_name, {})),
+            groups = hub_group_map.get(hub_name),
+        )
+
+    if bazel_features.external_deps.extension_metadata_has_reproducible:
+        # If we are not using the `experimental_index_url feature, the extension is fully
+        # deterministic and we don't need to create a lock entry for it.
+        #
+        # In order to be able to dogfood the `experimental_index_url` feature before it gets
+        # stabilized, we have created the `_pip_non_reproducible` function, that will result
+        # in extra entries in the lock file.
+        return module_ctx.extension_metadata(reproducible = is_extension_reproducible)
+    else:
+        return None
+
+def _pip_non_reproducible(module_ctx):
+    """Like _pip_impl, but always records the extension in the lock file.
+
+    Args:
+        module_ctx: module contents
+    """
+    _pip_impl(module_ctx)
+
+    # We default to calling the PyPI index and that will go into the
+    # MODULE.bazel.lock file, hence return nothing here.
+    return None
+
+def _pip_parse_ext_attrs(**kwargs):
+    """Get the attributes for the pip extension.
+
+    Args:
+        **kwargs: A kwarg for setting defaults for the specific attributes. The
+        key is expected to be the same as the attribute key.
+
+    Returns:
+        A dict of attributes.
+    """
+    attrs = dict({
+        "experimental_extra_index_urls": attr.string_list(
+            doc = """\
+The extra index URLs to use for downloading wheels using bazel downloader.
+Each value is going to be subject to `envsubst` substitutions if necessary.
+
+The indexes must support Simple API as described here:
+https://packaging.python.org/en/latest/specifications/simple-repository-api/
+
+This is equivalent to `--extra-index-urls` `pip` option.
+""",
+            default = [],
+        ),
+        "experimental_index_url": attr.string(
+            default = kwargs.get("experimental_index_url", ""),
+            doc = """\
+The index URL to use for downloading wheels using bazel downloader. This value is going
+to be subject to `envsubst` substitutions if necessary.
+
+The indexes must support Simple API as described here:
+https://packaging.python.org/en/latest/specifications/simple-repository-api/
+
+In the future this could be defaulted to `https://pypi.org` when this feature becomes
+stable.
+
+This is equivalent to `--index-url` `pip` option.
+""",
+        ),
+        "experimental_index_url_overrides": attr.string_dict(
+            doc = """\
+The index URL overrides for each package to use for downloading wheels using
+bazel downloader. This value is going to be subject to `envsubst` substitutions
+if necessary.
+
+The key is the package name (will be normalized before usage) and the value is the
+index URL.
+
+This design pattern has been chosen in order to be fully deterministic about which
+packages come from which source. We want to avoid issues similar to what happened in
+https://pytorch.org/blog/compromised-nightly-dependency/.
+
+The indexes must support Simple API as described here:
+https://packaging.python.org/en/latest/specifications/simple-repository-api/
+""",
+        ),
+        "hub_name": attr.string(
+            mandatory = True,
+            doc = """
+The name of the repo pip dependencies will be accessible from.
+
+This name must be unique between modules; unless your module is guaranteed to
+always be the root module, it's highly recommended to include your module name
+in the hub name. Repo mapping, `use_repo(..., pip="my_modules_pip_deps")`, can
+be used for shorter local names within your module.
+
+Within a module, the same `hub_name` can be specified to group different Python
+versions of pip dependencies under one repository name. This allows using a
+Python version-agnostic name when referring to pip dependencies; the
+correct version will be automatically selected.
+
+Typically, a module will only have a single hub of pip dependencies, but this
+is not required. Each hub is a separate resolution of pip dependencies. This
+means if different programs need different versions of some library, separate
+hubs can be created, and each program can use its respective hub's targets.
+Targets from different hubs should not be used together.
+""",
+        ),
+        "parallel_download": attr.bool(
+            doc = """\
+The flag allows to make use of parallel downloading feature in bazel 7.1 and above
+when the bazel downloader is used. This is by default enabled as it improves the
+performance by a lot, but in case the queries to the simple API are very expensive
+or when debugging authentication issues one may want to disable this feature.
+
+NOTE, This will download (potentially duplicate) data for multiple packages if
+there is more than one index available, but in general this should be negligible
+because the simple API calls are very cheap and the user should not notice any
+extra overhead.
+
+If we are in synchronous mode, then we will use the first result that we
+find in case extra indexes are specified.
+""",
+            default = True,
+        ),
+        "python_version": attr.string(
+            mandatory = True,
+            doc = """
+The Python version the dependencies are targetting, in Major.Minor format
+(e.g., "3.11") or patch level granularity (e.g. "3.11.1").
+
+If an interpreter isn't explicitly provided (using `python_interpreter` or
+`python_interpreter_target`), then the version specified here must have
+a corresponding `python.toolchain()` configured.
+""",
+        ),
+        "whl_modifications": attr.label_keyed_string_dict(
+            mandatory = False,
+            doc = """\
+A dict of labels to wheel names that is typically generated by the `whl_mods` tag class.
+The labels are JSON config files describing the modifications.
+""",
+        ),
+        "_evaluate_markers_srcs": attr.label_list(
+            default = EVALUATE_MARKERS_SRCS,
+            doc = """\
+The list of labels to use as SRCS for the marker evaluation code. This ensures that the
+code will be re-evaluated when any of the files in the default list changes.
+""",
+        ),
+    }, **ATTRS)
+    attrs.update(AUTH_ATTRS)
+
+    return attrs
+
+def _whl_mod_attrs():
+    attrs = {
+        "additive_build_content": attr.string(
+            doc = "(str, optional): Raw text to add to the generated `BUILD` file of a package.",
+        ),
+        "additive_build_content_file": attr.label(
+            doc = """\
+(label, optional): path to a BUILD file to add to the generated
+`BUILD` file of a package. You cannot use both additive_build_content and additive_build_content_file
+arguments at the same time.""",
+        ),
+        "copy_executables": attr.string_dict(
+            doc = """\
+(dict, optional): A mapping of `src` and `out` files for
+[@bazel_skylib//rules:copy_file.bzl][cf]. Targets generated here will also be flagged as
+executable.""",
+        ),
+        "copy_files": attr.string_dict(
+            doc = """\
+(dict, optional): A mapping of `src` and `out` files for
+[@bazel_skylib//rules:copy_file.bzl][cf]""",
+        ),
+        "data": attr.string_list(
+            doc = """\
+(list, optional): A list of labels to add as `data` dependencies to
+the generated `py_library` target.""",
+        ),
+        "data_exclude_glob": attr.string_list(
+            doc = """\
+(list, optional): A list of exclude glob patterns to add as `data` to
+the generated `py_library` target.""",
+        ),
+        "hub_name": attr.string(
+            doc = """\
+Name of the whl modification hub; we use this name to set the modifications for
+pip.parse. If you have different pip hubs you can use a different name,
+otherwise it is best practice to just use one.
+
+You cannot have the same `hub_name` in different modules.  You can reuse the same
+name in the same module for different wheels that you put in the same hub, but you
+cannot have a child module that uses the same `hub_name`.
+""",
+            mandatory = True,
+        ),
+        "srcs_exclude_glob": attr.string_list(
+            doc = """\
+(list, optional): A list of exclude glob patterns for the `srcs` of the
+generated `py_library` target.""",
+        ),
+        "whl_name": attr.string(
+            doc = "The whl name that the modifications are used for.",
+            mandatory = True,
+        ),
+    }
+    return attrs
+
+# NOTE: the naming of 'override' is taken from the bzlmod native
+# 'archive_override', 'git_override' bzlmod functions.
+_override_tag = tag_class(
+    attrs = {
+        "file": attr.string(
+            doc = """\
+The Python distribution file name which needs to be patched. This will be
+applied to all repositories that setup this distribution via the pip.parse tag
+class.""",
+            mandatory = True,
+        ),
+        "patch_strip": attr.int(
+            default = 0,
+            doc = """\
+The number of leading path segments to be stripped from the file name in the
+patches.""",
+        ),
+        "patches": attr.label_list(
+            doc = """\
+A list of patches to apply to the repository *after* 'whl_library' is extracted
+and BUILD.bazel file is generated.""",
+            mandatory = True,
+        ),
+    },
+    doc = """\
+Apply any overrides (e.g. patches) to a given Python distribution defined by
+other tags in this extension.""",
+)
+
+pypi = module_extension(
+    doc = """\
+This extension is used to make dependencies from pip available.
+
+pip.parse:
+To use, call `pip.parse()` and specify `hub_name` and your requirements file.
+Dependencies will be downloaded and made available in a repo named after the
+`hub_name` argument.
+
+Each `pip.parse()` call configures a particular Python version. Multiple calls
+can be made to configure different Python versions, and will be grouped by
+the `hub_name` argument. This allows the same logical name, e.g. `@pip//numpy`
+to automatically resolve to different, Python version-specific, libraries.
+
+pip.whl_mods:
+This tag class is used to help create JSON files to describe modifications to
+the BUILD files for wheels.
+""",
+    implementation = _pip_impl,
+    tag_classes = {
+        "override": _override_tag,
+        "parse": tag_class(
+            attrs = _pip_parse_ext_attrs(),
+            doc = """\
+This tag class is used to create a pip hub and all of the spokes that are part of that hub.
+This tag class reuses most of the pip attributes that are found in
+@rules_python//python/pip_install:pip_repository.bzl.
+The exception is it does not use the arg 'repo_prefix'.  We set the repository
+prefix for the user and the alias arg is always True in bzlmod.
+""",
+        ),
+        "whl_mods": tag_class(
+            attrs = _whl_mod_attrs(),
+            doc = """\
+This tag class is used to create JSON files that are used when calling wheel_builder.py.  These
+JSON files contain instructions on how to modify a wheel's project.  Each of the attributes
+creates different modifications based on the type of attribute. Prior to bzlmod these
+JSON files were referred to as annotations, and they were renamed to whl_modifications in this
+extension.
+""",
+        ),
+    },
+)
+
+pypi_internal = module_extension(
+    doc = """\
+This extension is used to make dependencies from pypi available.
+
+For now this is intended to be used internally so that usage of the `pip`
+extension in `rules_python` does not affect the evaluations of the extension
+for the consumers.
+
+pip.parse:
+To use, call `pip.parse()` and specify `hub_name` and your requirements file.
+Dependencies will be downloaded and made available in a repo named after the
+`hub_name` argument.
+
+Each `pip.parse()` call configures a particular Python version. Multiple calls
+can be made to configure different Python versions, and will be grouped by
+the `hub_name` argument. This allows the same logical name, e.g. `@pypi//numpy`
+to automatically resolve to different, Python version-specific, libraries.
+
+pip.whl_mods:
+This tag class is used to help create JSON files to describe modifications to
+the BUILD files for wheels.
+""",
+    implementation = _pip_non_reproducible,
+    tag_classes = {
+        "override": _override_tag,
+        "parse": tag_class(
+            attrs = _pip_parse_ext_attrs(
+                experimental_index_url = "https://pypi.org/simple",
+            ),
+            doc = """\
+This tag class is used to create a pypi hub and all of the spokes that are part of that hub.
+This tag class reuses most of the pypi attributes that are found in
+@rules_python//python/pip_install:pip_repository.bzl.
+The exception is it does not use the arg 'repo_prefix'.  We set the repository
+prefix for the user and the alias arg is always True in bzlmod.
+""",
+        ),
+        "whl_mods": tag_class(
+            attrs = _whl_mod_attrs(),
+            doc = """\
+This tag class is used to create JSON files that are used when calling wheel_builder.py.  These
+JSON files contain instructions on how to modify a wheel's project.  Each of the attributes
+creates different modifications based on the type of attribute. Prior to bzlmod these
+JSON files were referred to as annotations, and they were renamed to whl_modifications in this
+extension.
+""",
+        ),
+    },
+)
+
+def _whl_mods_repo_impl(rctx):
+    rctx.file("BUILD.bazel", "")
+    for whl_name, mods in rctx.attr.whl_mods.items():
+        rctx.file("{}.json".format(whl_name), mods)
+
+_whl_mods_repo = repository_rule(
+    doc = """\
+This rule creates JSON files based on the whl_mods attribute.
+""",
+    implementation = _whl_mods_repo_impl,
+    attrs = {
+        "whl_mods": attr.string_dict(
+            mandatory = True,
+            doc = "JSON-encoded string that is provided to wheel_builder.py",
+        ),
+    },
+)
diff --git a/python/private/pypi/flags.bzl b/python/private/pypi/flags.bzl
new file mode 100644
index 0000000..1e38062
--- /dev/null
+++ b/python/private/pypi/flags.bzl
@@ -0,0 +1,80 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Values and helpers for pip_repository related flags.
+
+NOTE: The transitive loads of this should be kept minimal. This avoids loading
+unnecessary files when all that are needed are flag definitions.
+"""
+
+load("@bazel_skylib//rules:common_settings.bzl", "string_flag")
+load("//python/private:enum.bzl", "enum")
+
+# Determines if we should use whls for third party
+#
+# buildifier: disable=name-conventions
+UseWhlFlag = enum(
+    # Automatically decide the effective value based on environment, target
+    # platform and the presence of distributions for a particular package.
+    AUTO = "auto",
+    # Do not use `sdist` and fail if there are no available whls suitable for the target platform.
+    ONLY = "only",
+    # Do not use whl distributions and instead build the whls from `sdist`.
+    NO = "no",
+)
+
+# Determines whether universal wheels should be preferred over arch platform specific ones.
+#
+# buildifier: disable=name-conventions
+UniversalWhlFlag = enum(
+    # Prefer platform-specific wheels over universal wheels.
+    ARCH = "arch",
+    # Prefer universal wheels over platform-specific wheels.
+    UNIVERSAL = "universal",
+)
+
+# Determines which libc flavor is preferred when selecting the linux whl distributions.
+#
+# buildifier: disable=name-conventions
+WhlLibcFlag = enum(
+    # Prefer glibc wheels (e.g. manylinux_2_17_x86_64 or linux_x86_64)
+    GLIBC = "glibc",
+    # Prefer musl wheels (e.g. musllinux_2_17_x86_64)
+    MUSL = "musl",
+)
+
+INTERNAL_FLAGS = [
+    "dist",
+    "whl_plat",
+    "whl_plat_py3",
+    "whl_plat_py3_abi3",
+    "whl_plat_pycp3x",
+    "whl_plat_pycp3x_abi3",
+    "whl_plat_pycp3x_abicp",
+    "whl_py2_py3",
+    "whl_py3",
+    "whl_py3_abi3",
+    "whl_pycp3x",
+    "whl_pycp3x_abi3",
+    "whl_pycp3x_abicp",
+]
+
+def define_pypi_internal_flags(name):
+    for flag in INTERNAL_FLAGS:
+        string_flag(
+            name = "_internal_pip_" + flag,
+            build_setting_default = "",
+            values = [""],
+            visibility = ["//visibility:public"],
+        )
diff --git a/python/pip_install/private/generate_group_library_build_bazel.bzl b/python/private/pypi/generate_group_library_build_bazel.bzl
similarity index 69%
rename from python/pip_install/private/generate_group_library_build_bazel.bzl
rename to python/private/pypi/generate_group_library_build_bazel.bzl
index c122b04..54da066 100644
--- a/python/pip_install/private/generate_group_library_build_bazel.bzl
+++ b/python/private/pypi/generate_group_library_build_bazel.bzl
@@ -14,17 +14,18 @@
 
 """Generate the BUILD.bazel contents for a repo defined by a group_library."""
 
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:text_util.bzl", "render")
 load(
-    "//python/private:labels.bzl",
+    ":labels.bzl",
     "PY_LIBRARY_IMPL_LABEL",
     "PY_LIBRARY_PUBLIC_LABEL",
     "WHEEL_FILE_IMPL_LABEL",
     "WHEEL_FILE_PUBLIC_LABEL",
 )
-load("//python/private:normalize_name.bzl", "normalize_name")
 
 _PRELUDE = """\
-load("@rules_python//python:defs.bzl", "py_library", "py_binary")
+load("@rules_python//python:defs.bzl", "py_library")
 """
 
 _GROUP_TEMPLATE = """\
@@ -62,26 +63,39 @@
           which make up the group.
     """
 
-    lib_dependencies = [
-        "@%s%s//:%s" % (repo_prefix, normalize_name(d), PY_LIBRARY_IMPL_LABEL)
-        for d in group_members
-    ]
-    whl_file_deps = [
-        "@%s%s//:%s" % (repo_prefix, normalize_name(d), WHEEL_FILE_IMPL_LABEL)
-        for d in group_members
-    ]
-    visibility = [
-        "@%s%s//:__pkg__" % (repo_prefix, normalize_name(d))
-        for d in group_members
-    ]
+    group_members = sorted(group_members)
+
+    if repo_prefix:
+        lib_dependencies = [
+            "@%s%s//:%s" % (repo_prefix, normalize_name(d), PY_LIBRARY_IMPL_LABEL)
+            for d in group_members
+        ]
+        whl_file_deps = [
+            "@%s%s//:%s" % (repo_prefix, normalize_name(d), WHEEL_FILE_IMPL_LABEL)
+            for d in group_members
+        ]
+        visibility = [
+            "@%s%s//:__pkg__" % (repo_prefix, normalize_name(d))
+            for d in group_members
+        ]
+    else:
+        lib_dependencies = [
+            "//%s:%s" % (normalize_name(d), PY_LIBRARY_IMPL_LABEL)
+            for d in group_members
+        ]
+        whl_file_deps = [
+            "//%s:%s" % (normalize_name(d), WHEEL_FILE_IMPL_LABEL)
+            for d in group_members
+        ]
+        visibility = ["//:__subpackages__"]
 
     return _GROUP_TEMPLATE.format(
         name = normalize_name(group_name),
         whl_public_label = WHEEL_FILE_PUBLIC_LABEL,
-        whl_deps = repr(whl_file_deps),
+        whl_deps = render.indent(render.list(whl_file_deps)).lstrip(),
         lib_public_label = PY_LIBRARY_PUBLIC_LABEL,
-        lib_deps = repr(lib_dependencies),
-        visibility = repr(visibility),
+        lib_deps = render.indent(render.list(lib_dependencies)).lstrip(),
+        visibility = render.indent(render.list(visibility)).lstrip(),
     )
 
 def generate_group_library_build_bazel(
diff --git a/python/pip_install/private/generate_whl_library_build_bazel.bzl b/python/private/pypi/generate_whl_library_build_bazel.bzl
similarity index 66%
rename from python/pip_install/private/generate_whl_library_build_bazel.bzl
rename to python/private/pypi/generate_whl_library_build_bazel.bzl
index 568b00e..d25f73a 100644
--- a/python/pip_install/private/generate_whl_library_build_bazel.bzl
+++ b/python/private/pypi/generate_whl_library_build_bazel.bzl
@@ -14,8 +14,10 @@
 
 """Generate the BUILD.bazel contents for a repo defined by a whl_library."""
 
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:text_util.bzl", "render")
 load(
-    "//python/private:labels.bzl",
+    ":labels.bzl",
     "DATA_LABEL",
     "DIST_INFO_LABEL",
     "PY_LIBRARY_IMPL_LABEL",
@@ -24,8 +26,6 @@
     "WHEEL_FILE_IMPL_LABEL",
     "WHEEL_FILE_PUBLIC_LABEL",
 )
-load("//python/private:normalize_name.bzl", "normalize_name")
-load("//python/private:text_util.bzl", "render")
 
 _COPY_FILE_TEMPLATE = """\
 copy_file(
@@ -48,8 +48,7 @@
 """
 
 _BUILD_TEMPLATE = """\
-load("@rules_python//python:defs.bzl", "py_library", "py_binary")
-load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
+{loads}
 
 package(default_visibility = ["//visibility:public"])
 
@@ -64,14 +63,14 @@
 )
 
 filegroup(
-    name = "{whl_file_impl_label}",
+    name = "{whl_file_label}",
     srcs = ["{whl_name}"],
     data = {whl_file_deps},
     visibility = {impl_vis},
 )
 
 py_library(
-    name = "{py_library_impl_label}",
+    name = "{py_library_label}",
     srcs = glob(
         ["site-packages/**/*.py"],
         exclude={srcs_exclude},
@@ -90,35 +89,35 @@
     tags = {tags},
     visibility = {impl_vis},
 )
-
-alias(
-   name = "{py_library_public_label}",
-   actual = "{py_library_actual_label}",
-)
-
-alias(
-   name = "{whl_file_public_label}",
-   actual = "{whl_file_actual_label}",
-)
 """
 
+def _plat_label(plat):
+    if plat.endswith("default"):
+        return plat
+    if plat.startswith("@//"):
+        return "@@" + str(Label("//:BUILD.bazel")).partition("//")[0].strip("@") + plat.strip("@")
+    elif plat.startswith("@"):
+        return str(Label(plat))
+    else:
+        return ":is_" + plat.replace("cp3", "python_3.")
+
 def _render_list_and_select(deps, deps_by_platform, tmpl):
-    deps = render.list([tmpl.format(d) for d in deps])
+    deps = render.list([tmpl.format(d) for d in sorted(deps)])
 
     if not deps_by_platform:
         return deps
 
     deps_by_platform = {
-        p if p.startswith("@") else ":is_" + p: [
+        _plat_label(p): [
             tmpl.format(d)
-            for d in deps
+            for d in sorted(deps)
         ]
-        for p, deps in deps_by_platform.items()
+        for p, deps in sorted(deps_by_platform.items())
     }
 
     # Add the default, which means that we will be just using the dependencies in
     # `deps` for platforms that are not handled in a special way by the packages
-    deps_by_platform["//conditions:default"] = []
+    deps_by_platform.setdefault("//conditions:default", [])
     deps_by_platform = render.select(deps_by_platform, value_repr = render.list)
 
     if deps == "[]":
@@ -126,9 +125,72 @@
     else:
         return "{} + {}".format(deps, deps_by_platform)
 
+def _render_config_settings(dependencies_by_platform):
+    loads = []
+    additional_content = []
+    for p in dependencies_by_platform:
+        # p can be one of the following formats:
+        # * //conditions:default
+        # * @platforms//os:{value}
+        # * @platforms//cpu:{value}
+        # * @//python/config_settings:is_python_3.{minor_version}
+        # * {os}_{cpu}
+        # * cp3{minor_version}_{os}_{cpu}
+        if p.startswith("@") or p.endswith("default"):
+            continue
+
+        abi, _, tail = p.partition("_")
+        if not abi.startswith("cp"):
+            tail = p
+            abi = ""
+
+        os, _, arch = tail.partition("_")
+        os = "" if os == "anyos" else os
+        arch = "" if arch == "anyarch" else arch
+
+        constraint_values = []
+        if arch:
+            constraint_values.append("@platforms//cpu:{}".format(arch))
+        if os:
+            constraint_values.append("@platforms//os:{}".format(os))
+
+        constraint_values_str = render.indent(render.list(constraint_values)).lstrip()
+
+        if abi:
+            if not loads:
+                loads.append("""load("@rules_python//python/config_settings:config_settings.bzl", "is_python_config_setting")""")
+
+            additional_content.append(
+                """\
+is_python_config_setting(
+    name = "is_{name}",
+    python_version = "3.{minor_version}",
+    constraint_values = {constraint_values},
+    visibility = ["//visibility:private"],
+)""".format(
+                    name = p.replace("cp3", "python_3."),
+                    minor_version = abi[len("cp3"):],
+                    constraint_values = constraint_values_str,
+                ),
+            )
+        else:
+            additional_content.append(
+                """\
+config_setting(
+    name = "is_{name}",
+    constraint_values = {constraint_values},
+    visibility = ["//visibility:private"],
+)""".format(
+                    name = p.replace("cp3", "python_3."),
+                    constraint_values = constraint_values_str,
+                ),
+            )
+
+    return loads, "\n\n".join(additional_content)
+
 def generate_whl_library_build_bazel(
         *,
-        repo_prefix,
+        dep_template,
         whl_name,
         dependencies,
         dependencies_by_platform,
@@ -141,7 +203,7 @@
     """Generate a BUILD file for an unzipped Wheel
 
     Args:
-        repo_prefix: the repo prefix that should be used for dependency lists.
+        dep_template: the dependency template that should be used for dependency lists.
         whl_name: the whl_name that this is generated for.
         dependencies: a list of PyPI packages that are dependencies to the py_library.
         dependencies_by_platform: a dict[str, list] of PyPI packages that may vary by platform.
@@ -228,72 +290,86 @@
         if deps
     }
 
-    for p in dependencies_by_platform:
-        if p.startswith("@"):
-            continue
+    loads = [
+        """load("@rules_python//python:defs.bzl", "py_library", "py_binary")""",
+        """load("@bazel_skylib//rules:copy_file.bzl", "copy_file")""",
+    ]
 
-        os, _, cpu = p.partition("_")
-
-        additional_content.append(
-            """\
-config_setting(
-    name = "is_{os}_{cpu}",
-    constraint_values = [
-        "@platforms//cpu:{cpu}",
-        "@platforms//os:{os}",
-    ],
-    visibility = ["//visibility:private"],
-)
-""".format(os = os, cpu = cpu),
-        )
+    loads_, config_settings_content = _render_config_settings(dependencies_by_platform)
+    if config_settings_content:
+        for line in loads_:
+            if line not in loads:
+                loads.append(line)
+        additional_content.append(config_settings_content)
 
     lib_dependencies = _render_list_and_select(
         deps = dependencies,
         deps_by_platform = dependencies_by_platform,
-        tmpl = "@{}{{}}//:{}".format(repo_prefix, PY_LIBRARY_PUBLIC_LABEL),
+        tmpl = dep_template.format(name = "{}", target = PY_LIBRARY_PUBLIC_LABEL),
     )
 
     whl_file_deps = _render_list_and_select(
         deps = dependencies,
         deps_by_platform = dependencies_by_platform,
-        tmpl = "@{}{{}}//:{}".format(repo_prefix, WHEEL_FILE_PUBLIC_LABEL),
+        tmpl = dep_template.format(name = "{}", target = WHEEL_FILE_PUBLIC_LABEL),
     )
 
     # If this library is a member of a group, its public label aliases need to
     # point to the group implementation rule not the implementation rules. We
     # also need to mark the implementation rules as visible to the group
     # implementation.
-    if group_name:
-        group_repo = repo_prefix + "_groups"
-        library_impl_label = "@%s//:%s_%s" % (group_repo, normalize_name(group_name), PY_LIBRARY_PUBLIC_LABEL)
-        whl_impl_label = "@%s//:%s_%s" % (group_repo, normalize_name(group_name), WHEEL_FILE_PUBLIC_LABEL)
-        impl_vis = "@%s//:__pkg__" % (group_repo,)
+    if group_name and "//:" in dep_template:
+        # This is the legacy behaviour where the group library is outside the hub repo
+        label_tmpl = dep_template.format(
+            name = "_groups",
+            target = normalize_name(group_name) + "_{}",
+        )
+        impl_vis = [dep_template.format(
+            name = "_groups",
+            target = "__pkg__",
+        )]
+        additional_content.extend([
+            "",
+            render.alias(
+                name = PY_LIBRARY_PUBLIC_LABEL,
+                actual = repr(label_tmpl.format(PY_LIBRARY_PUBLIC_LABEL)),
+            ),
+            "",
+            render.alias(
+                name = WHEEL_FILE_PUBLIC_LABEL,
+                actual = repr(label_tmpl.format(WHEEL_FILE_PUBLIC_LABEL)),
+            ),
+        ])
+        py_library_label = PY_LIBRARY_IMPL_LABEL
+        whl_file_label = WHEEL_FILE_IMPL_LABEL
+
+    elif group_name:
+        py_library_label = PY_LIBRARY_PUBLIC_LABEL
+        whl_file_label = WHEEL_FILE_PUBLIC_LABEL
+        impl_vis = [dep_template.format(name = "", target = "__subpackages__")]
 
     else:
-        library_impl_label = PY_LIBRARY_IMPL_LABEL
-        whl_impl_label = WHEEL_FILE_IMPL_LABEL
-        impl_vis = "//visibility:private"
+        py_library_label = PY_LIBRARY_PUBLIC_LABEL
+        whl_file_label = WHEEL_FILE_PUBLIC_LABEL
+        impl_vis = ["//visibility:public"]
 
     contents = "\n".join(
         [
             _BUILD_TEMPLATE.format(
-                py_library_public_label = PY_LIBRARY_PUBLIC_LABEL,
-                py_library_impl_label = PY_LIBRARY_IMPL_LABEL,
-                py_library_actual_label = library_impl_label,
+                loads = "\n".join(sorted(loads)),
+                py_library_label = py_library_label,
                 dependencies = render.indent(lib_dependencies, " " * 4).lstrip(),
                 whl_file_deps = render.indent(whl_file_deps, " " * 4).lstrip(),
                 data_exclude = repr(_data_exclude),
                 whl_name = whl_name,
-                whl_file_public_label = WHEEL_FILE_PUBLIC_LABEL,
-                whl_file_impl_label = WHEEL_FILE_IMPL_LABEL,
-                whl_file_actual_label = whl_impl_label,
+                whl_file_label = whl_file_label,
                 tags = repr(tags),
                 data_label = DATA_LABEL,
                 dist_info_label = DIST_INFO_LABEL,
                 entry_point_prefix = WHEEL_ENTRY_POINT_PREFIX,
                 srcs_exclude = repr(srcs_exclude),
                 data = repr(data),
-                impl_vis = repr([impl_vis]),
+                impl_vis = repr(impl_vis),
             ),
         ] + additional_content,
     )
diff --git a/python/private/pypi/group_library.bzl b/python/private/pypi/group_library.bzl
new file mode 100644
index 0000000..ff800e2
--- /dev/null
+++ b/python/private/pypi/group_library.bzl
@@ -0,0 +1,40 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""group_library implementation for WORKSPACE setups."""
+
+load(":generate_group_library_build_bazel.bzl", "generate_group_library_build_bazel")
+
+def _group_library_impl(rctx):
+    build_file_contents = generate_group_library_build_bazel(
+        repo_prefix = rctx.attr.repo_prefix,
+        groups = rctx.attr.groups,
+    )
+    rctx.file("BUILD.bazel", build_file_contents)
+
+group_library = repository_rule(
+    attrs = {
+        "groups": attr.string_list_dict(
+            doc = "A mapping of group names to requirements within that group.",
+        ),
+        "repo_prefix": attr.string(
+            doc = "Prefix used for the whl_library created components of each group",
+        ),
+    },
+    implementation = _group_library_impl,
+    doc = """
+Create a package containing only wrapper py_library and whl_library rules for implementing dependency groups.
+This is an implementation detail of dependency groups and should not be used alone.
+    """,
+)
diff --git a/python/private/pypi/hub_repository.bzl b/python/private/pypi/hub_repository.bzl
new file mode 100644
index 0000000..f589dd4
--- /dev/null
+++ b/python/private/pypi/hub_repository.bzl
@@ -0,0 +1,103 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("//python/private:text_util.bzl", "render")
+load(
+    ":render_pkg_aliases.bzl",
+    "render_multiplatform_pkg_aliases",
+    "whl_alias",
+)
+
+_BUILD_FILE_CONTENTS = """\
+package(default_visibility = ["//visibility:public"])
+
+# Ensure the `requirements.bzl` source can be accessed by stardoc, since users load() from it
+exports_files(["requirements.bzl"])
+"""
+
+def _impl(rctx):
+    bzl_packages = rctx.attr.packages or rctx.attr.whl_map.keys()
+    aliases = render_multiplatform_pkg_aliases(
+        aliases = {
+            key: [whl_alias(**v) for v in json.decode(values)]
+            for key, values in rctx.attr.whl_map.items()
+        },
+        default_version = rctx.attr.default_version,
+        default_config_setting = "//_config:is_python_" + rctx.attr.default_version,
+        requirement_cycles = rctx.attr.groups,
+    )
+    for path, contents in aliases.items():
+        rctx.file(path, contents)
+
+    # NOTE: we are using the canonical name with the double '@' in order to
+    # always uniquely identify a repository, as the labels are being passed as
+    # a string and the resolution of the label happens at the call-site of the
+    # `requirement`, et al. macros.
+    macro_tmpl = "@@{name}//{{}}:{{}}".format(name = rctx.attr.name)
+
+    rctx.file("BUILD.bazel", _BUILD_FILE_CONTENTS)
+    rctx.template("requirements.bzl", rctx.attr._template, substitutions = {
+        "%%ALL_DATA_REQUIREMENTS%%": render.list([
+            macro_tmpl.format(p, "data")
+            for p in bzl_packages
+        ]),
+        "%%ALL_REQUIREMENTS%%": render.list([
+            macro_tmpl.format(p, "pkg")
+            for p in bzl_packages
+        ]),
+        "%%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%": render.dict({
+            p: macro_tmpl.format(p, "whl")
+            for p in bzl_packages
+        }),
+        "%%MACRO_TMPL%%": macro_tmpl,
+    })
+
+hub_repository = repository_rule(
+    attrs = {
+        "default_version": attr.string(
+            mandatory = True,
+            doc = """\
+This is the default python version in the format of X.Y. This should match
+what is setup by the 'python' extension using the 'is_default = True'
+setting.""",
+        ),
+        "groups": attr.string_list_dict(
+            mandatory = False,
+        ),
+        "packages": attr.string_list(
+            mandatory = False,
+            doc = """\
+The list of packages that will be exposed via all_*requirements macros. Defaults to whl_map keys.
+""",
+        ),
+        "repo_name": attr.string(
+            mandatory = True,
+            doc = "The apparent name of the repo. This is needed because in bzlmod, the name attribute becomes the canonical name.",
+        ),
+        "whl_map": attr.string_dict(
+            mandatory = True,
+            doc = """\
+The wheel map where values are json.encoded strings of the whl_map constructed
+in the pip.parse tag class.
+""",
+        ),
+        "_template": attr.label(
+            default = ":requirements.bzl.tmpl.bzlmod",
+        ),
+    },
+    doc = """A rule for bzlmod multiple pip repository creation. PRIVATE USE ONLY.""",
+    implementation = _impl,
+)
diff --git a/python/private/pypi/index_sources.bzl b/python/private/pypi/index_sources.bzl
new file mode 100644
index 0000000..2166014
--- /dev/null
+++ b/python/private/pypi/index_sources.bzl
@@ -0,0 +1,53 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A file that houses private functions used in the `bzlmod` extension with the same name.
+"""
+
+def index_sources(line):
+    """Get PyPI sources from a requirements.txt line.
+
+    We interpret the spec described in
+    https://pip.pypa.io/en/stable/reference/requirement-specifiers/#requirement-specifiers
+
+    Args:
+        line(str): The requirements.txt entry.
+
+    Returns:
+        A struct with shas attribute containing a list of shas to download from pypi_index.
+    """
+    head, _, maybe_hashes = line.partition(";")
+    _, _, version = head.partition("==")
+    version = version.partition(" ")[0].strip()
+
+    if "@" in head:
+        shas = []
+    else:
+        maybe_hashes = maybe_hashes or line
+        shas = [
+            sha.strip()
+            for sha in maybe_hashes.split("--hash=sha256:")[1:]
+        ]
+
+    if head == line:
+        head = line.partition("--hash=")[0].strip()
+    else:
+        head = head + ";" + maybe_hashes.partition("--hash=")[0].strip()
+
+    return struct(
+        requirement = line if not shas else head,
+        version = version,
+        shas = sorted(shas),
+    )
diff --git a/python/private/labels.bzl b/python/private/pypi/labels.bzl
similarity index 100%
rename from python/private/labels.bzl
rename to python/private/pypi/labels.bzl
diff --git a/python/private/pypi/multi_pip_parse.bzl b/python/private/pypi/multi_pip_parse.bzl
new file mode 100644
index 0000000..fe9e2db
--- /dev/null
+++ b/python/private/pypi/multi_pip_parse.bzl
@@ -0,0 +1,160 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A pip_parse implementation for version aware toolchains in WORKSPACE."""
+
+load(":pip_repository.bzl", pip_parse = "pip_repository")
+
+def _multi_pip_parse_impl(rctx):
+    rules_python = rctx.attr._rules_python_workspace.workspace_name
+    load_statements = []
+    install_deps_calls = []
+    process_requirements_calls = []
+    for python_version, pypi_repository in rctx.attr.pip_parses.items():
+        sanitized_python_version = python_version.replace(".", "_")
+        load_statement = """\
+load(
+    "@{pypi_repository}//:requirements.bzl",
+    _{sanitized_python_version}_install_deps = "install_deps",
+    _{sanitized_python_version}_all_requirements = "all_requirements",
+)""".format(
+            pypi_repository = pypi_repository,
+            sanitized_python_version = sanitized_python_version,
+        )
+        load_statements.append(load_statement)
+        process_requirements_call = """\
+_process_requirements(
+    pkg_labels = _{sanitized_python_version}_all_requirements,
+    python_version = "{python_version}",
+    repo_prefix = "{pypi_repository}_",
+)""".format(
+            pypi_repository = pypi_repository,
+            python_version = python_version,
+            sanitized_python_version = sanitized_python_version,
+        )
+        process_requirements_calls.append(process_requirements_call)
+        install_deps_call = """    _{sanitized_python_version}_install_deps(**whl_library_kwargs)""".format(
+            sanitized_python_version = sanitized_python_version,
+        )
+        install_deps_calls.append(install_deps_call)
+
+    # NOTE @aignas 2023-10-31: I am not sure it is possible to render aliases
+    # for all of the packages using the `render_pkg_aliases` function because
+    # we need to know what the list of packages for each version is and then
+    # we would be creating directories for each.
+    macro_tmpl = "@%s_{}//:{}" % rctx.attr.name
+
+    requirements_bzl = """\
+# Generated by python/pip.bzl
+
+load("@{rules_python}//python:pip.bzl", "whl_library_alias", "pip_utils")
+{load_statements}
+
+_wheel_names = []
+_version_map = dict()
+def _process_requirements(pkg_labels, python_version, repo_prefix):
+    for pkg_label in pkg_labels:
+        wheel_name = Label(pkg_label).package
+        if not wheel_name:
+            # We are dealing with the cases where we don't have aliases.
+            workspace_name = Label(pkg_label).workspace_name
+            wheel_name = workspace_name[len(repo_prefix):]
+
+        _wheel_names.append(wheel_name)
+        if not wheel_name in _version_map:
+            _version_map[wheel_name] = dict()
+        _version_map[wheel_name][python_version] = repo_prefix
+
+{process_requirements_calls}
+
+def requirement(name):
+    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "pkg")
+
+def whl_requirement(name):
+    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "whl")
+
+def data_requirement(name):
+    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "data")
+
+def dist_info_requirement(name):
+    return "{macro_tmpl}".format(pip_utils.normalize_name(name), "dist_info")
+
+def install_deps(**whl_library_kwargs):
+{install_deps_calls}
+    for wheel_name in _wheel_names:
+        whl_library_alias(
+            name = "{name}_" + wheel_name,
+            wheel_name = wheel_name,
+            default_version = "{default_version}",
+            version_map = _version_map[wheel_name],
+        )
+""".format(
+        name = rctx.attr.name,
+        install_deps_calls = "\n".join(install_deps_calls),
+        load_statements = "\n".join(load_statements),
+        macro_tmpl = macro_tmpl,
+        process_requirements_calls = "\n".join(process_requirements_calls),
+        rules_python = rules_python,
+        default_version = rctx.attr.default_version,
+    )
+    rctx.file("requirements.bzl", requirements_bzl)
+    rctx.file("BUILD.bazel", "exports_files(['requirements.bzl'])")
+
+_multi_pip_parse = repository_rule(
+    _multi_pip_parse_impl,
+    attrs = {
+        "default_version": attr.string(),
+        "pip_parses": attr.string_dict(),
+        "_rules_python_workspace": attr.label(default = Label("//:WORKSPACE")),
+    },
+)
+
+def multi_pip_parse(name, default_version, python_versions, python_interpreter_target, requirements_lock, **kwargs):
+    """NOT INTENDED FOR DIRECT USE!
+
+    This is intended to be used by the multi_pip_parse implementation in the template of the
+    multi_toolchain_aliases repository rule.
+
+    Args:
+        name: the name of the multi_pip_parse repository.
+        default_version: the default Python version.
+        python_versions: all Python toolchain versions currently registered.
+        python_interpreter_target: a dictionary which keys are Python versions and values are resolved host interpreters.
+        requirements_lock: a dictionary which keys are Python versions and values are locked requirements files.
+        **kwargs: extra arguments passed to all wrapped pip_parse.
+
+    Returns:
+        The internal implementation of multi_pip_parse repository rule.
+    """
+    pip_parses = {}
+    for python_version in python_versions:
+        if not python_version in python_interpreter_target:
+            fail("Missing python_interpreter_target for Python version %s in '%s'" % (python_version, name))
+        if not python_version in requirements_lock:
+            fail("Missing requirements_lock for Python version %s in '%s'" % (python_version, name))
+
+        pip_parse_name = name + "_" + python_version.replace(".", "_")
+        pip_parse(
+            name = pip_parse_name,
+            python_interpreter_target = python_interpreter_target[python_version],
+            requirements_lock = requirements_lock[python_version],
+            **kwargs
+        )
+        pip_parses[python_version] = pip_parse_name
+
+    return _multi_pip_parse(
+        name = name,
+        default_version = default_version,
+        pip_parses = pip_parses,
+    )
diff --git a/python/private/pypi/package_annotation.bzl b/python/private/pypi/package_annotation.bzl
new file mode 100644
index 0000000..4a54703
--- /dev/null
+++ b/python/private/pypi/package_annotation.bzl
@@ -0,0 +1,49 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Package annotation API for WORKSPACE setups."""
+
+def package_annotation(
+        additive_build_content = None,
+        copy_files = {},
+        copy_executables = {},
+        data = [],
+        data_exclude_glob = [],
+        srcs_exclude_glob = []):
+    """Annotations to apply to the BUILD file content from package generated from a `pip_repository` rule.
+
+    [cf]: https://github.com/bazelbuild/bazel-skylib/blob/main/docs/copy_file_doc.md
+
+    Args:
+        additive_build_content (str, optional): Raw text to add to the generated `BUILD` file of a package.
+        copy_files (dict, optional): A mapping of `src` and `out` files for [@bazel_skylib//rules:copy_file.bzl][cf]
+        copy_executables (dict, optional): A mapping of `src` and `out` files for
+            [@bazel_skylib//rules:copy_file.bzl][cf]. Targets generated here will also be flagged as
+            executable.
+        data (list, optional): A list of labels to add as `data` dependencies to the generated `py_library` target.
+        data_exclude_glob (list, optional): A list of exclude glob patterns to add as `data` to the generated
+            `py_library` target.
+        srcs_exclude_glob (list, optional): A list of exclude glob patterns for the `srcs` of the generated `py_library` target.
+
+    Returns:
+        str: A json encoded string of the provided content.
+    """
+    return json.encode(struct(
+        additive_build_content = additive_build_content,
+        copy_files = copy_files,
+        copy_executables = copy_executables,
+        data = data,
+        data_exclude_glob = data_exclude_glob,
+        srcs_exclude_glob = srcs_exclude_glob,
+    ))
diff --git a/python/private/pypi/parse_requirements.bzl b/python/private/pypi/parse_requirements.bzl
new file mode 100644
index 0000000..eee97d7
--- /dev/null
+++ b/python/private/pypi/parse_requirements.bzl
@@ -0,0 +1,309 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Requirements parsing for whl_library creation.
+
+Use cases that the code needs to cover:
+* A single requirements_lock file that is used for the host platform.
+* Per-OS requirements_lock files that are used for the host platform.
+* A target platform specific requirements_lock that is used with extra
+  pip arguments with --platform, etc and download_only = True.
+
+In the last case only a single `requirements_lock` file is allowed, in all
+other cases we assume that there may be a desire to resolve the requirements
+file for the host platform to be backwards compatible with the legacy
+behavior.
+"""
+
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:repo_utils.bzl", "repo_utils")
+load(":index_sources.bzl", "index_sources")
+load(":parse_requirements_txt.bzl", "parse_requirements_txt")
+load(":whl_target_platforms.bzl", "select_whls")
+
+def parse_requirements(
+        ctx,
+        *,
+        requirements_by_platform = {},
+        extra_pip_args = [],
+        get_index_urls = None,
+        evaluate_markers = lambda *_: {},
+        logger = None):
+    """Get the requirements with platforms that the requirements apply to.
+
+    Args:
+        ctx: A context that has .read function that would read contents from a label.
+        requirements_by_platform (label_keyed_string_dict): a way to have
+            different package versions (or different packages) for different
+            os, arch combinations.
+        extra_pip_args (string list): Extra pip arguments to perform extra validations and to
+            be joined with args found in files.
+        get_index_urls: Callable[[ctx, list[str]], dict], a callable to get all
+            of the distribution URLs from a PyPI index. Accepts ctx and
+            distribution names to query.
+        evaluate_markers: A function to use to evaluate the requirements.
+            Accepts the ctx and a dict where keys are requirement lines to
+            evaluate against the platforms stored as values in the input dict.
+            Returns the same dict, but with values being platforms that are
+            compatible with the requirements line.
+        logger: repo_utils.logger or None, a simple struct to log diagnostic messages.
+
+    Returns:
+        A dict of dicts where the first key is
+        the normalized distribution name (with underscores) and the second key
+        is the requirement_line, then value and the keys are structs with the
+        following attributes:
+         * distribution: The non-normalized distribution name.
+         * srcs: The Simple API downloadable source list.
+         * requirement_line: The original requirement line.
+         * target_platforms: The list of target platforms that this package is for.
+         * is_exposed: A boolean if the package should be exposed via the hub
+           repository.
+
+        The `extra_pip_args` attribute of each struct should be passed to `whl_library`.
+    """
+    options = {}
+    requirements = {}
+    for file, plats in requirements_by_platform.items():
+        if logger:
+            logger.debug(lambda: "Using {} for {}".format(file, plats))
+        contents = ctx.read(file)
+
+        # Parse the requirements file directly in starlark to get the information
+        # needed for the whl_library declarations later.
+        parse_result = parse_requirements_txt(contents)
+
+        # Replicate a surprising behavior that WORKSPACE builds allowed:
+        # Defining a repo with the same name multiple times, but only the last
+        # definition is respected.
+        # The requirement lines might have duplicate names because lines for extras
+        # are returned as just the base package name. e.g., `foo[bar]` results
+        # in an entry like `("foo", "foo[bar] == 1.0 ...")`.
+        requirements_dict = {
+            normalize_name(entry[0]): entry
+            for entry in sorted(
+                parse_result.requirements,
+                # Get the longest match and fallback to original WORKSPACE sorting,
+                # which should get us the entry with most extras.
+                #
+                # FIXME @aignas 2024-05-13: The correct behaviour might be to get an
+                # entry with all aggregated extras, but it is unclear if we
+                # should do this now.
+                key = lambda x: (len(x[1].partition("==")[0]), x),
+            )
+        }.values()
+
+        tokenized_options = []
+        for opt in parse_result.options:
+            for p in opt.split(" "):
+                tokenized_options.append(p)
+
+        pip_args = tokenized_options + extra_pip_args
+        for plat in plats:
+            requirements[plat] = requirements_dict
+            options[plat] = pip_args
+
+    requirements_by_platform = {}
+    reqs_with_env_markers = {}
+    for target_platform, reqs_ in requirements.items():
+        extra_pip_args = options[target_platform]
+
+        for distribution, requirement_line in reqs_:
+            for_whl = requirements_by_platform.setdefault(
+                normalize_name(distribution),
+                {},
+            )
+
+            if ";" in requirement_line:
+                reqs_with_env_markers.setdefault(requirement_line, []).append(target_platform)
+
+            for_req = for_whl.setdefault(
+                (requirement_line, ",".join(extra_pip_args)),
+                struct(
+                    distribution = distribution,
+                    srcs = index_sources(requirement_line),
+                    requirement_line = requirement_line,
+                    target_platforms = [],
+                    extra_pip_args = extra_pip_args,
+                ),
+            )
+            for_req.target_platforms.append(target_platform)
+
+    # This may call to Python, so execute it early (before calling to the
+    # internet below) and ensure that we call it only once.
+    #
+    # NOTE @aignas 2024-07-13: in the future, if this is something that we want
+    # to do, we could use Python to parse the requirement lines and infer the
+    # URL of the files to download things from. This should be important for
+    # VCS package references.
+    env_marker_target_platforms = evaluate_markers(ctx, reqs_with_env_markers)
+    if logger:
+        logger.debug(lambda: "Evaluated env markers from:\n{}\n\nTo:\n{}".format(
+            reqs_with_env_markers,
+            env_marker_target_platforms,
+        ))
+
+    index_urls = {}
+    if get_index_urls:
+        index_urls = get_index_urls(
+            ctx,
+            # Use list({}) as a way to have a set
+            list({
+                req.distribution: None
+                for reqs in requirements_by_platform.values()
+                for req in reqs.values()
+            }),
+        )
+
+    ret = {}
+    for whl_name, reqs in requirements_by_platform.items():
+        requirement_target_platforms = {}
+        for r in reqs.values():
+            target_platforms = env_marker_target_platforms.get(r.requirement_line, r.target_platforms)
+            for p in target_platforms:
+                requirement_target_platforms[p] = None
+
+        is_exposed = len(requirement_target_platforms) == len(requirements)
+        if not is_exposed and logger:
+            logger.debug(lambda: "Package '{}' will not be exposed because it is only present on a subset of platforms: {} out of {}".format(
+                whl_name,
+                sorted(requirement_target_platforms),
+                sorted(requirements),
+            ))
+
+        for r in sorted(reqs.values(), key = lambda r: r.requirement_line):
+            whls, sdist = _add_dists(
+                requirement = r,
+                index_urls = index_urls.get(whl_name),
+                logger = logger,
+            )
+
+            target_platforms = env_marker_target_platforms.get(r.requirement_line, r.target_platforms)
+            ret.setdefault(whl_name, []).append(
+                struct(
+                    distribution = r.distribution,
+                    srcs = r.srcs,
+                    requirement_line = r.requirement_line,
+                    target_platforms = sorted(target_platforms),
+                    extra_pip_args = r.extra_pip_args,
+                    whls = whls,
+                    sdist = sdist,
+                    is_exposed = is_exposed,
+                ),
+            )
+
+    if logger:
+        logger.debug(lambda: "Will configure whl repos: {}".format(ret.keys()))
+
+    return ret
+
+def select_requirement(requirements, *, platform):
+    """A simple function to get a requirement for a particular platform.
+
+    Args:
+        requirements (list[struct]): The list of requirements as returned by
+            the `parse_requirements` function above.
+        platform (str or None): The host platform. Usually an output of the
+            `host_platform` function. If None, then this function will return
+            the first requirement it finds.
+
+    Returns:
+        None if not found or a struct returned as one of the values in the
+        parse_requirements function. The requirement that should be downloaded
+        by the host platform will be returned.
+    """
+    maybe_requirement = [
+        req
+        for req in requirements
+        if not platform or [p for p in req.target_platforms if p.endswith(platform)]
+    ]
+    if not maybe_requirement:
+        # Sometimes the package is not present for host platform if there
+        # are whls specified only in particular requirements files, in that
+        # case just continue, however, if the download_only flag is set up,
+        # then the user can also specify the target platform of the wheel
+        # packages they want to download, in that case there will be always
+        # a requirement here, so we will not be in this code branch.
+        return None
+
+    return maybe_requirement[0]
+
+def host_platform(ctx):
+    """Return a string representation of the repository OS.
+
+    Args:
+        ctx (struct): The `module_ctx` or `repository_ctx` attribute.
+
+    Returns:
+        The string representation of the platform that we can later used in the `pip`
+        machinery.
+    """
+    return "{}_{}".format(
+        repo_utils.get_platforms_os_name(ctx),
+        repo_utils.get_platforms_cpu_name(ctx),
+    )
+
+def _add_dists(*, requirement, index_urls, logger = None):
+    """Populate dists based on the information from the PyPI index.
+
+    This function will modify the given requirements_by_platform data structure.
+
+    Args:
+        requirement: The result of parse_requirements function.
+        index_urls: The result of simpleapi_download.
+        logger: A logger for printing diagnostic info.
+    """
+    if not index_urls:
+        return [], None
+
+    whls = []
+    sdist = None
+
+    # TODO @aignas 2024-05-22: it is in theory possible to add all
+    # requirements by version instead of by sha256. This may be useful
+    # for some projects.
+    for sha256 in requirement.srcs.shas:
+        # For now if the artifact is marked as yanked we just ignore it.
+        #
+        # See https://packaging.python.org/en/latest/specifications/simple-repository-api/#adding-yank-support-to-the-simple-api
+
+        maybe_whl = index_urls.whls.get(sha256)
+        if maybe_whl and not maybe_whl.yanked:
+            whls.append(maybe_whl)
+            continue
+
+        maybe_sdist = index_urls.sdists.get(sha256)
+        if maybe_sdist and not maybe_sdist.yanked:
+            sdist = maybe_sdist
+            continue
+
+        if logger:
+            logger.warn(lambda: "Could not find a whl or an sdist with sha256={}".format(sha256))
+
+    yanked = {}
+    for dist in whls + [sdist]:
+        if dist and dist.yanked:
+            yanked.setdefault(dist.yanked, []).append(dist.filename)
+    if yanked:
+        logger.warn(lambda: "\n".join([
+            "the following distributions got yanked:",
+        ] + [
+            "reason: {}\n  {}".format(reason, "\n".join(sorted(dists)))
+            for reason, dists in yanked.items()
+        ]))
+
+    # Filter out the wheels that are incompatible with the target_platforms.
+    whls = select_whls(whls = whls, want_platforms = requirement.target_platforms, logger = logger)
+
+    return whls, sdist
diff --git a/python/private/pypi/parse_requirements_txt.bzl b/python/private/pypi/parse_requirements_txt.bzl
new file mode 100644
index 0000000..6f51d03
--- /dev/null
+++ b/python/private/pypi/parse_requirements_txt.bzl
@@ -0,0 +1,133 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Pip requirements parser for Starlark."""
+
+_STATE = struct(
+    # Consume extraneous whitespace
+    ConsumeSpace = 0,
+    # Consume a comment
+    ConsumeComment = 1,
+    # Parse the name of a pip package
+    ParseDependency = 2,
+    # Parse a full requirement line
+    ParseRequirement = 3,
+    # Parse a pip option
+    ParseOption = 4,
+)
+
+EOF = {}
+
+def parse_requirements_txt(content):
+    """A simplistic (and incomplete) pip requirements lockfile parser.
+
+    Parses package names and their full requirement lines, as well as pip
+    options.
+
+    Args:
+      content: lockfile content as a string
+
+    Returns:
+      Struct with fields `requirements` and `options`.
+
+      requirements: List of requirements, where each requirement is a 2-element
+        tuple containing the package name and the requirement line.
+        E.g., [('certifi', 'certifi==2021.10.8 --hash=sha256:7888...'), ...]
+
+      options: List of pip option lines
+    """
+    content = content.replace("\r", "")
+
+    result = struct(
+        requirements = [],
+        options = [],
+    )
+    state = _STATE.ConsumeSpace
+    buffer = ""
+
+    inputs = content.elems()[:]
+    inputs.append(EOF)
+
+    for input in inputs:
+        if state == _STATE.ConsumeSpace:
+            (state, buffer) = _handleConsumeSpace(input)
+        elif state == _STATE.ConsumeComment:
+            (state, buffer) = _handleConsumeComment(input, buffer, result)
+        elif state == _STATE.ParseDependency:
+            (state, buffer) = _handleParseDependency(input, buffer, result)
+        elif state == _STATE.ParseOption:
+            (state, buffer) = _handleParseOption(input, buffer, result)
+        elif state == _STATE.ParseRequirement:
+            (state, buffer) = _handleParseRequirement(input, buffer, result)
+        else:
+            fail("Unknown state %d" % state)
+
+    return result
+
+def _handleConsumeSpace(input):
+    if input == EOF:
+        return (_STATE.ConsumeSpace, "")
+    if input.isspace():
+        return (_STATE.ConsumeSpace, "")
+    elif input == "#":
+        return (_STATE.ConsumeComment, "")
+    elif input == "-":
+        return (_STATE.ParseOption, input)
+
+    return (_STATE.ParseDependency, input)
+
+def _handleConsumeComment(input, buffer, result):
+    if input == "\n":
+        if len(result.requirements) > 0 and len(result.requirements[-1]) == 1:
+            result.requirements[-1] = (result.requirements[-1][0], buffer.rstrip(" \n"))
+            return (_STATE.ConsumeSpace, "")
+        elif len(buffer) > 0:
+            result.options.append(buffer.rstrip(" \n"))
+            return (_STATE.ConsumeSpace, "")
+        return (_STATE.ConsumeSpace, "")
+    return (_STATE.ConsumeComment, buffer)
+
+def _handleParseDependency(input, buffer, result):
+    if input == EOF:
+        fail("Encountered unexpected end of file while parsing requirement")
+    elif input.isspace() or input in [">", "<", "~", "=", ";", "["]:
+        result.requirements.append((buffer,))
+        return (_STATE.ParseRequirement, buffer + input)
+
+    return (_STATE.ParseDependency, buffer + input)
+
+def _handleParseOption(input, buffer, result):
+    if input == "\n" and buffer.endswith("\\"):
+        return (_STATE.ParseOption, buffer[0:-1])
+    elif input == " ":
+        result.options.append(buffer.rstrip("\n"))
+        return (_STATE.ParseOption, "")
+    elif input == "\n" or input == EOF:
+        result.options.append(buffer.rstrip("\n"))
+        return (_STATE.ConsumeSpace, "")
+    elif input == "#" and (len(buffer) == 0 or buffer[-1].isspace()):
+        return (_STATE.ConsumeComment, buffer)
+
+    return (_STATE.ParseOption, buffer + input)
+
+def _handleParseRequirement(input, buffer, result):
+    if input == "\n" and buffer.endswith("\\"):
+        return (_STATE.ParseRequirement, buffer[0:-1])
+    elif input == "\n" or input == EOF:
+        result.requirements[-1] = (result.requirements[-1][0], buffer.rstrip(" \n"))
+        return (_STATE.ConsumeSpace, "")
+    elif input == "#" and (len(buffer) == 0 or buffer[-1].isspace()):
+        return (_STATE.ConsumeComment, buffer)
+
+    return (_STATE.ParseRequirement, buffer + input)
diff --git a/python/private/pypi/parse_simpleapi_html.bzl b/python/private/pypi/parse_simpleapi_html.bzl
new file mode 100644
index 0000000..81ee385
--- /dev/null
+++ b/python/private/pypi/parse_simpleapi_html.bzl
@@ -0,0 +1,126 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Parse SimpleAPI HTML in Starlark.
+"""
+
+def parse_simpleapi_html(*, url, content):
+    """Get the package URLs for given shas by parsing the Simple API HTML.
+
+    Args:
+        url(str): The URL that the HTML content can be downloaded from.
+        content(str): The Simple API HTML content.
+
+    Returns:
+        A list of structs with:
+        * filename: The filename of the artifact.
+        * url: The URL to download the artifact.
+        * sha256: The sha256 of the artifact.
+        * metadata_sha256: The whl METADATA sha256 if we can download it. If this is
+          present, then the 'metadata_url' is also present. Defaults to "".
+        * metadata_url: The URL for the METADATA if we can download it. Defaults to "".
+    """
+    sdists = {}
+    whls = {}
+    lines = content.split("<a href=\"")
+
+    _, _, api_version = lines[0].partition("name=\"pypi:repository-version\" content=\"")
+    api_version, _, _ = api_version.partition("\"")
+
+    # We must assume the 1.0 if it is not present
+    # See https://packaging.python.org/en/latest/specifications/simple-repository-api/#clients
+    api_version = api_version or "1.0"
+    api_version = tuple([int(i) for i in api_version.split(".")])
+
+    if api_version >= (2, 0):
+        # We don't expect to have version 2.0 here, but have this check in place just in case.
+        # https://packaging.python.org/en/latest/specifications/simple-repository-api/#versioning-pypi-s-simple-api
+        fail("Unsupported API version: {}".format(api_version))
+
+    # Each line follows the following pattern
+    # <a href="https://...#sha256=..." attribute1="foo" ... attributeN="bar">filename</a><br />
+    for line in lines[1:]:
+        dist_url, _, tail = line.partition("#sha256=")
+        sha256, _, tail = tail.partition("\"")
+
+        # See https://packaging.python.org/en/latest/specifications/simple-repository-api/#adding-yank-support-to-the-simple-api
+        yanked = "data-yanked" in line
+
+        head, _, _ = tail.rpartition("</a>")
+        maybe_metadata, _, filename = head.rpartition(">")
+
+        metadata_sha256 = ""
+        metadata_url = ""
+        for metadata_marker in ["data-core-metadata", "data-dist-info-metadata"]:
+            metadata_marker = metadata_marker + "=\"sha256="
+            if metadata_marker in maybe_metadata:
+                # Implement https://peps.python.org/pep-0714/
+                _, _, tail = maybe_metadata.partition(metadata_marker)
+                metadata_sha256, _, _ = tail.partition("\"")
+                metadata_url = dist_url + ".metadata"
+                break
+
+        if filename.endswith(".whl"):
+            whls[sha256] = struct(
+                filename = filename,
+                url = _absolute_url(url, dist_url),
+                sha256 = sha256,
+                metadata_sha256 = metadata_sha256,
+                metadata_url = _absolute_url(url, metadata_url),
+                yanked = yanked,
+            )
+        else:
+            sdists[sha256] = struct(
+                filename = filename,
+                url = _absolute_url(url, dist_url),
+                sha256 = sha256,
+                metadata_sha256 = "",
+                metadata_url = "",
+                yanked = yanked,
+            )
+
+    return struct(
+        sdists = sdists,
+        whls = whls,
+    )
+
+def _get_root_directory(url):
+    scheme_end = url.find("://")
+    if scheme_end == -1:
+        fail("Invalid URL format")
+
+    scheme = url[:scheme_end]
+    host_end = url.find("/", scheme_end + 3)
+    if host_end == -1:
+        host_end = len(url)
+    host = url[scheme_end + 3:host_end]
+
+    return "{}://{}".format(scheme, host)
+
+def _absolute_url(index_url, candidate):
+    if candidate.startswith("/"):
+        # absolute url
+        root_directory = _get_root_directory(index_url)
+        return "{}{}".format(root_directory, candidate)
+
+    if not candidate.startswith(".."):
+        return candidate
+
+    candidate_parts = candidate.split("..")
+    last = candidate_parts[-1]
+    for _ in range(len(candidate_parts) - 1):
+        index_url, _, _ = index_url.rstrip("/").rpartition("/")
+
+    return "{}/{}".format(index_url, last.strip("/"))
diff --git a/python/private/parse_whl_name.bzl b/python/private/pypi/parse_whl_name.bzl
similarity index 71%
rename from python/private/parse_whl_name.bzl
rename to python/private/pypi/parse_whl_name.bzl
index 9c7866e..063ac84 100644
--- a/python/private/parse_whl_name.bzl
+++ b/python/private/pypi/parse_whl_name.bzl
@@ -16,6 +16,30 @@
 A starlark implementation of a Wheel filename parsing.
 """
 
+# Taken from https://peps.python.org/pep-0600/
+_LEGACY_ALIASES = {
+    "manylinux1_i686": "manylinux_2_5_i686",
+    "manylinux1_x86_64": "manylinux_2_5_x86_64",
+    "manylinux2010_i686": "manylinux_2_12_i686",
+    "manylinux2010_x86_64": "manylinux_2_12_x86_64",
+    "manylinux2014_aarch64": "manylinux_2_17_aarch64",
+    "manylinux2014_armv7l": "manylinux_2_17_armv7l",
+    "manylinux2014_i686": "manylinux_2_17_i686",
+    "manylinux2014_ppc64": "manylinux_2_17_ppc64",
+    "manylinux2014_ppc64le": "manylinux_2_17_ppc64le",
+    "manylinux2014_s390x": "manylinux_2_17_s390x",
+    "manylinux2014_x86_64": "manylinux_2_17_x86_64",
+}
+
+def normalize_platform_tag(tag):
+    """Resolve legacy aliases to modern equivalents for easier parsing elsewhere."""
+    return ".".join(list({
+        # The `list({})` usage here is to use it as a string set, where we will
+        # deduplicate, but otherwise retain the order of the tags.
+        _LEGACY_ALIASES.get(p, p): None
+        for p in tag.split(".")
+    }))
+
 def parse_whl_name(file):
     """Parse whl file name into a struct of constituents.
 
@@ -68,5 +92,5 @@
         build_tag = build_tag,
         python_tag = python_tag,
         abi_tag = abi_tag,
-        platform_tag = platform_tag,
+        platform_tag = normalize_platform_tag(platform_tag),
     )
diff --git a/python/private/patch_whl.bzl b/python/private/pypi/patch_whl.bzl
similarity index 87%
rename from python/private/patch_whl.bzl
rename to python/private/pypi/patch_whl.bzl
index 9e3119f..c2c633d 100644
--- a/python/private/patch_whl.bzl
+++ b/python/private/pypi/patch_whl.bzl
@@ -27,7 +27,8 @@
 within the wheel.
 """
 
-load("//python/private:parse_whl_name.bzl", "parse_whl_name")
+load("//python/private:repo_utils.bzl", "repo_utils")
+load(":parse_whl_name.bzl", "parse_whl_name")
 
 _rules_python_root = Label("//:BUILD.bazel")
 
@@ -40,7 +41,7 @@
         whl_path: The whl file name to be patched.
         patches: a label-keyed-int dict that has the patch files as keys and
             the patch_strip as the value.
-        **kwargs: extras passed to rctx.execute.
+        **kwargs: extras passed to repo_utils.execute_checked.
 
     Returns:
         value of the repackaging action.
@@ -75,11 +76,12 @@
 
     record_patch = rctx.path("RECORD.patch")
 
-    result = rctx.execute(
-        [
+    repo_utils.execute_checked(
+        rctx,
+        arguments = [
             python_interpreter,
             "-m",
-            "python.private.repack_whl",
+            "python.private.pypi.repack_whl",
             "--record-patch",
             record_patch,
             whl_input,
@@ -91,16 +93,6 @@
         **kwargs
     )
 
-    if result.return_code:
-        fail(
-            "repackaging .whl {whl} failed: with exit code '{return_code}':\n{stdout}\n\nstderr:\n{stderr}".format(
-                whl = whl_input.basename,
-                stdout = result.stdout,
-                stderr = result.stderr,
-                return_code = result.return_code,
-            ),
-        )
-
     if record_patch.exists:
         record_patch_contents = rctx.read(record_patch)
         warning_msg = """WARNING: the resultant RECORD file of the patch wheel is different
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/python/private/pypi/pip.bzl
similarity index 78%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to python/private/pypi/pip.bzl
index bbdfb4c..cb8e111 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/python/private/pypi/pip.bzl
@@ -12,3 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+"pip module extensions for use with bzlmod."
+
+load("//python/private/pypi:extension.bzl", "pypi", "pypi_internal")
+
+pip = pypi
+pip_internal = pypi_internal
diff --git a/python/private/pypi/pip_compile.bzl b/python/private/pypi/pip_compile.bzl
new file mode 100644
index 0000000..a6cabf7
--- /dev/null
+++ b/python/private/pypi/pip_compile.bzl
@@ -0,0 +1,179 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Rules to verify and update pip-compile locked requirements.txt.
+
+NOTE @aignas 2024-06-23: We are using the implementation specific name here to
+make it possible to have multiple tools inside the `pypi` directory
+"""
+
+load("//python:defs.bzl", _py_binary = "py_binary", _py_test = "py_test")
+
+def pip_compile(
+        name,
+        srcs = None,
+        src = None,
+        extra_args = [],
+        extra_deps = [],
+        generate_hashes = True,
+        py_binary = _py_binary,
+        py_test = _py_test,
+        requirements_in = None,
+        requirements_txt = None,
+        requirements_darwin = None,
+        requirements_linux = None,
+        requirements_windows = None,
+        visibility = ["//visibility:private"],
+        tags = None,
+        **kwargs):
+    """Generates targets for managing pip dependencies with pip-compile.
+
+    By default this rules generates a filegroup named "[name]" which can be included in the data
+    of some other compile_pip_requirements rule that references these requirements
+    (e.g. with `-r ../other/requirements.txt`).
+
+    It also generates two targets for running pip-compile:
+
+    - validate with `bazel test [name]_test`
+    - update with   `bazel run [name].update`
+
+    If you are using a version control system, the requirements.txt generated by this rule should
+    be checked into it to ensure that all developers/users have the same dependency versions.
+
+    Args:
+        name: base name for generated targets, typically "requirements".
+        srcs: a list of files containing inputs to dependency resolution. If not specified,
+            defaults to `["pyproject.toml"]`. Supported formats are:
+            * a requirements text file, usually named `requirements.in`
+            * A `.toml` file, where the `project.dependencies` list is used as per
+              [PEP621](https://peps.python.org/pep-0621/).
+        src: file containing inputs to dependency resolution. If not specified,
+            defaults to `pyproject.toml`. Supported formats are:
+            * a requirements text file, usually named `requirements.in`
+            * A `.toml` file, where the `project.dependencies` list is used as per
+              [PEP621](https://peps.python.org/pep-0621/).
+        extra_args: passed to pip-compile.
+        extra_deps: extra dependencies passed to pip-compile.
+        generate_hashes: whether to put hashes in the requirements_txt file.
+        py_binary: the py_binary rule to be used.
+        py_test: the py_test rule to be used.
+        requirements_in: file expressing desired dependencies. Deprecated, use src or srcs instead.
+        requirements_txt: result of "compiling" the requirements.in file.
+        requirements_linux: File of linux specific resolve output to check validate if requirement.in has changes.
+        requirements_darwin: File of darwin specific resolve output to check validate if requirement.in has changes.
+        requirements_windows: File of windows specific resolve output to check validate if requirement.in has changes.
+        tags: tagging attribute common to all build rules, passed to both the _test and .update rules.
+        visibility: passed to both the _test and .update rules.
+        **kwargs: other bazel attributes passed to the "_test" rule.
+    """
+    if len([x for x in [srcs, src, requirements_in] if x != None]) > 1:
+        fail("At most one of 'srcs', 'src', and 'requirements_in' attributes may be provided")
+
+    if requirements_in:
+        srcs = [requirements_in]
+    elif src:
+        srcs = [src]
+    else:
+        srcs = srcs or ["pyproject.toml"]
+
+    requirements_txt = name + ".txt" if requirements_txt == None else requirements_txt
+
+    # "Default" target produced by this macro
+    # Allow a compile_pip_requirements rule to include another one in the data
+    # for a requirements file that does `-r ../other/requirements.txt`
+    native.filegroup(
+        name = name,
+        srcs = kwargs.pop("data", []) + [requirements_txt],
+        visibility = visibility,
+    )
+
+    data = [name, requirements_txt] + srcs + [f for f in (requirements_linux, requirements_darwin, requirements_windows) if f != None]
+
+    # Use the Label constructor so this is expanded in the context of the file
+    # where it appears, which is to say, in @rules_python
+    pip_compile = Label("//python/private/pypi/dependency_resolver:dependency_resolver.py")
+
+    loc = "$(rlocationpath {})"
+
+    args = ["--src=%s" % loc.format(src) for src in srcs] + [
+        loc.format(requirements_txt),
+        "//%s:%s.update" % (native.package_name(), name),
+        "--resolver=backtracking",
+        "--allow-unsafe",
+    ]
+    if generate_hashes:
+        args.append("--generate-hashes")
+    if requirements_linux:
+        args.append("--requirements-linux={}".format(loc.format(requirements_linux)))
+    if requirements_darwin:
+        args.append("--requirements-darwin={}".format(loc.format(requirements_darwin)))
+    if requirements_windows:
+        args.append("--requirements-windows={}".format(loc.format(requirements_windows)))
+    args.extend(extra_args)
+
+    deps = [
+        Label("@pypi__build//:lib"),
+        Label("@pypi__click//:lib"),
+        Label("@pypi__colorama//:lib"),
+        Label("@pypi__importlib_metadata//:lib"),
+        Label("@pypi__more_itertools//:lib"),
+        Label("@pypi__packaging//:lib"),
+        Label("@pypi__pep517//:lib"),
+        Label("@pypi__pip//:lib"),
+        Label("@pypi__pip_tools//:lib"),
+        Label("@pypi__pyproject_hooks//:lib"),
+        Label("@pypi__setuptools//:lib"),
+        Label("@pypi__tomli//:lib"),
+        Label("@pypi__zipp//:lib"),
+        Label("//python/runfiles:runfiles"),
+    ] + extra_deps
+
+    tags = tags or []
+    tags.append("requires-network")
+    tags.append("no-remote-exec")
+    tags.append("no-sandbox")
+    attrs = {
+        "args": args,
+        "data": data,
+        "deps": deps,
+        "main": pip_compile,
+        "srcs": [pip_compile],
+        "tags": tags,
+        "visibility": visibility,
+    }
+
+    # setuptools (the default python build tool) attempts to find user
+    # configuration in the user's home directory. This seems to work fine on
+    # linux and macOS, but fails on Windows, so we conditionally provide a fake
+    # USERPROFILE env variable to allow setuptools to proceed without finding
+    # user-provided configuration.
+    kwargs["env"] = select({
+        "@@platforms//os:windows": {"USERPROFILE": "Z:\\FakeSetuptoolsHomeDirectoryHack"},
+        "//conditions:default": {},
+    }) | kwargs.get("env", {})
+
+    py_binary(
+        name = name + ".update",
+        **attrs
+    )
+
+    timeout = kwargs.pop("timeout", "short")
+
+    py_test(
+        name = name + "_test",
+        timeout = timeout,
+        # kwargs could contain test-specific attributes like size or timeout
+        **dict(attrs, **kwargs)
+    )
diff --git a/python/private/pypi/pip_repository.bzl b/python/private/pypi/pip_repository.bzl
new file mode 100644
index 0000000..0c9e300
--- /dev/null
+++ b/python/private/pypi/pip_repository.bzl
@@ -0,0 +1,351 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@bazel_skylib//lib:sets.bzl", "sets")
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR")
+load("//python/private:text_util.bzl", "render")
+load(":evaluate_markers.bzl", "evaluate_markers", EVALUATE_MARKERS_SRCS = "SRCS")
+load(":parse_requirements.bzl", "host_platform", "parse_requirements", "select_requirement")
+load(":pip_repository_attrs.bzl", "ATTRS")
+load(":render_pkg_aliases.bzl", "render_pkg_aliases", "whl_alias")
+load(":requirements_files_by_platform.bzl", "requirements_files_by_platform")
+
+def _get_python_interpreter_attr(rctx):
+    """A helper function for getting the `python_interpreter` attribute or its default
+
+    Args:
+        rctx (repository_ctx): Handle to the rule repository context.
+
+    Returns:
+        str: The attribute value or its default
+    """
+    if rctx.attr.python_interpreter:
+        return rctx.attr.python_interpreter
+
+    if "win" in rctx.os.name:
+        return "python.exe"
+    else:
+        return "python3"
+
+def use_isolated(ctx, attr):
+    """Determine whether or not to pass the pip `--isolated` flag to the pip invocation.
+
+    Args:
+        ctx: repository or module context
+        attr: attributes for the repo rule or tag extension
+
+    Returns:
+        True if --isolated should be passed
+    """
+    use_isolated = attr.isolated
+
+    # The environment variable will take precedence over the attribute
+    isolated_env = ctx.os.environ.get("RULES_PYTHON_PIP_ISOLATED", None)
+    if isolated_env != None:
+        if isolated_env.lower() in ("0", "false"):
+            use_isolated = False
+        else:
+            use_isolated = True
+
+    return use_isolated
+
+_BUILD_FILE_CONTENTS = """\
+package(default_visibility = ["//visibility:public"])
+
+# Ensure the `requirements.bzl` source can be accessed by stardoc, since users load() from it
+exports_files(["requirements.bzl"])
+"""
+
+def _pip_repository_impl(rctx):
+    requirements_by_platform = parse_requirements(
+        rctx,
+        requirements_by_platform = requirements_files_by_platform(
+            requirements_by_platform = rctx.attr.requirements_by_platform,
+            requirements_linux = rctx.attr.requirements_linux,
+            requirements_lock = rctx.attr.requirements_lock,
+            requirements_osx = rctx.attr.requirements_darwin,
+            requirements_windows = rctx.attr.requirements_windows,
+            extra_pip_args = rctx.attr.extra_pip_args,
+        ),
+        extra_pip_args = rctx.attr.extra_pip_args,
+        evaluate_markers = lambda rctx, requirements: evaluate_markers(
+            rctx,
+            requirements = requirements,
+            python_interpreter = rctx.attr.python_interpreter,
+            python_interpreter_target = rctx.attr.python_interpreter_target,
+            srcs = rctx.attr._evaluate_markers_srcs,
+        ),
+    )
+    selected_requirements = {}
+    options = None
+    repository_platform = host_platform(rctx)
+    for name, requirements in requirements_by_platform.items():
+        r = select_requirement(
+            requirements,
+            platform = None if rctx.attr.download_only else repository_platform,
+        )
+        if not r:
+            continue
+        options = options or r.extra_pip_args
+        selected_requirements[name] = r.requirement_line
+
+    bzl_packages = sorted(selected_requirements.keys())
+
+    # Normalize cycles first
+    requirement_cycles = {
+        name: sorted(sets.to_list(sets.make(deps)))
+        for name, deps in rctx.attr.experimental_requirement_cycles.items()
+    }
+
+    # Check for conflicts between cycles _before_ we normalize package names so
+    # that reported errors use the names the user specified
+    for i in range(len(requirement_cycles)):
+        left_group = requirement_cycles.keys()[i]
+        left_deps = requirement_cycles.values()[i]
+        for j in range(len(requirement_cycles) - (i + 1)):
+            right_deps = requirement_cycles.values()[1 + i + j]
+            right_group = requirement_cycles.keys()[1 + i + j]
+            for d in left_deps:
+                if d in right_deps:
+                    fail("Error: Requirement %s cannot be repeated between cycles %s and %s; please merge the cycles." % (d, left_group, right_group))
+
+    # And normalize the names as used in the cycle specs
+    #
+    # NOTE: We must check that a listed dependency is actually in the actual
+    # requirements set for the current platform so that we can support cycles in
+    # platform-conditional requirements. Otherwise we'll blindly generate a
+    # label referencing a package which may not be installed on the current
+    # platform.
+    requirement_cycles = {
+        normalize_name(name): sorted([normalize_name(d) for d in group if normalize_name(d) in bzl_packages])
+        for name, group in requirement_cycles.items()
+    }
+
+    imports = [
+        # NOTE: Maintain the order consistent with `buildifier`
+        'load("@rules_python//python:pip.bzl", "pip_utils")',
+        'load("@rules_python//python/pip_install:pip_repository.bzl", "group_library", "whl_library")',
+    ]
+
+    annotations = {}
+    for pkg, annotation in rctx.attr.annotations.items():
+        filename = "{}.annotation.json".format(normalize_name(pkg))
+        rctx.file(filename, json.encode_indent(json.decode(annotation)))
+        annotations[pkg] = "@{name}//:{filename}".format(name = rctx.attr.name, filename = filename)
+
+    config = {
+        "download_only": rctx.attr.download_only,
+        "enable_implicit_namespace_pkgs": rctx.attr.enable_implicit_namespace_pkgs,
+        "environment": rctx.attr.environment,
+        "envsubst": rctx.attr.envsubst,
+        "extra_pip_args": options,
+        "isolated": use_isolated(rctx, rctx.attr),
+        "pip_data_exclude": rctx.attr.pip_data_exclude,
+        "python_interpreter": _get_python_interpreter_attr(rctx),
+        "quiet": rctx.attr.quiet,
+        "repo": rctx.attr.name,
+        "timeout": rctx.attr.timeout,
+    }
+    if rctx.attr.use_hub_alias_dependencies:
+        config["dep_template"] = "@{}//{{name}}:{{target}}".format(rctx.attr.name)
+    else:
+        config["repo_prefix"] = "{}_".format(rctx.attr.name)
+
+    if rctx.attr.python_interpreter_target:
+        config["python_interpreter_target"] = str(rctx.attr.python_interpreter_target)
+    if rctx.attr.experimental_target_platforms:
+        config["experimental_target_platforms"] = rctx.attr.experimental_target_platforms
+
+    macro_tmpl = "@%s//{}:{}" % rctx.attr.name
+
+    aliases = render_pkg_aliases(
+        aliases = {
+            pkg: [whl_alias(repo = rctx.attr.name + "_" + pkg)]
+            for pkg in bzl_packages or []
+        },
+    )
+    for path, contents in aliases.items():
+        rctx.file(path, contents)
+
+    rctx.file("BUILD.bazel", _BUILD_FILE_CONTENTS)
+    rctx.template("requirements.bzl", rctx.attr._template, substitutions = {
+        "    # %%GROUP_LIBRARY%%": """\
+    group_repo = "{name}__groups"
+    group_library(
+        name = group_repo,
+        repo_prefix = "{name}_",
+        groups = all_requirement_groups,
+    )""".format(name = rctx.attr.name) if not rctx.attr.use_hub_alias_dependencies else "",
+        "%%ALL_DATA_REQUIREMENTS%%": render.list([
+            macro_tmpl.format(p, "data")
+            for p in bzl_packages
+        ]),
+        "%%ALL_REQUIREMENTS%%": render.list([
+            macro_tmpl.format(p, "pkg")
+            for p in bzl_packages
+        ]),
+        "%%ALL_REQUIREMENT_GROUPS%%": render.dict(requirement_cycles),
+        "%%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%": render.dict({
+            p: macro_tmpl.format(p, "whl")
+            for p in bzl_packages
+        }),
+        "%%ANNOTATIONS%%": render.dict(dict(sorted(annotations.items()))),
+        "%%CONFIG%%": render.dict(dict(sorted(config.items()))),
+        "%%EXTRA_PIP_ARGS%%": json.encode(options),
+        "%%IMPORTS%%": "\n".join(imports),
+        "%%MACRO_TMPL%%": macro_tmpl,
+        "%%NAME%%": rctx.attr.name,
+        "%%PACKAGES%%": render.list(
+            [
+                ("{}_{}".format(rctx.attr.name, p), r)
+                for p, r in sorted(selected_requirements.items())
+            ],
+        ),
+    })
+
+    return
+
+pip_repository = repository_rule(
+    attrs = dict(
+        annotations = attr.string_dict(
+            doc = """\
+Optional annotations to apply to packages. Keys should be package names, with
+capitalization matching the input requirements file, and values should be
+generated using the `package_name` macro. For example usage, see [this WORKSPACE
+file](https://github.com/bazelbuild/rules_python/blob/main/examples/pip_repository_annotations/WORKSPACE).
+""",
+        ),
+        _template = attr.label(
+            default = ":requirements.bzl.tmpl.workspace",
+        ),
+        _evaluate_markers_srcs = attr.label_list(
+            default = EVALUATE_MARKERS_SRCS,
+            doc = """\
+The list of labels to use as SRCS for the marker evaluation code. This ensures that the
+code will be re-evaluated when any of files in the default changes.
+""",
+        ),
+        **ATTRS
+    ),
+    doc = """Accepts a locked/compiled requirements file and installs the dependencies listed within.
+
+Those dependencies become available in a generated `requirements.bzl` file.
+You can instead check this `requirements.bzl` file into your repo, see the "vendoring" section below.
+
+In your WORKSPACE file:
+
+```starlark
+load("@rules_python//python:pip.bzl", "pip_parse")
+
+pip_parse(
+    name = "pypi",
+    requirements_lock = ":requirements.txt",
+)
+
+load("@pypi//:requirements.bzl", "install_deps")
+
+install_deps()
+```
+
+You can then reference installed dependencies from a `BUILD` file with the alias targets generated in the same repo, for example, for `PyYAML` we would have the following:
+- `@pypi//pyyaml` and `@pypi//pyyaml:pkg` both point to the `py_library`
+  created after extracting the `PyYAML` package.
+- `@pypi//pyyaml:data` points to the extra data included in the package.
+- `@pypi//pyyaml:dist_info` points to the `dist-info` files in the package.
+- `@pypi//pyyaml:whl` points to the wheel file that was extracted.
+
+```starlark
+py_library(
+    name = "bar",
+    ...
+    deps = [
+       "//my/other:dep",
+       "@pypi//numpy",
+       "@pypi//requests",
+    ],
+)
+```
+
+or
+
+```starlark
+load("@pypi//:requirements.bzl", "requirement")
+
+py_library(
+    name = "bar",
+    ...
+    deps = [
+       "//my/other:dep",
+       requirement("numpy"),
+       requirement("requests"),
+    ],
+)
+```
+
+In addition to the `requirement` macro, which is used to access the generated `py_library`
+target generated from a package's wheel, The generated `requirements.bzl` file contains
+functionality for exposing [entry points][whl_ep] as `py_binary` targets as well.
+
+[whl_ep]: https://packaging.python.org/specifications/entry-points/
+
+```starlark
+load("@pypi//:requirements.bzl", "entry_point")
+
+alias(
+    name = "pip-compile",
+    actual = entry_point(
+        pkg = "pip-tools",
+        script = "pip-compile",
+    ),
+)
+```
+
+Note that for packages whose name and script are the same, only the name of the package
+is needed when calling the `entry_point` macro.
+
+```starlark
+load("@pip//:requirements.bzl", "entry_point")
+
+alias(
+    name = "flake8",
+    actual = entry_point("flake8"),
+)
+```
+
+### Vendoring the requirements.bzl file
+
+In some cases you may not want to generate the requirements.bzl file as a repository rule
+while Bazel is fetching dependencies. For example, if you produce a reusable Bazel module
+such as a ruleset, you may want to include the requirements.bzl file rather than make your users
+install the WORKSPACE setup to generate it.
+See https://github.com/bazelbuild/rules_python/issues/608
+
+This is the same workflow as Gazelle, which creates `go_repository` rules with
+[`update-repos`](https://github.com/bazelbuild/bazel-gazelle#update-repos)
+
+To do this, use the "write to source file" pattern documented in
+https://blog.aspect.dev/bazel-can-write-to-the-source-folder
+to put a copy of the generated requirements.bzl into your project.
+Then load the requirements.bzl file directly rather than from the generated repository.
+See the example in rules_python/examples/pip_parse_vendored.
+""",
+    implementation = _pip_repository_impl,
+    environ = [
+        "RULES_PYTHON_PIP_ISOLATED",
+        REPO_DEBUG_ENV_VAR,
+    ],
+)
diff --git a/python/private/pypi/pip_repository_attrs.bzl b/python/private/pypi/pip_repository_attrs.bzl
new file mode 100644
index 0000000..2300086
--- /dev/null
+++ b/python/private/pypi/pip_repository_attrs.bzl
@@ -0,0 +1,73 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common attributes between bzlmod pip.parse and workspace pip_parse.
+
+Common attributes shared between bzlmod and workspace implementations are
+stored in a separate file to avoid unnecessary refetching of the
+repositories."""
+
+load(":attrs.bzl", COMMON_ATTRS = "ATTRS")
+
+ATTRS = {
+    "requirements_by_platform": attr.label_keyed_string_dict(
+        doc = """\
+The requirements files and the comma delimited list of target platforms as values.
+
+The keys are the requirement files and the values are comma-separated platform
+identifiers. For now we only support `<os>_<cpu>` values that are present in
+`@platforms//os` and `@platforms//cpu` packages respectively.
+""",
+    ),
+    "requirements_darwin": attr.label(
+        allow_single_file = True,
+        doc = "Override the requirements_lock attribute when the host platform is Mac OS",
+    ),
+    "requirements_linux": attr.label(
+        allow_single_file = True,
+        doc = "Override the requirements_lock attribute when the host platform is Linux",
+    ),
+    "requirements_lock": attr.label(
+        allow_single_file = True,
+        doc = """\
+A fully resolved 'requirements.txt' pip requirement file containing the
+transitive set of your dependencies. If this file is passed instead of
+'requirements' no resolve will take place and pip_repository will create
+individual repositories for each of your dependencies so that wheels are
+fetched/built only for the targets specified by 'build/run/test'. Note that if
+your lockfile is platform-dependent, you can use the `requirements_[platform]`
+attributes.
+
+Note, that in general requirements files are compiled for a specific platform,
+but sometimes they can work for multiple platforms. `rules_python` right now
+supports requirements files that are created for a particular platform without
+platform markers.
+""",
+    ),
+    "requirements_windows": attr.label(
+        allow_single_file = True,
+        doc = "Override the requirements_lock attribute when the host platform is Windows",
+    ),
+    "use_hub_alias_dependencies": attr.bool(
+        default = False,
+        doc = """\
+Controls if the hub alias dependencies are used. If set to true, then the
+group_library will be included in the hub repo.
+
+True will become default in a subsequent release.
+""",
+    ),
+}
+
+ATTRS.update(**COMMON_ATTRS)
diff --git a/python/private/pypi/pypi_repo_utils.bzl b/python/private/pypi/pypi_repo_utils.bzl
new file mode 100644
index 0000000..da449b4
--- /dev/null
+++ b/python/private/pypi/pypi_repo_utils.bzl
@@ -0,0 +1,130 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"Utilities for PyPI-related repository rule and module extension operations."
+
+load("@bazel_skylib//lib:types.bzl", "types")
+load("//python/private:repo_utils.bzl", "repo_utils")
+
+def _get_python_interpreter_attr(mrctx, *, python_interpreter = None):
+    """A helper function for getting the `python_interpreter` attribute or its default
+
+    Args:
+        mrctx (module_ctx or repository_ctx): Handle to the rule repository context.
+        python_interpreter (str): The python interpreter override.
+
+    Returns:
+        str: The attribute value or its default
+    """
+    if python_interpreter:
+        return python_interpreter
+
+    os = repo_utils.get_platforms_os_name(mrctx)
+    if "windows" in os:
+        return "python.exe"
+    else:
+        return "python3"
+
+def _resolve_python_interpreter(mrctx, *, python_interpreter = None, python_interpreter_target = None):
+    """Helper function to find the python interpreter from the common attributes
+
+    Args:
+        mrctx: Handle to the module_ctx or repository_ctx.
+        python_interpreter: str, the python interpreter to use.
+        python_interpreter_target: Label, the python interpreter to use after
+            downloading the label.
+
+    Returns:
+        `path` object, for the resolved path to the Python interpreter.
+    """
+    python_interpreter = _get_python_interpreter_attr(mrctx, python_interpreter = python_interpreter)
+
+    if python_interpreter_target != None:
+        python_interpreter = mrctx.path(python_interpreter_target)
+
+        os = repo_utils.get_platforms_os_name(mrctx)
+
+        # On Windows, the symlink doesn't work because Windows attempts to find
+        # Python DLLs where the symlink is, not where the symlink points.
+        if "windows" in os:
+            python_interpreter = python_interpreter.realpath
+    elif "/" not in python_interpreter:
+        # It's a plain command, e.g. "python3", to look up in the environment.
+        python_interpreter = repo_utils.which_checked(mrctx, python_interpreter)
+    else:
+        python_interpreter = mrctx.path(python_interpreter)
+    return python_interpreter
+
+def _construct_pypath(mrctx, *, entries):
+    """Helper function to construct a PYTHONPATH.
+
+    Contains entries for code in this repo as well as packages downloaded from //python/pip_install:repositories.bzl.
+    This allows us to run python code inside repository rule implementations.
+
+    Args:
+        mrctx: Handle to the module_ctx or repository_ctx.
+        entries: The list of entries to add to PYTHONPATH.
+
+    Returns: String of the PYTHONPATH.
+    """
+
+    if not entries:
+        return None
+
+    os = repo_utils.get_platforms_os_name(mrctx)
+    separator = ";" if "windows" in os else ":"
+    pypath = separator.join([
+        str(mrctx.path(entry).dirname)
+        # Use a dict as a way to remove duplicates and then sort it.
+        for entry in sorted({x: None for x in entries})
+    ])
+    return pypath
+
+def _execute_checked(mrctx, *, srcs, **kwargs):
+    """Helper function to run a python script and modify the PYTHONPATH to include external deps.
+
+    Args:
+        mrctx: Handle to the module_ctx or repository_ctx.
+        srcs: The src files that the script depends on. This is important to
+            ensure that the Bazel repository cache or the bzlmod lock file gets
+            invalidated when any one file changes. It is advisable to use
+            `RECORD` files for external deps and the list of srcs from the
+            rules_python repo for any scripts.
+        **kwargs: Arguments forwarded to `repo_utils.execute_checked`. If
+            the `environment` has a value `PYTHONPATH` and it is a list, then
+            it will be passed to `construct_pythonpath` function.
+    """
+
+    for src in srcs:
+        # This will ensure that we will re-evaluate the bzlmod extension or
+        # refetch the repository_rule when the srcs change. This should work on
+        # Bazel versions without `mrctx.watch` as well.
+        repo_utils.watch(mrctx.path(src))
+
+    env = kwargs.pop("environment", {})
+    pythonpath = env.get("PYTHONPATH", "")
+    if pythonpath and not types.is_string(pythonpath):
+        env["PYTHONPATH"] = _construct_pypath(mrctx, entries = pythonpath)
+
+    return repo_utils.execute_checked(
+        mrctx,
+        environment = env,
+        **kwargs
+    )
+
+pypi_repo_utils = struct(
+    construct_pythonpath = _construct_pypath,
+    execute_checked = _execute_checked,
+    resolve_python_interpreter = _resolve_python_interpreter,
+)
diff --git a/python/private/pypi/render_pkg_aliases.bzl b/python/private/pypi/render_pkg_aliases.bzl
new file mode 100644
index 0000000..9e5158f
--- /dev/null
+++ b/python/private/pypi/render_pkg_aliases.bzl
@@ -0,0 +1,640 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""render_pkg_aliases is a function to generate BUILD.bazel contents used to create user-friendly aliases.
+
+This is used in bzlmod and non-bzlmod setups."""
+
+load("//python/private:normalize_name.bzl", "normalize_name")
+load("//python/private:text_util.bzl", "render")
+load(
+    ":generate_group_library_build_bazel.bzl",
+    "generate_group_library_build_bazel",
+)  # buildifier: disable=bzl-visibility
+load(
+    ":labels.bzl",
+    "DATA_LABEL",
+    "DIST_INFO_LABEL",
+    "PY_LIBRARY_IMPL_LABEL",
+    "PY_LIBRARY_PUBLIC_LABEL",
+    "WHEEL_FILE_IMPL_LABEL",
+    "WHEEL_FILE_PUBLIC_LABEL",
+)
+load(":parse_whl_name.bzl", "parse_whl_name")
+load(":whl_target_platforms.bzl", "whl_target_platforms")
+
+NO_MATCH_ERROR_MESSAGE_TEMPLATE = """\
+No matching wheel for current configuration's Python version.
+
+The current build configuration's Python version doesn't match any of the Python
+versions available for this wheel. This wheel supports the following Python versions:
+    {supported_versions}
+
+As matched by the `@{rules_python}//python/config_settings:is_python_<version>`
+configuration settings.
+
+To determine the current configuration's Python version, run:
+    `bazel config <config id>` (shown further below)
+and look for
+    {rules_python}//python/config_settings:python_version
+
+If the value is missing, then the "default" Python version is being used,
+which has a "null" version value and will not match version constraints.
+"""
+
+NO_MATCH_ERROR_MESSAGE_TEMPLATE_V2 = """\
+No matching wheel for current configuration's Python version.
+
+The current build configuration's Python version doesn't match any of the Python
+wheels available for this wheel. This wheel supports the following Python
+configuration settings:
+    {config_settings}
+
+To determine the current configuration's Python version, run:
+    `bazel config <config id>` (shown further below)
+and look for
+    {rules_python}//python/config_settings:python_version
+
+If the value is missing, then the "default" Python version is being used,
+which has a "null" version value and will not match version constraints.
+"""
+
+def _render_whl_library_alias(
+        *,
+        name,
+        default_config_setting,
+        aliases,
+        target_name,
+        **kwargs):
+    """Render an alias for common targets."""
+    if len(aliases) == 1 and not aliases[0].version:
+        alias = aliases[0]
+        return render.alias(
+            name = name,
+            actual = repr("@{repo}//:{name}".format(
+                repo = alias.repo,
+                name = target_name,
+            )),
+            **kwargs
+        )
+
+    # Create the alias repositories which contains different select
+    # statements. These select statements point to the different pip
+    # whls that are based on a specific version of Python.
+    selects = {}
+    no_match_error = "_NO_MATCH_ERROR"
+    for alias in sorted(aliases, key = lambda x: x.version):
+        actual = "@{repo}//:{name}".format(repo = alias.repo, name = target_name)
+        selects.setdefault(actual, []).append(alias.config_setting)
+        if alias.config_setting == default_config_setting:
+            selects[actual].append("//conditions:default")
+            no_match_error = None
+
+    return render.alias(
+        name = name,
+        actual = render.select(
+            {
+                tuple(sorted(
+                    conditions,
+                    # Group `is_python` and other conditions for easier reading
+                    # when looking at the generated files.
+                    key = lambda condition: ("is_python" not in condition, condition),
+                )): target
+                for target, conditions in sorted(selects.items())
+            },
+            no_match_error = no_match_error,
+            # This key_repr is used to render selects.with_or keys
+            key_repr = lambda x: repr(x[0]) if len(x) == 1 else render.tuple(x),
+            name = "selects.with_or",
+        ),
+        **kwargs
+    )
+
+def _render_common_aliases(*, name, aliases, default_config_setting = None, group_name = None):
+    lines = [
+        """load("@bazel_skylib//lib:selects.bzl", "selects")""",
+        """package(default_visibility = ["//visibility:public"])""",
+    ]
+
+    config_settings = None
+    if aliases:
+        config_settings = sorted([v.config_setting for v in aliases if v.config_setting])
+
+    if not config_settings or default_config_setting in config_settings:
+        pass
+    else:
+        error_msg = NO_MATCH_ERROR_MESSAGE_TEMPLATE_V2.format(
+            config_settings = render.indent(
+                "\n".join(config_settings),
+            ).lstrip(),
+            rules_python = "rules_python",
+        )
+
+        lines.append("_NO_MATCH_ERROR = \"\"\"\\\n{error_msg}\"\"\"".format(
+            error_msg = error_msg,
+        ))
+
+        # This is to simplify the code in _render_whl_library_alias and to ensure
+        # that we don't pass a 'default_version' that is not in 'versions'.
+        default_config_setting = None
+
+    lines.append(
+        render.alias(
+            name = name,
+            actual = repr(":pkg"),
+        ),
+    )
+    lines.extend(
+        [
+            _render_whl_library_alias(
+                name = name,
+                default_config_setting = default_config_setting,
+                aliases = aliases,
+                target_name = target_name,
+                visibility = ["//_groups:__subpackages__"] if name.startswith("_") else None,
+            )
+            for target_name, name in {
+                PY_LIBRARY_PUBLIC_LABEL: PY_LIBRARY_IMPL_LABEL if group_name else PY_LIBRARY_PUBLIC_LABEL,
+                WHEEL_FILE_PUBLIC_LABEL: WHEEL_FILE_IMPL_LABEL if group_name else WHEEL_FILE_PUBLIC_LABEL,
+                DATA_LABEL: DATA_LABEL,
+                DIST_INFO_LABEL: DIST_INFO_LABEL,
+            }.items()
+        ],
+    )
+    if group_name:
+        lines.extend(
+            [
+                render.alias(
+                    name = "pkg",
+                    actual = repr("//_groups:{}_pkg".format(group_name)),
+                ),
+                render.alias(
+                    name = "whl",
+                    actual = repr("//_groups:{}_whl".format(group_name)),
+                ),
+            ],
+        )
+
+    return "\n\n".join(lines)
+
+def render_pkg_aliases(*, aliases, default_config_setting = None, requirement_cycles = None):
+    """Create alias declarations for each PyPI package.
+
+    The aliases should be appended to the pip_repository BUILD.bazel file. These aliases
+    allow users to use requirement() without needing a corresponding `use_repo()` for each dep
+    when using bzlmod.
+
+    Args:
+        aliases: dict, the keys are normalized distribution names and values are the
+            whl_alias instances.
+        default_config_setting: the default to be used for the aliases.
+        requirement_cycles: any package groups to also add.
+
+    Returns:
+        A dict of file paths and their contents.
+    """
+    contents = {}
+    if not aliases:
+        return contents
+    elif type(aliases) != type({}):
+        fail("The aliases need to be provided as a dict, got: {}".format(type(aliases)))
+
+    whl_group_mapping = {}
+    if requirement_cycles:
+        requirement_cycles = {
+            name: [normalize_name(whl_name) for whl_name in whls]
+            for name, whls in requirement_cycles.items()
+        }
+
+        whl_group_mapping = {
+            whl_name: group_name
+            for group_name, group_whls in requirement_cycles.items()
+            for whl_name in group_whls
+        }
+
+    files = {
+        "{}/BUILD.bazel".format(normalize_name(name)): _render_common_aliases(
+            name = normalize_name(name),
+            aliases = pkg_aliases,
+            default_config_setting = default_config_setting,
+            group_name = whl_group_mapping.get(normalize_name(name)),
+        ).strip()
+        for name, pkg_aliases in aliases.items()
+    }
+
+    if requirement_cycles:
+        files["_groups/BUILD.bazel"] = generate_group_library_build_bazel("", requirement_cycles)
+    return files
+
+def whl_alias(*, repo, version = None, config_setting = None, filename = None, target_platforms = None):
+    """The bzl_packages value used by the render_pkg_aliases function.
+
+    This contains the minimum amount of information required to generate correct
+    aliases in a hub repository.
+
+    Args:
+        repo: str, the repo of where to find the things to be aliased.
+        version: optional(str), the version of the python toolchain that this
+            whl alias is for. If not set, then non-version aware aliases will be
+            constructed. This is mainly used for better error messages when there
+            is no match found during a select.
+        config_setting: optional(Label or str), the config setting that we should use. Defaults
+            to "//_config:is_python_{version}".
+        filename: optional(str), the distribution filename to derive the config_setting.
+        target_platforms: optional(list[str]), the list of target_platforms for this
+            distribution.
+
+    Returns:
+        a struct with the validated and parsed values.
+    """
+    if not repo:
+        fail("'repo' must be specified")
+
+    if version:
+        config_setting = config_setting or ("//_config:is_python_" + version)
+        config_setting = str(config_setting)
+
+    if target_platforms:
+        for p in target_platforms:
+            if not p.startswith("cp"):
+                fail("target_platform should start with 'cp' denoting the python version, got: " + p)
+
+    return struct(
+        repo = repo,
+        version = version,
+        config_setting = config_setting,
+        filename = filename,
+        target_platforms = target_platforms,
+    )
+
+def render_multiplatform_pkg_aliases(*, aliases, default_version = None, **kwargs):
+    """Render the multi-platform pkg aliases.
+
+    Args:
+        aliases: dict[str, list(whl_alias)] A list of aliases that will be
+          transformed from ones having `filename` to ones having `config_setting`.
+        default_version: str, the default python version. Defaults to None.
+        **kwargs: extra arguments passed to render_pkg_aliases.
+
+    Returns:
+        A dict of file paths and their contents.
+    """
+
+    flag_versions = get_whl_flag_versions(
+        aliases = [
+            a
+            for bunch in aliases.values()
+            for a in bunch
+        ],
+    )
+
+    config_setting_aliases = {
+        pkg: multiplatform_whl_aliases(
+            aliases = pkg_aliases,
+            default_version = default_version,
+            glibc_versions = flag_versions.get("glibc_versions", []),
+            muslc_versions = flag_versions.get("muslc_versions", []),
+            osx_versions = flag_versions.get("osx_versions", []),
+        )
+        for pkg, pkg_aliases in aliases.items()
+    }
+
+    contents = render_pkg_aliases(
+        aliases = config_setting_aliases,
+        **kwargs
+    )
+    contents["_config/BUILD.bazel"] = _render_config_settings(**flag_versions)
+    return contents
+
+def multiplatform_whl_aliases(*, aliases, default_version = None, **kwargs):
+    """Convert a list of aliases from filename to config_setting ones.
+
+    Args:
+        aliases: list(whl_alias): The aliases to process. Any aliases that have
+            the filename set will be converted to a list of aliases, each with
+            an appropriate config_setting value.
+        default_version: string | None, the default python version to use.
+        **kwargs: Extra parameters passed to get_filename_config_settings.
+
+    Returns:
+        A dict with aliases to be used in the hub repo.
+    """
+
+    ret = []
+    versioned_additions = {}
+    for alias in aliases:
+        if not alias.filename:
+            ret.append(alias)
+            continue
+
+        config_settings, all_versioned_settings = get_filename_config_settings(
+            # TODO @aignas 2024-05-27: pass the parsed whl to reduce the
+            # number of duplicate operations.
+            filename = alias.filename,
+            target_platforms = alias.target_platforms,
+            python_version = alias.version,
+            python_default = default_version == alias.version,
+            **kwargs
+        )
+
+        for setting in config_settings:
+            ret.append(whl_alias(
+                repo = alias.repo,
+                version = alias.version,
+                config_setting = "//_config" + setting,
+            ))
+
+        # Now for the versioned platform config settings, we need to select one
+        # that best fits the bill and if there are multiple wheels, e.g.
+        # manylinux_2_17_x86_64 and manylinux_2_28_x86_64, then we need to select
+        # the former when the glibc is in the range of [2.17, 2.28) and then chose
+        # the later if it is [2.28, ...). If the 2.28 wheel was not present in
+        # the hub, then we would need to use 2.17 for all the glibc version
+        # configurations.
+        #
+        # Here we add the version settings to a dict where we key the range of
+        # versions that the whl spans. If the wheel supports musl and glibc at
+        # the same time, we do this for each supported platform, hence the
+        # double dict.
+        for default_setting, versioned in all_versioned_settings.items():
+            versions = sorted(versioned)
+            min_version = versions[0]
+            max_version = versions[-1]
+
+            versioned_additions.setdefault(default_setting, {})[(min_version, max_version)] = struct(
+                repo = alias.repo,
+                python_version = alias.version,
+                settings = versioned,
+            )
+
+    versioned = {}
+    for default_setting, candidates in versioned_additions.items():
+        # Sort the candidates by the range of versions they span, so that we
+        # start with the lowest version.
+        for _, candidate in sorted(candidates.items()):
+            # Set the default with the first candidate, which gives us the highest
+            # compatibility. If the users want to use a higher-version than the default
+            # they can configure the glibc_version flag.
+            versioned.setdefault(default_setting, whl_alias(
+                version = candidate.python_version,
+                config_setting = "//_config" + default_setting,
+                repo = candidate.repo,
+            ))
+
+            # We will be overwriting previously added entries, but that is intended.
+            for _, setting in sorted(candidate.settings.items()):
+                versioned[setting] = whl_alias(
+                    version = candidate.python_version,
+                    config_setting = "//_config" + setting,
+                    repo = candidate.repo,
+                )
+
+    ret.extend(versioned.values())
+    return ret
+
+def _render_config_settings(python_versions = [], target_platforms = [], osx_versions = [], glibc_versions = [], muslc_versions = []):
+    return """\
+load("@rules_python//python/private/pypi:config_settings.bzl", "config_settings")
+
+config_settings(
+    name = "config_settings",
+    glibc_versions = {glibc_versions},
+    muslc_versions = {muslc_versions},
+    osx_versions = {osx_versions},
+    python_versions = {python_versions},
+    target_platforms = {target_platforms},
+    visibility = ["//:__subpackages__"],
+)""".format(
+        glibc_versions = render.indent(render.list(glibc_versions)).lstrip(),
+        muslc_versions = render.indent(render.list(muslc_versions)).lstrip(),
+        osx_versions = render.indent(render.list(osx_versions)).lstrip(),
+        python_versions = render.indent(render.list(python_versions)).lstrip(),
+        target_platforms = render.indent(render.list(target_platforms)).lstrip(),
+    )
+
+def get_whl_flag_versions(aliases):
+    """Return all of the flag versions that are used by the aliases
+
+    Args:
+        aliases: list[whl_alias]
+
+    Returns:
+        dict, which may have keys:
+          * python_versions
+    """
+    python_versions = {}
+    glibc_versions = {}
+    target_platforms = {}
+    muslc_versions = {}
+    osx_versions = {}
+
+    for a in aliases:
+        if not a.version and not a.filename:
+            continue
+
+        if a.version:
+            python_versions[a.version] = None
+
+        if not a.filename:
+            continue
+
+        if a.filename.endswith(".whl") and not a.filename.endswith("-any.whl"):
+            parsed = parse_whl_name(a.filename)
+        else:
+            for plat in a.target_platforms or []:
+                target_platforms[_non_versioned_platform(plat)] = None
+            continue
+
+        for platform_tag in parsed.platform_tag.split("."):
+            parsed = whl_target_platforms(platform_tag)
+
+            for p in parsed:
+                target_platforms[p.target_platform] = None
+
+            if platform_tag.startswith("win") or platform_tag.startswith("linux"):
+                continue
+
+            head, _, tail = platform_tag.partition("_")
+            major, _, tail = tail.partition("_")
+            minor, _, tail = tail.partition("_")
+            if tail:
+                version = (int(major), int(minor))
+                if "many" in head:
+                    glibc_versions[version] = None
+                elif "musl" in head:
+                    muslc_versions[version] = None
+                elif "mac" in head:
+                    osx_versions[version] = None
+                else:
+                    fail(platform_tag)
+
+    return {
+        k: sorted(v)
+        for k, v in {
+            "glibc_versions": glibc_versions,
+            "muslc_versions": muslc_versions,
+            "osx_versions": osx_versions,
+            "python_versions": python_versions,
+            "target_platforms": target_platforms,
+        }.items()
+        if v
+    }
+
+def _non_versioned_platform(p, *, strict = False):
+    """A small utility function that converts 'cp311_linux_x86_64' to 'linux_x86_64'.
+
+    This is so that we can tighten the code structure later by using strict = True.
+    """
+    has_abi = p.startswith("cp")
+    if has_abi:
+        return p.partition("_")[-1]
+    elif not strict:
+        return p
+    else:
+        fail("Expected to always have a platform in the form '{{abi}}_{{os}}_{{arch}}', got: {}".format(p))
+
+def get_filename_config_settings(
+        *,
+        filename,
+        target_platforms,
+        glibc_versions,
+        muslc_versions,
+        osx_versions,
+        python_version = "",
+        python_default = True):
+    """Get the filename config settings.
+
+    Args:
+        filename: the distribution filename (can be a whl or an sdist).
+        target_platforms: list[str], target platforms in "{abi}_{os}_{cpu}" format.
+        glibc_versions: list[tuple[int, int]], list of versions.
+        muslc_versions: list[tuple[int, int]], list of versions.
+        osx_versions: list[tuple[int, int]], list of versions.
+        python_version: the python version to generate the config_settings for.
+        python_default: if we should include the setting when python_version is not set.
+
+    Returns:
+        A tuple:
+         * A list of config settings that are generated by ./pip_config_settings.bzl
+         * The list of default version settings.
+    """
+    prefixes = []
+    suffixes = []
+    if (0, 0) in glibc_versions:
+        fail("Invalid version in 'glibc_versions': cannot specify (0, 0) as a value")
+    if (0, 0) in muslc_versions:
+        fail("Invalid version in 'muslc_versions': cannot specify (0, 0) as a value")
+    if (0, 0) in osx_versions:
+        fail("Invalid version in 'osx_versions': cannot specify (0, 0) as a value")
+
+    glibc_versions = sorted(glibc_versions)
+    muslc_versions = sorted(muslc_versions)
+    osx_versions = sorted(osx_versions)
+    setting_supported_versions = {}
+
+    if filename.endswith(".whl"):
+        parsed = parse_whl_name(filename)
+        if parsed.python_tag == "py2.py3":
+            py = "py"
+        elif parsed.python_tag.startswith("cp"):
+            py = "cp3x"
+        else:
+            py = "py3"
+
+        if parsed.abi_tag.startswith("cp"):
+            abi = "cp"
+        else:
+            abi = parsed.abi_tag
+
+        if parsed.platform_tag == "any":
+            prefixes = ["{}_{}_any".format(py, abi)]
+            suffixes = [_non_versioned_platform(p) for p in target_platforms or []]
+        else:
+            prefixes = ["{}_{}".format(py, abi)]
+            suffixes = _whl_config_setting_suffixes(
+                platform_tag = parsed.platform_tag,
+                glibc_versions = glibc_versions,
+                muslc_versions = muslc_versions,
+                osx_versions = osx_versions,
+                setting_supported_versions = setting_supported_versions,
+            )
+    else:
+        prefixes = ["sdist"]
+        suffixes = [_non_versioned_platform(p) for p in target_platforms or []]
+
+    if python_default and python_version:
+        prefixes += ["cp{}_{}".format(python_version, p) for p in prefixes]
+    elif python_version:
+        prefixes = ["cp{}_{}".format(python_version, p) for p in prefixes]
+    elif python_default:
+        pass
+    else:
+        fail("BUG: got no python_version and it is not default")
+
+    versioned = {
+        ":is_{}_{}".format(p, suffix): {
+            version: ":is_{}_{}".format(p, setting)
+            for version, setting in versions.items()
+        }
+        for p in prefixes
+        for suffix, versions in setting_supported_versions.items()
+    }
+
+    if suffixes or versioned:
+        return [":is_{}_{}".format(p, s) for p in prefixes for s in suffixes], versioned
+    else:
+        return [":is_{}".format(p) for p in prefixes], setting_supported_versions
+
+def _whl_config_setting_suffixes(
+        platform_tag,
+        glibc_versions,
+        muslc_versions,
+        osx_versions,
+        setting_supported_versions):
+    suffixes = []
+    for platform_tag in platform_tag.split("."):
+        for p in whl_target_platforms(platform_tag):
+            prefix = p.os
+            suffix = p.cpu
+            if "manylinux" in platform_tag:
+                prefix = "manylinux"
+                versions = glibc_versions
+            elif "musllinux" in platform_tag:
+                prefix = "musllinux"
+                versions = muslc_versions
+            elif p.os in ["linux", "windows"]:
+                versions = [(0, 0)]
+            elif p.os == "osx":
+                versions = osx_versions
+                if "universal2" in platform_tag:
+                    suffix += "_universal2"
+            else:
+                fail("Unsupported whl os: {}".format(p.os))
+
+            default_version_setting = "{}_{}".format(prefix, suffix)
+            supported_versions = {}
+            for v in versions:
+                if v == (0, 0):
+                    suffixes.append(default_version_setting)
+                elif v >= p.version:
+                    supported_versions[v] = "{}_{}_{}_{}".format(
+                        prefix,
+                        v[0],
+                        v[1],
+                        suffix,
+                    )
+            if supported_versions:
+                setting_supported_versions[default_version_setting] = supported_versions
+
+    return suffixes
diff --git a/python/private/repack_whl.py b/python/private/pypi/repack_whl.py
similarity index 96%
rename from python/private/repack_whl.py
rename to python/private/pypi/repack_whl.py
index be113ef..9052ac3 100644
--- a/python/private/repack_whl.py
+++ b/python/private/pypi/repack_whl.py
@@ -150,8 +150,11 @@
         logging.debug(f"Found dist-info dir: {distinfo_dir}")
         record_path = distinfo_dir / "RECORD"
         record_contents = record_path.read_text() if record_path.exists() else ""
+        distribution_prefix = distinfo_dir.with_suffix("").name
 
-        with _WhlFile(args.output, mode="w", distinfo_dir=distinfo_dir) as out:
+        with _WhlFile(
+            args.output, mode="w", distribution_prefix=distribution_prefix
+        ) as out:
             for p in _files_to_pack(patched_wheel_dir, record_contents):
                 rel_path = p.relative_to(patched_wheel_dir)
                 out.add_file(str(rel_path), p)
diff --git a/python/private/pypi/requirements.bzl.tmpl.bzlmod b/python/private/pypi/requirements.bzl.tmpl.bzlmod
new file mode 100644
index 0000000..ba227ae
--- /dev/null
+++ b/python/private/pypi/requirements.bzl.tmpl.bzlmod
@@ -0,0 +1,26 @@
+"""Starlark representation of locked requirements.
+
+@generated by rules_python pip.parse bzlmod extension.
+"""
+
+load("@rules_python//python:pip.bzl", "pip_utils")
+
+all_requirements = %%ALL_REQUIREMENTS%%
+
+all_whl_requirements_by_package = %%ALL_WHL_REQUIREMENTS_BY_PACKAGE%%
+
+all_whl_requirements = all_whl_requirements_by_package.values()
+
+all_data_requirements = %%ALL_DATA_REQUIREMENTS%%
+
+def requirement(name):
+    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "pkg")
+
+def whl_requirement(name):
+    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "whl")
+
+def data_requirement(name):
+    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "data")
+
+def dist_info_requirement(name):
+    return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "dist_info")
diff --git a/python/pip_install/pip_repository_requirements.bzl.tmpl b/python/private/pypi/requirements.bzl.tmpl.workspace
similarity index 83%
rename from python/pip_install/pip_repository_requirements.bzl.tmpl
rename to python/private/pypi/requirements.bzl.tmpl.workspace
index 2b88f5c..2f4bcd6 100644
--- a/python/pip_install/pip_repository_requirements.bzl.tmpl
+++ b/python/private/pypi/requirements.bzl.tmpl.workspace
@@ -1,7 +1,6 @@
 """Starlark representation of locked requirements.
 
-@generated by rules_python pip_parse repository rule
-from %%REQUIREMENTS_LOCK%%
+@generated by rules_python pip_parse repository rule.
 """
 
 %%IMPORTS%%
@@ -30,11 +29,6 @@
 def dist_info_requirement(name):
     return "%%MACRO_TMPL%%".format(pip_utils.normalize_name(name), "dist_info")
 
-def entry_point(pkg, script = None):
-    if not script:
-        script = pkg
-    return "@%%NAME%%_" + pip_utils.normalize_name(pkg) + "//:rules_python_wheel_entry_point_" + script
-
 def _get_annotation(requirement):
     # This expects to parse `setuptools==58.2.0     --hash=sha256:2551203ae6955b9876741a26ab3e767bb3242dafe86a32a749ea0d78b6792f11`
     # down to `setuptools`.
@@ -58,12 +52,7 @@
         for requirement in group_requirements
     }
 
-    group_repo = "%%NAME%%__groups"
-    group_library(
-        name = group_repo,
-        repo_prefix = "%%NAME%%_",
-        groups = all_requirement_groups,
-    )
+    # %%GROUP_LIBRARY%%
 
     # Install wheels which may be participants in a group
     whl_config = dict(_config)
diff --git a/python/pip_install/tools/requirements.txt b/python/private/pypi/requirements.txt
similarity index 85%
rename from python/pip_install/tools/requirements.txt
rename to python/private/pypi/requirements.txt
index bf9fe46..006ef21 100755
--- a/python/pip_install/tools/requirements.txt
+++ b/python/private/pypi/requirements.txt
@@ -7,7 +7,7 @@
 packaging
 pep517
 pip
-pip_tools
+pip_tools >= 7.4.0
 setuptools
 tomli
 wheel
diff --git a/python/private/pypi/requirements_files_by_platform.bzl b/python/private/pypi/requirements_files_by_platform.bzl
new file mode 100644
index 0000000..e3aafc0
--- /dev/null
+++ b/python/private/pypi/requirements_files_by_platform.bzl
@@ -0,0 +1,258 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Get the requirement files by platform."""
+
+load(":whl_target_platforms.bzl", "whl_target_platforms")
+
+# TODO @aignas 2024-05-13: consider using the same platform tags as are used in
+# the //python:versions.bzl
+DEFAULT_PLATFORMS = [
+    "linux_aarch64",
+    "linux_arm",
+    "linux_ppc",
+    "linux_s390x",
+    "linux_x86_64",
+    "osx_aarch64",
+    "osx_x86_64",
+    "windows_x86_64",
+]
+
+def _default_platforms(*, filter):
+    if not filter:
+        fail("Must specific a filter string, got: {}".format(filter))
+
+    if filter.startswith("cp3"):
+        # TODO @aignas 2024-05-23: properly handle python versions in the filter.
+        # For now we are just dropping it to ensure that we don't fail.
+        _, _, filter = filter.partition("_")
+
+    sanitized = filter.replace("*", "").replace("_", "")
+    if sanitized and not sanitized.isalnum():
+        fail("The platform filter can only contain '*', '_' and alphanumerics")
+
+    if "*" in filter:
+        prefix = filter.rstrip("*")
+        if "*" in prefix:
+            fail("The filter can only contain '*' at the end of it")
+
+        if not prefix:
+            return DEFAULT_PLATFORMS
+
+        return [p for p in DEFAULT_PLATFORMS if p.startswith(prefix)]
+    else:
+        return [p for p in DEFAULT_PLATFORMS if filter in p]
+
+def _platforms_from_args(extra_pip_args):
+    platform_values = []
+
+    if not extra_pip_args:
+        return platform_values
+
+    for arg in extra_pip_args:
+        if platform_values and platform_values[-1] == "":
+            platform_values[-1] = arg
+            continue
+
+        if arg == "--platform":
+            platform_values.append("")
+            continue
+
+        if not arg.startswith("--platform"):
+            continue
+
+        _, _, plat = arg.partition("=")
+        if not plat:
+            _, _, plat = arg.partition(" ")
+        if plat:
+            platform_values.append(plat)
+        else:
+            platform_values.append("")
+
+    if not platform_values:
+        return []
+
+    platforms = {
+        p.target_platform: None
+        for arg in platform_values
+        for p in whl_target_platforms(arg)
+    }
+    return list(platforms.keys())
+
+def _platform(platform_string, python_version = None):
+    if not python_version or platform_string.startswith("cp3"):
+        return platform_string
+
+    _, _, tail = python_version.partition(".")
+    minor, _, _ = tail.partition(".")
+
+    return "cp3{}_{}".format(minor, platform_string)
+
+def requirements_files_by_platform(
+        *,
+        requirements_by_platform = {},
+        requirements_osx = None,
+        requirements_linux = None,
+        requirements_lock = None,
+        requirements_windows = None,
+        extra_pip_args = None,
+        python_version = None,
+        logger = None,
+        fail_fn = fail):
+    """Resolve the requirement files by target platform.
+
+    Args:
+        requirements_by_platform (label_keyed_string_dict): a way to have
+            different package versions (or different packages) for different
+            os, arch combinations.
+        requirements_osx (label): The requirements file for the osx OS.
+        requirements_linux (label): The requirements file for the linux OS.
+        requirements_lock (label): The requirements file for all OSes, or used as a fallback.
+        requirements_windows (label): The requirements file for windows OS.
+        extra_pip_args (string list): Extra pip arguments to perform extra validations and to
+            be joined with args fined in files.
+        python_version: str or None. This is needed when the get_index_urls is
+            specified. It should be of the form "3.x.x",
+        logger: repo_utils.logger or None, a simple struct to log diagnostic messages.
+        fail_fn (Callable[[str], None]): A failure function used in testing failure cases.
+
+    Returns:
+        A dict with keys as the labels to the files and values as lists of
+        platforms that the files support.
+    """
+    if not (
+        requirements_lock or
+        requirements_linux or
+        requirements_osx or
+        requirements_windows or
+        requirements_by_platform
+    ):
+        fail_fn(
+            "A 'requirements_lock' attribute must be specified, a platform-specific lockfiles " +
+            "via 'requirements_by_platform' or an os-specific lockfiles must be specified " +
+            "via 'requirements_*' attributes",
+        )
+        return None
+
+    platforms = _platforms_from_args(extra_pip_args)
+    if logger:
+        logger.debug(lambda: "Platforms from pip args: {}".format(platforms))
+
+    if platforms:
+        lock_files = [
+            f
+            for f in [
+                requirements_lock,
+                requirements_linux,
+                requirements_osx,
+                requirements_windows,
+            ] + list(requirements_by_platform.keys())
+            if f
+        ]
+
+        if len(lock_files) > 1:
+            # If the --platform argument is used, check that we are using
+            # a single `requirements_lock` file instead of the OS specific ones as that is
+            # the only correct way to use the API.
+            fail_fn("only a single 'requirements_lock' file can be used when using '--platform' pip argument, consider specifying it via 'requirements_lock' attribute")
+            return None
+
+        files_by_platform = [
+            (lock_files[0], platforms),
+        ]
+        if logger:
+            logger.debug(lambda: "Files by platform with the platform set in the args: {}".format(files_by_platform))
+    else:
+        files_by_platform = {
+            file: [
+                platform
+                for filter_or_platform in specifier.split(",")
+                for platform in (_default_platforms(filter = filter_or_platform) if filter_or_platform.endswith("*") else [filter_or_platform])
+            ]
+            for file, specifier in requirements_by_platform.items()
+        }.items()
+
+        if logger:
+            logger.debug(lambda: "Files by platform with the platform set in the attrs: {}".format(files_by_platform))
+
+        for f in [
+            # If the users need a greater span of the platforms, they should consider
+            # using the 'requirements_by_platform' attribute.
+            (requirements_linux, _default_platforms(filter = "linux_*")),
+            (requirements_osx, _default_platforms(filter = "osx_*")),
+            (requirements_windows, _default_platforms(filter = "windows_*")),
+            (requirements_lock, None),
+        ]:
+            if f[0]:
+                if logger:
+                    logger.debug(lambda: "Adding an extra item to files_by_platform: {}".format(f))
+                files_by_platform.append(f)
+
+    configured_platforms = {}
+    requirements = {}
+    for file, plats in files_by_platform:
+        if plats:
+            plats = [_platform(p, python_version) for p in plats]
+            for p in plats:
+                if p in configured_platforms:
+                    fail_fn(
+                        "Expected the platform '{}' to be map only to a single requirements file, but got multiple: '{}', '{}'".format(
+                            p,
+                            configured_platforms[p],
+                            file,
+                        ),
+                    )
+                    return None
+
+                configured_platforms[p] = file
+        else:
+            default_platforms = [_platform(p, python_version) for p in DEFAULT_PLATFORMS]
+            plats = [
+                p
+                for p in default_platforms
+                if p not in configured_platforms
+            ]
+            if logger:
+                logger.debug(lambda: "File {} will be used for the remaining platforms {} that are not in configured_platforms: {}".format(
+                    file,
+                    plats,
+                    default_platforms,
+                ))
+            for p in plats:
+                configured_platforms[p] = file
+
+        if logger:
+            logger.debug(lambda: "Configured platforms for file {} are {}".format(file, plats))
+
+        for p in plats:
+            if p in requirements:
+                # This should never happen because in the code above we should
+                # have unambiguous selection of the requirements files.
+                fail_fn("Attempting to override a requirements file '{}' with '{}' for platform '{}'".format(
+                    requirements[p],
+                    file,
+                    p,
+                ))
+                return None
+            requirements[p] = file
+
+    # Now return a dict that is similar to requirements_by_platform - where we
+    # have labels/files as keys in the dict to minimize the number of times we
+    # may parse the same file.
+
+    ret = {}
+    for plat, file in requirements.items():
+        ret.setdefault(file, []).append(plat)
+
+    return ret
diff --git a/tests/pip_install/BUILD.bazel b/python/private/pypi/requirements_parser/BUILD.bazel
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to python/private/pypi/requirements_parser/BUILD.bazel
diff --git a/python/private/pypi/requirements_parser/resolve_target_platforms.py b/python/private/pypi/requirements_parser/resolve_target_platforms.py
new file mode 100755
index 0000000..c899a94
--- /dev/null
+++ b/python/private/pypi/requirements_parser/resolve_target_platforms.py
@@ -0,0 +1,63 @@
+"""A CLI to evaluate env markers for requirements files.
+
+A simple script to evaluate the `requirements.txt` files. Currently it is only
+handling environment markers in the requirements files, but in the future it
+may handle more things. We require a `python` interpreter that can run on the
+host platform and then we depend on the [packaging] PyPI wheel.
+
+In order to be able to resolve requirements files for any platform, we are
+re-using the same code that is used in the `whl_library` installer. See
+[here](../whl_installer/wheel.py).
+
+Requirements for the code are:
+- Depends only on `packaging` and core Python.
+- Produces the same result irrespective of the Python interpreter platform or version.
+
+[packaging]: https://packaging.pypa.io/en/stable/
+"""
+
+import argparse
+import json
+import pathlib
+
+from packaging.requirements import Requirement
+
+from python.private.pypi.whl_installer.platform import Platform
+
+INPUT_HELP = """\
+Input path to read the requirements as a json file, the keys in the dictionary
+are the requirements lines and the values are strings of target platforms.
+"""
+OUTPUT_HELP = """\
+Output to write the requirements as a json filepath, the keys in the dictionary
+are the requirements lines and the values are strings of target platforms, which
+got changed based on the evaluated markers.
+"""
+
+
+def main():
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument("input_path", type=pathlib.Path, help=INPUT_HELP.strip())
+    parser.add_argument("output_path", type=pathlib.Path, help=OUTPUT_HELP.strip())
+    args = parser.parse_args()
+
+    with args.input_path.open() as f:
+        reqs = json.load(f)
+
+    response = {}
+    for requirement_line, target_platforms in reqs.items():
+        entry, prefix, hashes = requirement_line.partition("--hash")
+        hashes = prefix + hashes
+
+        req = Requirement(entry)
+        for p in target_platforms:
+            (platform,) = Platform.from_string(p)
+            if not req.marker or req.marker.evaluate(platform.env_markers("")):
+                response.setdefault(requirement_line, []).append(p)
+
+    with args.output_path.open("w") as f:
+        json.dump(response, f)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python/private/pypi/simpleapi_download.bzl b/python/private/pypi/simpleapi_download.bzl
new file mode 100644
index 0000000..c730c20
--- /dev/null
+++ b/python/private/pypi/simpleapi_download.bzl
@@ -0,0 +1,204 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A file that houses private functions used in the `bzlmod` extension with the same name.
+"""
+
+load("@bazel_features//:features.bzl", "bazel_features")
+load("//python/private:auth.bzl", "get_auth")
+load("//python/private:envsubst.bzl", "envsubst")
+load("//python/private:normalize_name.bzl", "normalize_name")
+load(":parse_simpleapi_html.bzl", "parse_simpleapi_html")
+
+def simpleapi_download(ctx, *, attr, cache, parallel_download = True):
+    """Download Simple API HTML.
+
+    Args:
+        ctx: The module_ctx or repository_ctx.
+        attr: Contains the parameters for the download. They are grouped into a
+          struct for better clarity. It must have attributes:
+           * index_url: str, the index.
+           * index_url_overrides: dict[str, str], the index overrides for
+             separate packages.
+           * extra_index_urls: Extra index URLs that will be looked up after
+             the main is looked up.
+           * sources: list[str], the sources to download things for. Each value is
+             the contents of requirements files.
+           * envsubst: list[str], the envsubst vars for performing substitution in index url.
+           * netrc: The netrc parameter for ctx.download, see http_file for docs.
+           * auth_patterns: The auth_patterns parameter for ctx.download, see
+               http_file for docs.
+        cache: A dictionary that can be used as a cache between calls during a
+            single evaluation of the extension. We use a dictionary as a cache
+            so that we can reuse calls to the simple API when evaluating the
+            extension. Using the canonical_id parameter of the module_ctx would
+            deposit the simple API responses to the bazel cache and that is
+            undesirable because additions to the PyPI index would not be
+            reflected when re-evaluating the extension unless we do
+            `bazel clean --expunge`.
+        parallel_download: A boolean to enable usage of bazel 7.1 non-blocking downloads.
+
+    Returns:
+        dict of pkg name to the parsed HTML contents - a list of structs.
+    """
+    index_url_overrides = {
+        normalize_name(p): i
+        for p, i in (attr.index_url_overrides or {}).items()
+    }
+
+    download_kwargs = {}
+    if bazel_features.external_deps.download_has_block_param:
+        download_kwargs["block"] = not parallel_download
+
+    # NOTE @aignas 2024-03-31: we are not merging results from multiple indexes
+    # to replicate how `pip` would handle this case.
+    async_downloads = {}
+    contents = {}
+    index_urls = [attr.index_url] + attr.extra_index_urls
+    for pkg in attr.sources:
+        pkg_normalized = normalize_name(pkg)
+
+        success = False
+        for index_url in index_urls:
+            result = _read_simpleapi(
+                ctx = ctx,
+                url = "{}/{}/".format(
+                    index_url_overrides.get(pkg_normalized, index_url).rstrip("/"),
+                    pkg,
+                ),
+                attr = attr,
+                cache = cache,
+                **download_kwargs
+            )
+            if hasattr(result, "wait"):
+                # We will process it in a separate loop:
+                async_downloads.setdefault(pkg_normalized, []).append(
+                    struct(
+                        pkg_normalized = pkg_normalized,
+                        wait = result.wait,
+                    ),
+                )
+                continue
+
+            if result.success:
+                contents[pkg_normalized] = result.output
+                success = True
+                break
+
+        if not async_downloads and not success:
+            fail("Failed to download metadata from urls: {}".format(
+                ", ".join(index_urls),
+            ))
+
+    if not async_downloads:
+        return contents
+
+    # If we use `block` == False, then we need to have a second loop that is
+    # collecting all of the results as they were being downloaded in parallel.
+    for pkg, downloads in async_downloads.items():
+        success = False
+        for download in downloads:
+            result = download.wait()
+
+            if result.success and download.pkg_normalized not in contents:
+                contents[download.pkg_normalized] = result.output
+                success = True
+
+        if not success:
+            fail("Failed to download metadata from urls: {}".format(
+                ", ".join(index_urls),
+            ))
+
+    return contents
+
+def _read_simpleapi(ctx, url, attr, cache, **download_kwargs):
+    """Read SimpleAPI.
+
+    Args:
+        ctx: The module_ctx or repository_ctx.
+        url: str, the url parameter that can be passed to ctx.download.
+        attr: The attribute that contains necessary info for downloading. The
+          following attributes must be present:
+           * envsubst: The envsubst values for performing substitutions in the URL.
+           * netrc: The netrc parameter for ctx.download, see http_file for docs.
+           * auth_patterns: The auth_patterns parameter for ctx.download, see
+               http_file for docs.
+        cache: A dict for storing the results.
+        **download_kwargs: Any extra params to ctx.download.
+            Note that output and auth will be passed for you.
+
+    Returns:
+        A similar object to what `download` would return except that in result.out
+        will be the parsed simple api contents.
+    """
+    # NOTE @aignas 2024-03-31: some of the simple APIs use relative URLs for
+    # the whl location and we cannot handle multiple URLs at once by passing
+    # them to ctx.download if we want to correctly handle the relative URLs.
+    # TODO: Add a test that env subbed index urls do not leak into the lock file.
+
+    real_url = envsubst(
+        url,
+        attr.envsubst,
+        ctx.getenv if hasattr(ctx, "getenv") else ctx.os.environ.get,
+    )
+
+    cache_key = real_url
+    if cache_key in cache:
+        return struct(success = True, output = cache[cache_key])
+
+    output_str = envsubst(
+        url,
+        attr.envsubst,
+        # Use env names in the subst values - this will be unique over
+        # the lifetime of the execution of this function and we also use
+        # `~` as the separator to ensure that we don't get clashes.
+        {e: "~{}~".format(e) for e in attr.envsubst}.get,
+    )
+
+    # Transform the URL into a valid filename
+    for char in [".", ":", "/", "\\", "-"]:
+        output_str = output_str.replace(char, "_")
+
+    output = ctx.path(output_str.strip("_").lower() + ".html")
+
+    # NOTE: this may have block = True or block = False in the download_kwargs
+    download = ctx.download(
+        url = [real_url],
+        output = output,
+        auth = get_auth(ctx, [real_url], ctx_attr = attr),
+        allow_fail = True,
+        **download_kwargs
+    )
+
+    if download_kwargs.get("block") == False:
+        # Simulate the same API as ctx.download has
+        return struct(
+            wait = lambda: _read_index_result(ctx, download.wait(), output, real_url, cache, cache_key),
+        )
+
+    return _read_index_result(ctx, download, output, real_url, cache, cache_key)
+
+def _read_index_result(ctx, result, output, url, cache, cache_key):
+    if not result.success:
+        return struct(success = False)
+
+    content = ctx.read(output)
+
+    output = parse_simpleapi_html(url = url, content = content)
+    if output:
+        cache.setdefault(cache_key, output)
+        return struct(success = True, output = output, cache_key = cache_key)
+    else:
+        return struct(success = False)
diff --git a/python/private/pypi/whl_installer/BUILD.bazel b/python/private/pypi/whl_installer/BUILD.bazel
new file mode 100644
index 0000000..5bce1a5
--- /dev/null
+++ b/python/private/pypi/whl_installer/BUILD.bazel
@@ -0,0 +1,35 @@
+load("//python:defs.bzl", "py_binary", "py_library")
+
+py_library(
+    name = "lib",
+    srcs = [
+        "arguments.py",
+        "namespace_pkgs.py",
+        "platform.py",
+        "wheel.py",
+        "wheel_installer.py",
+    ],
+    visibility = [
+        "//:__subpackages__",
+    ],
+    deps = [
+        "@pypi__installer//:lib",
+        "@pypi__packaging//:lib",
+        "@pypi__pip//:lib",
+        "@pypi__setuptools//:lib",
+    ],
+)
+
+py_binary(
+    name = "wheel_installer",
+    srcs = [
+        "wheel_installer.py",
+    ],
+    deps = [":lib"],
+)
+
+filegroup(
+    name = "distribution",
+    srcs = glob(["*"]),
+    visibility = ["//:__subpackages__"],
+)
diff --git a/python/pip_install/tools/wheel_installer/arguments.py b/python/private/pypi/whl_installer/arguments.py
similarity index 96%
rename from python/pip_install/tools/wheel_installer/arguments.py
rename to python/private/pypi/whl_installer/arguments.py
index 71133c2..29bea80 100644
--- a/python/pip_install/tools/wheel_installer/arguments.py
+++ b/python/private/pypi/whl_installer/arguments.py
@@ -17,7 +17,7 @@
 import pathlib
 from typing import Any, Dict, Set
 
-from python.pip_install.tools.wheel_installer import wheel
+from python.private.pypi.whl_installer.platform import Platform
 
 
 def parser(**kwargs: Any) -> argparse.ArgumentParser:
@@ -44,7 +44,7 @@
     parser.add_argument(
         "--platform",
         action="extend",
-        type=wheel.Platform.from_string,
+        type=Platform.from_string,
         help="Platforms to target dependencies. Can be used multiple times.",
     )
     parser.add_argument(
diff --git a/python/pip_install/tools/wheel_installer/namespace_pkgs.py b/python/private/pypi/whl_installer/namespace_pkgs.py
similarity index 100%
rename from python/pip_install/tools/wheel_installer/namespace_pkgs.py
rename to python/private/pypi/whl_installer/namespace_pkgs.py
diff --git a/python/private/pypi/whl_installer/platform.py b/python/private/pypi/whl_installer/platform.py
new file mode 100644
index 0000000..83e42b0
--- /dev/null
+++ b/python/private/pypi/whl_installer/platform.py
@@ -0,0 +1,302 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for describing target platforms (OS, CPU architecture, Python minor version)"""
+
+import platform
+import sys
+from dataclasses import dataclass
+from enum import Enum
+from typing import Any, Dict, Iterator, List, Optional, Union
+
+
+class OS(Enum):
+    linux = 1
+    osx = 2
+    windows = 3
+    darwin = osx
+    win32 = windows
+
+    @classmethod
+    def interpreter(cls) -> "OS":
+        "Return the interpreter operating system."
+        return cls[sys.platform.lower()]
+
+    def __str__(self) -> str:
+        return self.name.lower()
+
+
+class Arch(Enum):
+    x86_64 = 1
+    x86_32 = 2
+    aarch64 = 3
+    ppc = 4
+    s390x = 5
+    arm = 6
+    amd64 = x86_64
+    arm64 = aarch64
+    i386 = x86_32
+    i686 = x86_32
+    x86 = x86_32
+    ppc64le = ppc
+
+    @classmethod
+    def interpreter(cls) -> "Arch":
+        "Return the currently running interpreter architecture."
+        # FIXME @aignas 2023-12-13: Hermetic toolchain on Windows 3.11.6
+        # is returning an empty string here, so lets default to x86_64
+        return cls[platform.machine().lower() or "x86_64"]
+
+    def __str__(self) -> str:
+        return self.name.lower()
+
+
+def _as_int(value: Optional[Union[OS, Arch]]) -> int:
+    """Convert one of the enums above to an int for easier sorting algorithms.
+
+    Args:
+        value: The value of an enum or None.
+
+    Returns:
+        -1 if we get None, otherwise, the numeric value of the given enum.
+    """
+    if value is None:
+        return -1
+
+    return int(value.value)
+
+
+def host_interpreter_minor_version() -> int:
+    return sys.version_info.minor
+
+
+@dataclass(frozen=True)
+class Platform:
+    os: Optional[OS] = None
+    arch: Optional[Arch] = None
+    minor_version: Optional[int] = None
+
+    @classmethod
+    def all(
+        cls,
+        want_os: Optional[OS] = None,
+        minor_version: Optional[int] = None,
+    ) -> List["Platform"]:
+        return sorted(
+            [
+                cls(os=os, arch=arch, minor_version=minor_version)
+                for os in OS
+                for arch in Arch
+                if not want_os or want_os == os
+            ]
+        )
+
+    @classmethod
+    def host(cls) -> List["Platform"]:
+        """Use the Python interpreter to detect the platform.
+
+        We extract `os` from sys.platform and `arch` from platform.machine
+
+        Returns:
+            A list of parsed values which makes the signature the same as
+            `Platform.all` and `Platform.from_string`.
+        """
+        return [
+            Platform(
+                os=OS.interpreter(),
+                arch=Arch.interpreter(),
+                minor_version=host_interpreter_minor_version(),
+            )
+        ]
+
+    def all_specializations(self) -> Iterator["Platform"]:
+        """Return the platform itself and all its unambiguous specializations.
+
+        For more info about specializations see
+        https://bazel.build/docs/configurable-attributes
+        """
+        yield self
+        if self.arch is None:
+            for arch in Arch:
+                yield Platform(os=self.os, arch=arch, minor_version=self.minor_version)
+        if self.os is None:
+            for os in OS:
+                yield Platform(os=os, arch=self.arch, minor_version=self.minor_version)
+        if self.arch is None and self.os is None:
+            for os in OS:
+                for arch in Arch:
+                    yield Platform(os=os, arch=arch, minor_version=self.minor_version)
+
+    def __lt__(self, other: Any) -> bool:
+        """Add a comparison method, so that `sorted` returns the most specialized platforms first."""
+        if not isinstance(other, Platform) or other is None:
+            raise ValueError(f"cannot compare {other} with Platform")
+
+        self_arch, self_os = _as_int(self.arch), _as_int(self.os)
+        other_arch, other_os = _as_int(other.arch), _as_int(other.os)
+
+        if self_os == other_os:
+            return self_arch < other_arch
+        else:
+            return self_os < other_os
+
+    def __str__(self) -> str:
+        if self.minor_version is None:
+            if self.os is None and self.arch is None:
+                return "//conditions:default"
+
+            if self.arch is None:
+                return f"@platforms//os:{self.os}"
+            else:
+                return f"{self.os}_{self.arch}"
+
+        if self.arch is None and self.os is None:
+            return f"@//python/config_settings:is_python_3.{self.minor_version}"
+
+        if self.arch is None:
+            return f"cp3{self.minor_version}_{self.os}_anyarch"
+
+        if self.os is None:
+            return f"cp3{self.minor_version}_anyos_{self.arch}"
+
+        return f"cp3{self.minor_version}_{self.os}_{self.arch}"
+
+    @classmethod
+    def from_string(cls, platform: Union[str, List[str]]) -> List["Platform"]:
+        """Parse a string and return a list of platforms"""
+        platform = [platform] if isinstance(platform, str) else list(platform)
+        ret = set()
+        for p in platform:
+            if p == "host":
+                ret.update(cls.host())
+                continue
+
+            abi, _, tail = p.partition("_")
+            if not abi.startswith("cp"):
+                # The first item is not an abi
+                tail = p
+                abi = ""
+            os, _, arch = tail.partition("_")
+            arch = arch or "*"
+
+            minor_version = int(abi[len("cp3") :]) if abi else None
+
+            if arch != "*":
+                ret.add(
+                    cls(
+                        os=OS[os] if os != "*" else None,
+                        arch=Arch[arch],
+                        minor_version=minor_version,
+                    )
+                )
+
+            else:
+                ret.update(
+                    cls.all(
+                        want_os=OS[os] if os != "*" else None,
+                        minor_version=minor_version,
+                    )
+                )
+
+        return sorted(ret)
+
+    # NOTE @aignas 2023-12-05: below is the minimum number of accessors that are defined in
+    # https://peps.python.org/pep-0496/ to make rules_python generate dependencies.
+    #
+    # WARNING: It may not work in cases where the python implementation is different between
+    # different platforms.
+
+    # derived from OS
+    @property
+    def os_name(self) -> str:
+        if self.os == OS.linux or self.os == OS.osx:
+            return "posix"
+        elif self.os == OS.windows:
+            return "nt"
+        else:
+            return ""
+
+    @property
+    def sys_platform(self) -> str:
+        if self.os == OS.linux:
+            return "linux"
+        elif self.os == OS.osx:
+            return "darwin"
+        elif self.os == OS.windows:
+            return "win32"
+        else:
+            return ""
+
+    @property
+    def platform_system(self) -> str:
+        if self.os == OS.linux:
+            return "Linux"
+        elif self.os == OS.osx:
+            return "Darwin"
+        elif self.os == OS.windows:
+            return "Windows"
+        else:
+            return ""
+
+    # derived from OS and Arch
+    @property
+    def platform_machine(self) -> str:
+        """Guess the target 'platform_machine' marker.
+
+        NOTE @aignas 2023-12-05: this may not work on really new systems, like
+        Windows if they define the platform markers in a different way.
+        """
+        if self.arch == Arch.x86_64:
+            return "x86_64"
+        elif self.arch == Arch.x86_32 and self.os != OS.osx:
+            return "i386"
+        elif self.arch == Arch.x86_32:
+            return ""
+        elif self.arch == Arch.aarch64 and self.os == OS.linux:
+            return "aarch64"
+        elif self.arch == Arch.aarch64:
+            # Assuming that OSX and Windows use this one since the precedent is set here:
+            # https://github.com/cgohlke/win_arm64-wheels
+            return "arm64"
+        elif self.os != OS.linux:
+            return ""
+        elif self.arch == Arch.ppc64le:
+            return "ppc64le"
+        elif self.arch == Arch.s390x:
+            return "s390x"
+        else:
+            return ""
+
+    def env_markers(self, extra: str) -> Dict[str, str]:
+        # If it is None, use the host version
+        minor_version = self.minor_version or host_interpreter_minor_version()
+
+        return {
+            "extra": extra,
+            "os_name": self.os_name,
+            "sys_platform": self.sys_platform,
+            "platform_machine": self.platform_machine,
+            "platform_system": self.platform_system,
+            "platform_release": "",  # unset
+            "platform_version": "",  # unset
+            "python_version": f"3.{minor_version}",
+            # FIXME @aignas 2024-01-14: is putting zero last a good idea? Maybe we should
+            # use `20` or something else to avoid having weird issues where the full version is used for
+            # matching and the author decides to only support 3.y.5 upwards.
+            "implementation_version": f"3.{minor_version}.0",
+            "python_full_version": f"3.{minor_version}.0",
+            # we assume that the following are the same as the interpreter used to setup the deps:
+            # "implementation_name": "cpython"
+            # "platform_python_implementation: "CPython",
+        }
diff --git a/python/private/pypi/whl_installer/wheel.py b/python/private/pypi/whl_installer/wheel.py
new file mode 100644
index 0000000..0f6bd27
--- /dev/null
+++ b/python/private/pypi/whl_installer/wheel.py
@@ -0,0 +1,383 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utility class to inspect an extracted wheel directory"""
+
+import email
+import re
+from collections import defaultdict
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Dict, List, Optional, Set, Tuple
+
+import installer
+from packaging.requirements import Requirement
+from pip._vendor.packaging.utils import canonicalize_name
+
+from python.private.pypi.whl_installer.platform import (
+    Platform,
+    host_interpreter_minor_version,
+)
+
+
+@dataclass(frozen=True)
+class FrozenDeps:
+    deps: List[str]
+    deps_select: Dict[str, List[str]]
+
+
+class Deps:
+    """Deps is a dependency builder that has a build() method to return FrozenDeps."""
+
+    def __init__(
+        self,
+        name: str,
+        requires_dist: List[str],
+        *,
+        extras: Optional[Set[str]] = None,
+        platforms: Optional[Set[Platform]] = None,
+    ):
+        """Create a new instance and parse the requires_dist
+
+        Args:
+            name (str): The name of the whl distribution
+            requires_dist (list[Str]): The Requires-Dist from the METADATA of the whl
+                distribution.
+            extras (set[str], optional): The list of requested extras, defaults to None.
+            platforms (set[Platform], optional): The list of target platforms, defaults to
+                None. If the list of platforms has multiple `minor_version` values, it
+                will change the code to generate the select statements using
+                `@rules_python//python/config_settings:is_python_3.y` conditions.
+        """
+        self.name: str = Deps._normalize(name)
+        self._platforms: Set[Platform] = platforms or set()
+        self._target_versions = {p.minor_version for p in platforms or {}}
+        self._default_minor_version = None
+        if platforms and len(self._target_versions) > 2:
+            # TODO @aignas 2024-06-23: enable this to be set via a CLI arg
+            # for being more explicit.
+            self._default_minor_version = host_interpreter_minor_version()
+
+        if None in self._target_versions and len(self._target_versions) > 2:
+            raise ValueError(
+                f"all python versions need to be specified explicitly, got: {platforms}"
+            )
+
+        # Sort so that the dictionary order in the FrozenDeps is deterministic
+        # without the final sort because Python retains insertion order. That way
+        # the sorting by platform is limited within the Platform class itself and
+        # the unit-tests for the Deps can be simpler.
+        reqs = sorted(
+            (Requirement(wheel_req) for wheel_req in requires_dist),
+            key=lambda x: f"{x.name}:{sorted(x.extras)}",
+        )
+
+        want_extras = self._resolve_extras(reqs, extras)
+
+        # Then add all of the requirements in order
+        self._deps: Set[str] = set()
+        self._select: Dict[Platform, Set[str]] = defaultdict(set)
+        for req in reqs:
+            self._add_req(req, want_extras)
+
+    def _add(self, dep: str, platform: Optional[Platform]):
+        dep = Deps._normalize(dep)
+
+        # Self-edges are processed in _resolve_extras
+        if dep == self.name:
+            return
+
+        if not platform:
+            self._deps.add(dep)
+
+            # If the dep is in the platform-specific list, remove it from the select.
+            pop_keys = []
+            for p, deps in self._select.items():
+                if dep not in deps:
+                    continue
+
+                deps.remove(dep)
+                if not deps:
+                    pop_keys.append(p)
+
+            for p in pop_keys:
+                self._select.pop(p)
+            return
+
+        if dep in self._deps:
+            # If the dep is already in the main dependency list, no need to add it in the
+            # platform-specific dependency list.
+            return
+
+        # Add the platform-specific dep
+        self._select[platform].add(dep)
+
+        # Add the dep to specializations of the given platform if they
+        # exist in the select statement.
+        for p in platform.all_specializations():
+            if p not in self._select:
+                continue
+
+            self._select[p].add(dep)
+
+        if len(self._select[platform]) == 1:
+            # We are adding a new item to the select and we need to ensure that
+            # existing dependencies from less specialized platforms are propagated
+            # to the newly added dependency set.
+            for p, deps in self._select.items():
+                # Check if the existing platform overlaps with the given platform
+                if p == platform or platform not in p.all_specializations():
+                    continue
+
+                self._select[platform].update(self._select[p])
+
+    def _maybe_add_common_dep(self, dep):
+        if len(self._target_versions) < 2:
+            return
+
+        platforms = [Platform()] + [
+            Platform(minor_version=v) for v in self._target_versions
+        ]
+
+        # If the dep is targeting all target python versions, lets add it to
+        # the common dependency list to simplify the select statements.
+        for p in platforms:
+            if p not in self._select:
+                return
+
+            if dep not in self._select[p]:
+                return
+
+        # All of the python version-specific branches have the dep, so lets add
+        # it to the common deps.
+        self._deps.add(dep)
+        for p in platforms:
+            self._select[p].remove(dep)
+            if not self._select[p]:
+                self._select.pop(p)
+
+    @staticmethod
+    def _normalize(name: str) -> str:
+        return re.sub(r"[-_.]+", "_", name).lower()
+
+    def _resolve_extras(
+        self, reqs: List[Requirement], extras: Optional[Set[str]]
+    ) -> Set[str]:
+        """Resolve extras which are due to depending on self[some_other_extra].
+
+        Some packages may have cyclic dependencies resulting from extras being used, one example is
+        `etils`, where we have one set of extras as aliases for other extras
+        and we have an extra called 'all' that includes all other extras.
+
+        Example: github.com/google/etils/blob/a0b71032095db14acf6b33516bca6d885fe09e35/pyproject.toml#L32.
+
+        When the `requirements.txt` is generated by `pip-tools`, then it is likely that
+        this step is not needed, but for other `requirements.txt` files this may be useful.
+
+        NOTE @aignas 2023-12-08: the extra resolution is not platform dependent,
+        but in order for it to become platform dependent we would have to have
+        separate targets for each extra in extras.
+        """
+
+        # Resolve any extra extras due to self-edges, empty string means no
+        # extras The empty string in the set is just a way to make the handling
+        # of no extras and a single extra easier and having a set of {"", "foo"}
+        # is equivalent to having {"foo"}.
+        extras = extras or {""}
+
+        self_reqs = []
+        for req in reqs:
+            if Deps._normalize(req.name) != self.name:
+                continue
+
+            if req.marker is None:
+                # I am pretty sure we cannot reach this code as it does not
+                # make sense to specify packages in this way, but since it is
+                # easy to handle, lets do it.
+                #
+                # TODO @aignas 2023-12-08: add a test
+                extras = extras | req.extras
+            else:
+                # process these in a separate loop
+                self_reqs.append(req)
+
+        # A double loop is not strictly optimal, but always correct without recursion
+        for req in self_reqs:
+            if any(req.marker.evaluate({"extra": extra}) for extra in extras):
+                extras = extras | req.extras
+            else:
+                continue
+
+            # Iterate through all packages to ensure that we include all of the extras from previously
+            # visited packages.
+            for req_ in self_reqs:
+                if any(req_.marker.evaluate({"extra": extra}) for extra in extras):
+                    extras = extras | req_.extras
+
+        return extras
+
+    def _add_req(self, req: Requirement, extras: Set[str]) -> None:
+        if req.marker is None:
+            self._add(req.name, None)
+            return
+
+        marker_str = str(req.marker)
+
+        if not self._platforms:
+            if any(req.marker.evaluate({"extra": extra}) for extra in extras):
+                self._add(req.name, None)
+            return
+
+        # NOTE @aignas 2023-12-08: in order to have reasonable select statements
+        # we do have to have some parsing of the markers, so it begs the question
+        # if packaging should be reimplemented in Starlark to have the best solution
+        # for now we will implement it in Python and see what the best parsing result
+        # can be before making this decision.
+        match_os = any(
+            tag in marker_str
+            for tag in [
+                "os_name",
+                "sys_platform",
+                "platform_system",
+            ]
+        )
+        match_arch = "platform_machine" in marker_str
+        match_version = "version" in marker_str
+
+        if not (match_os or match_arch or match_version):
+            if any(req.marker.evaluate({"extra": extra}) for extra in extras):
+                self._add(req.name, None)
+            return
+
+        for plat in self._platforms:
+            if not any(
+                req.marker.evaluate(plat.env_markers(extra)) for extra in extras
+            ):
+                continue
+
+            if match_arch and self._default_minor_version:
+                self._add(req.name, plat)
+                if plat.minor_version == self._default_minor_version:
+                    self._add(req.name, Platform(plat.os, plat.arch))
+            elif match_arch:
+                self._add(req.name, Platform(plat.os, plat.arch))
+            elif match_os and self._default_minor_version:
+                self._add(req.name, Platform(plat.os, minor_version=plat.minor_version))
+                if plat.minor_version == self._default_minor_version:
+                    self._add(req.name, Platform(plat.os))
+            elif match_os:
+                self._add(req.name, Platform(plat.os))
+            elif match_version and self._default_minor_version:
+                self._add(req.name, Platform(minor_version=plat.minor_version))
+                if plat.minor_version == self._default_minor_version:
+                    self._add(req.name, Platform())
+            elif match_version:
+                self._add(req.name, None)
+
+        # Merge to common if possible after processing all platforms
+        self._maybe_add_common_dep(req.name)
+
+    def build(self) -> FrozenDeps:
+        return FrozenDeps(
+            deps=sorted(self._deps),
+            deps_select={str(p): sorted(deps) for p, deps in self._select.items()},
+        )
+
+
+class Wheel:
+    """Representation of the compressed .whl file"""
+
+    def __init__(self, path: Path):
+        self._path = path
+
+    @property
+    def path(self) -> Path:
+        return self._path
+
+    @property
+    def name(self) -> str:
+        # TODO Also available as installer.sources.WheelSource.distribution
+        name = str(self.metadata["Name"])
+        return canonicalize_name(name)
+
+    @property
+    def metadata(self) -> email.message.Message:
+        with installer.sources.WheelFile.open(self.path) as wheel_source:
+            metadata_contents = wheel_source.read_dist_info("METADATA")
+            metadata = installer.utils.parse_metadata_file(metadata_contents)
+        return metadata
+
+    @property
+    def version(self) -> str:
+        # TODO Also available as installer.sources.WheelSource.version
+        return str(self.metadata["Version"])
+
+    def entry_points(self) -> Dict[str, Tuple[str, str]]:
+        """Returns the entrypoints defined in the current wheel
+
+        See https://packaging.python.org/specifications/entry-points/ for more info
+
+        Returns:
+            Dict[str, Tuple[str, str]]: A mapping of the entry point's name to its module and attribute
+        """
+        with installer.sources.WheelFile.open(self.path) as wheel_source:
+            if "entry_points.txt" not in wheel_source.dist_info_filenames:
+                return dict()
+
+            entry_points_mapping = dict()
+            entry_points_contents = wheel_source.read_dist_info("entry_points.txt")
+            entry_points = installer.utils.parse_entrypoints(entry_points_contents)
+            for script, module, attribute, script_section in entry_points:
+                if script_section == "console":
+                    entry_points_mapping[script] = (module, attribute)
+
+            return entry_points_mapping
+
+    def dependencies(
+        self,
+        extras_requested: Set[str] = None,
+        platforms: Optional[Set[Platform]] = None,
+    ) -> FrozenDeps:
+        return Deps(
+            self.name,
+            extras=extras_requested,
+            platforms=platforms,
+            requires_dist=self.metadata.get_all("Requires-Dist", []),
+        ).build()
+
+    def unzip(self, directory: str) -> None:
+        installation_schemes = {
+            "purelib": "/site-packages",
+            "platlib": "/site-packages",
+            "headers": "/include",
+            "scripts": "/bin",
+            "data": "/data",
+        }
+        destination = installer.destinations.SchemeDictionaryDestination(
+            installation_schemes,
+            # TODO Should entry_point scripts also be handled by installer rather than custom code?
+            interpreter="/dev/null",
+            script_kind="posix",
+            destdir=directory,
+            bytecode_optimization_levels=[],
+        )
+
+        with installer.sources.WheelFile.open(self.path) as wheel_source:
+            installer.install(
+                source=wheel_source,
+                destination=destination,
+                additional_metadata={
+                    "INSTALLER": b"https://github.com/bazelbuild/rules_python",
+                },
+            )
diff --git a/python/pip_install/tools/wheel_installer/wheel_installer.py b/python/private/pypi/whl_installer/wheel_installer.py
similarity index 98%
rename from python/pip_install/tools/wheel_installer/wheel_installer.py
rename to python/private/pypi/whl_installer/wheel_installer.py
index 801ef95..ef8181c 100644
--- a/python/pip_install/tools/wheel_installer/wheel_installer.py
+++ b/python/private/pypi/whl_installer/wheel_installer.py
@@ -27,7 +27,7 @@
 
 from pip._vendor.packaging.utils import canonicalize_name
 
-from python.pip_install.tools.wheel_installer import arguments, namespace_pkgs, wheel
+from python.private.pypi.whl_installer import arguments, namespace_pkgs, wheel
 
 
 def _configure_reproducible_wheels() -> None:
diff --git a/python/private/pypi/whl_library.bzl b/python/private/pypi/whl_library.bzl
new file mode 100644
index 0000000..2300eb3
--- /dev/null
+++ b/python/private/pypi/whl_library.bzl
@@ -0,0 +1,461 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("//python/private:auth.bzl", "AUTH_ATTRS", "get_auth")
+load("//python/private:envsubst.bzl", "envsubst")
+load("//python/private:python_repositories.bzl", "is_standalone_interpreter")
+load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "repo_utils")
+load(":attrs.bzl", "ATTRS", "use_isolated")
+load(":deps.bzl", "all_repo_names")
+load(":generate_whl_library_build_bazel.bzl", "generate_whl_library_build_bazel")
+load(":parse_whl_name.bzl", "parse_whl_name")
+load(":patch_whl.bzl", "patch_whl")
+load(":pypi_repo_utils.bzl", "pypi_repo_utils")
+load(":whl_target_platforms.bzl", "whl_target_platforms")
+
+_CPPFLAGS = "CPPFLAGS"
+_COMMAND_LINE_TOOLS_PATH_SLUG = "commandlinetools"
+_WHEEL_ENTRY_POINT_PREFIX = "rules_python_wheel_entry_point"
+
+def _get_xcode_location_cflags(rctx):
+    """Query the xcode sdk location to update cflags
+
+    Figure out if this interpreter target comes from rules_python, and patch the xcode sdk location if so.
+    Pip won't be able to compile c extensions from sdists with the pre built python distributions from indygreg
+    otherwise. See https://github.com/indygreg/python-build-standalone/issues/103
+    """
+
+    # Only run on MacOS hosts
+    if not rctx.os.name.lower().startswith("mac os"):
+        return []
+
+    xcode_sdk_location = repo_utils.execute_unchecked(
+        rctx,
+        op = "GetXcodeLocation",
+        arguments = [repo_utils.which_checked(rctx, "xcode-select"), "--print-path"],
+    )
+    if xcode_sdk_location.return_code != 0:
+        return []
+
+    xcode_root = xcode_sdk_location.stdout.strip()
+    if _COMMAND_LINE_TOOLS_PATH_SLUG not in xcode_root.lower():
+        # This is a full xcode installation somewhere like /Applications/Xcode13.0.app/Contents/Developer
+        # so we need to change the path to to the macos specific tools which are in a different relative
+        # path than xcode installed command line tools.
+        xcode_root = "{}/Platforms/MacOSX.platform/Developer".format(xcode_root)
+    return [
+        "-isysroot {}/SDKs/MacOSX.sdk".format(xcode_root),
+    ]
+
+def _get_toolchain_unix_cflags(rctx, python_interpreter, logger = None):
+    """Gather cflags from a standalone toolchain for unix systems.
+
+    Pip won't be able to compile c extensions from sdists with the pre built python distributions from indygreg
+    otherwise. See https://github.com/indygreg/python-build-standalone/issues/103
+    """
+
+    # Only run on Unix systems
+    if not rctx.os.name.lower().startswith(("mac os", "linux")):
+        return []
+
+    # Only update the location when using a standalone toolchain.
+    if not is_standalone_interpreter(rctx, python_interpreter, logger = logger):
+        return []
+
+    stdout = repo_utils.execute_checked_stdout(
+        rctx,
+        op = "GetPythonVersionForUnixCflags",
+        arguments = [
+            python_interpreter,
+            "-c",
+            "import sys; print(f'{sys.version_info[0]}.{sys.version_info[1]}', end='')",
+        ],
+    )
+    _python_version = stdout
+    include_path = "{}/include/python{}".format(
+        python_interpreter.dirname,
+        _python_version,
+    )
+
+    return ["-isystem {}".format(include_path)]
+
+def _parse_optional_attrs(rctx, args, extra_pip_args = None):
+    """Helper function to parse common attributes of pip_repository and whl_library repository rules.
+
+    This function also serializes the structured arguments as JSON
+    so they can be passed on the command line to subprocesses.
+
+    Args:
+        rctx: Handle to the rule repository context.
+        args: A list of parsed args for the rule.
+        extra_pip_args: The pip args to pass.
+    Returns: Augmented args list.
+    """
+
+    if use_isolated(rctx, rctx.attr):
+        args.append("--isolated")
+
+    # Bazel version 7.1.0 and later (and rolling releases from version 8.0.0-pre.20240128.3)
+    # support rctx.getenv(name, default): When building incrementally, any change to the value of
+    # the variable named by name will cause this repository to be re-fetched.
+    if "getenv" in dir(rctx):
+        getenv = rctx.getenv
+    else:
+        getenv = rctx.os.environ.get
+
+    # Check for None so we use empty default types from our attrs.
+    # Some args want to be list, and some want to be dict.
+    if extra_pip_args != None:
+        args += [
+            "--extra_pip_args",
+            json.encode(struct(arg = [
+                envsubst(pip_arg, rctx.attr.envsubst, getenv)
+                for pip_arg in extra_pip_args
+            ])),
+        ]
+
+    if rctx.attr.download_only:
+        args.append("--download_only")
+
+    if rctx.attr.pip_data_exclude != None:
+        args += [
+            "--pip_data_exclude",
+            json.encode(struct(arg = rctx.attr.pip_data_exclude)),
+        ]
+
+    if rctx.attr.enable_implicit_namespace_pkgs:
+        args.append("--enable_implicit_namespace_pkgs")
+
+    if rctx.attr.environment != None:
+        args += [
+            "--environment",
+            json.encode(struct(arg = rctx.attr.environment)),
+        ]
+
+    return args
+
+def _create_repository_execution_environment(rctx, python_interpreter, logger = None):
+    """Create a environment dictionary for processes we spawn with rctx.execute.
+
+    Args:
+        rctx (repository_ctx): The repository context.
+        python_interpreter (path): The resolved python interpreter.
+        logger: Optional logger to use for operations.
+    Returns:
+        Dictionary of environment variable suitable to pass to rctx.execute.
+    """
+
+    # Gather any available CPPFLAGS values
+    cppflags = []
+    cppflags.extend(_get_xcode_location_cflags(rctx))
+    cppflags.extend(_get_toolchain_unix_cflags(rctx, python_interpreter, logger = logger))
+
+    env = {
+        "PYTHONPATH": pypi_repo_utils.construct_pythonpath(
+            rctx,
+            entries = rctx.attr._python_path_entries,
+        ),
+        _CPPFLAGS: " ".join(cppflags),
+    }
+
+    return env
+
+def _whl_library_impl(rctx):
+    logger = repo_utils.logger(rctx)
+    python_interpreter = pypi_repo_utils.resolve_python_interpreter(
+        rctx,
+        python_interpreter = rctx.attr.python_interpreter,
+        python_interpreter_target = rctx.attr.python_interpreter_target,
+    )
+    args = [
+        python_interpreter,
+        "-m",
+        "python.private.pypi.whl_installer.wheel_installer",
+        "--requirement",
+        rctx.attr.requirement,
+    ]
+    extra_pip_args = []
+    extra_pip_args.extend(rctx.attr.extra_pip_args)
+
+    # Manually construct the PYTHONPATH since we cannot use the toolchain here
+    environment = _create_repository_execution_environment(rctx, python_interpreter, logger = logger)
+
+    whl_path = None
+    if rctx.attr.whl_file:
+        whl_path = rctx.path(rctx.attr.whl_file)
+
+        # Simulate the behaviour where the whl is present in the current directory.
+        rctx.symlink(whl_path, whl_path.basename)
+        whl_path = rctx.path(whl_path.basename)
+    elif rctx.attr.urls:
+        filename = rctx.attr.filename
+        urls = rctx.attr.urls
+        if not filename:
+            _, _, filename = urls[0].rpartition("/")
+
+        if not (filename.endswith(".whl") or filename.endswith("tar.gz") or filename.endswith(".zip")):
+            if rctx.attr.filename:
+                msg = "got '{}'".format(filename)
+            else:
+                msg = "detected '{}' from url:\n{}".format(filename, urls[0])
+            fail("Only '.whl', '.tar.gz' or '.zip' files are supported, {}".format(msg))
+
+        result = rctx.download(
+            url = urls,
+            output = filename,
+            sha256 = rctx.attr.sha256,
+            auth = get_auth(rctx, urls),
+        )
+
+        if not result.success:
+            fail("could not download the '{}' from {}:\n{}".format(filename, urls, result))
+
+        if filename.endswith(".whl"):
+            whl_path = rctx.path(rctx.attr.filename)
+        else:
+            # It is an sdist and we need to tell PyPI to use a file in this directory
+            # and not use any indexes.
+            extra_pip_args.extend(["--no-index", "--find-links", "."])
+
+    args = _parse_optional_attrs(rctx, args, extra_pip_args)
+
+    if not whl_path:
+        if rctx.attr.urls:
+            op_tmpl = "whl_library.BuildWheelFromSource({name}, {requirement})"
+        elif rctx.attr.download_only:
+            op_tmpl = "whl_library.DownloadWheel({name}, {requirement})"
+        else:
+            op_tmpl = "whl_library.ResolveRequirement({name}, {requirement})"
+
+        repo_utils.execute_checked(
+            rctx,
+            op = op_tmpl.format(name = rctx.attr.name, requirement = rctx.attr.requirement),
+            arguments = args,
+            environment = environment,
+            quiet = rctx.attr.quiet,
+            timeout = rctx.attr.timeout,
+            logger = logger,
+        )
+
+        whl_path = rctx.path(json.decode(rctx.read("whl_file.json"))["whl_file"])
+        if not rctx.delete("whl_file.json"):
+            fail("failed to delete the whl_file.json file")
+
+    if rctx.attr.whl_patches:
+        patches = {}
+        for patch_file, json_args in rctx.attr.whl_patches.items():
+            patch_dst = struct(**json.decode(json_args))
+            if whl_path.basename in patch_dst.whls:
+                patches[patch_file] = patch_dst.patch_strip
+
+        whl_path = patch_whl(
+            rctx,
+            op = "whl_library.PatchWhl({}, {})".format(rctx.attr.name, rctx.attr.requirement),
+            python_interpreter = python_interpreter,
+            whl_path = whl_path,
+            patches = patches,
+            quiet = rctx.attr.quiet,
+            timeout = rctx.attr.timeout,
+        )
+
+    target_platforms = rctx.attr.experimental_target_platforms
+    if target_platforms:
+        parsed_whl = parse_whl_name(whl_path.basename)
+        if parsed_whl.platform_tag != "any":
+            # NOTE @aignas 2023-12-04: if the wheel is a platform specific
+            # wheel, we only include deps for that target platform
+            target_platforms = [
+                p.target_platform
+                for p in whl_target_platforms(
+                    platform_tag = parsed_whl.platform_tag,
+                    abi_tag = parsed_whl.abi_tag,
+                )
+            ]
+
+    repo_utils.execute_checked(
+        rctx,
+        op = "whl_library.ExtractWheel({}, {})".format(rctx.attr.name, whl_path),
+        arguments = args + [
+            "--whl-file",
+            whl_path,
+        ] + ["--platform={}".format(p) for p in target_platforms],
+        environment = environment,
+        quiet = rctx.attr.quiet,
+        timeout = rctx.attr.timeout,
+        logger = logger,
+    )
+
+    metadata = json.decode(rctx.read("metadata.json"))
+    rctx.delete("metadata.json")
+
+    # NOTE @aignas 2024-06-22: this has to live on until we stop supporting
+    # passing `twine` as a `:pkg` library via the `WORKSPACE` builds.
+    #
+    # See ../../packaging.bzl line 190
+    entry_points = {}
+    for item in metadata["entry_points"]:
+        name = item["name"]
+        module = item["module"]
+        attribute = item["attribute"]
+
+        # There is an extreme edge-case with entry_points that end with `.py`
+        # See: https://github.com/bazelbuild/bazel/blob/09c621e4cf5b968f4c6cdf905ab142d5961f9ddc/src/test/java/com/google/devtools/build/lib/rules/python/PyBinaryConfiguredTargetTest.java#L174
+        entry_point_without_py = name[:-3] + "_py" if name.endswith(".py") else name
+        entry_point_target_name = (
+            _WHEEL_ENTRY_POINT_PREFIX + "_" + entry_point_without_py
+        )
+        entry_point_script_name = entry_point_target_name + ".py"
+
+        rctx.file(
+            entry_point_script_name,
+            _generate_entry_point_contents(module, attribute),
+        )
+        entry_points[entry_point_without_py] = entry_point_script_name
+
+    build_file_contents = generate_whl_library_build_bazel(
+        dep_template = rctx.attr.dep_template or "@{}{{name}}//:{{target}}".format(rctx.attr.repo_prefix),
+        whl_name = whl_path.basename,
+        dependencies = metadata["deps"],
+        dependencies_by_platform = metadata["deps_by_platform"],
+        group_name = rctx.attr.group_name,
+        group_deps = rctx.attr.group_deps,
+        data_exclude = rctx.attr.pip_data_exclude,
+        tags = [
+            "pypi_name=" + metadata["name"],
+            "pypi_version=" + metadata["version"],
+        ],
+        entry_points = entry_points,
+        annotation = None if not rctx.attr.annotation else struct(**json.decode(rctx.read(rctx.attr.annotation))),
+    )
+    rctx.file("BUILD.bazel", build_file_contents)
+
+    return
+
+def _generate_entry_point_contents(
+        module,
+        attribute,
+        shebang = "#!/usr/bin/env python3"):
+    """Generate the contents of an entry point script.
+
+    Args:
+        module (str): The name of the module to use.
+        attribute (str): The name of the attribute to call.
+        shebang (str, optional): The shebang to use for the entry point python
+            file.
+
+    Returns:
+        str: A string of python code.
+    """
+    contents = """\
+{shebang}
+import sys
+from {module} import {attribute}
+if __name__ == "__main__":
+    sys.exit({attribute}())
+""".format(
+        shebang = shebang,
+        module = module,
+        attribute = attribute,
+    )
+    return contents
+
+# NOTE @aignas 2024-03-21: The usage of dict({}, **common) ensures that all args to `dict` are unique
+whl_library_attrs = dict({
+    "annotation": attr.label(
+        doc = (
+            "Optional json encoded file containing annotation to apply to the extracted wheel. " +
+            "See `package_annotation`"
+        ),
+        allow_files = True,
+    ),
+    "dep_template": attr.string(
+        doc = """
+The dep template to use for referencing the dependencies. It should have `{name}`
+and `{target}` tokens that will be replaced with the normalized distribution name
+and the target that we need respectively.
+""",
+    ),
+    "filename": attr.string(
+        doc = "Download the whl file to this filename. Only used when the `urls` is passed. If not specified, will be auto-detected from the `urls`.",
+    ),
+    "group_deps": attr.string_list(
+        doc = "List of dependencies to skip in order to break the cycles within a dependency group.",
+        default = [],
+    ),
+    "group_name": attr.string(
+        doc = "Name of the group, if any.",
+    ),
+    "repo": attr.string(
+        mandatory = True,
+        doc = "Pointer to parent repo name. Used to make these rules rerun if the parent repo changes.",
+    ),
+    "repo_prefix": attr.string(
+        doc = """
+Prefix for the generated packages will be of the form `@<prefix><sanitized-package-name>//...`
+
+DEPRECATED. Only left for people who vendor requirements.bzl.
+""",
+    ),
+    "requirement": attr.string(
+        mandatory = True,
+        doc = "Python requirement string describing the package to make available, if 'urls' or 'whl_file' is given, then this only needs to include foo[any_extras] as a bare minimum.",
+    ),
+    "sha256": attr.string(
+        doc = "The sha256 of the downloaded whl. Only used when the `urls` is passed.",
+    ),
+    "urls": attr.string_list(
+        doc = """\
+The list of urls of the whl to be downloaded using bazel downloader. Using this
+attr makes `extra_pip_args` and `download_only` ignored.""",
+    ),
+    "whl_file": attr.label(
+        doc = "The whl file that should be used instead of downloading or building the whl.",
+    ),
+    "whl_patches": attr.label_keyed_string_dict(
+        doc = """a label-keyed-string dict that has
+            json.encode(struct([whl_file], patch_strip]) as values. This
+            is to maintain flexibility and correct bzlmod extension interface
+            until we have a better way to define whl_library and move whl
+            patching to a separate place. INTERNAL USE ONLY.""",
+    ),
+    "_python_path_entries": attr.label_list(
+        # Get the root directory of these rules and keep them as a default attribute
+        # in order to avoid unnecessary repository fetching restarts.
+        #
+        # This is very similar to what was done in https://github.com/bazelbuild/rules_go/pull/3478
+        default = [
+            Label("//:BUILD.bazel"),
+        ] + [
+            # Includes all the external dependencies from repositories.bzl
+            Label("@" + repo + "//:BUILD.bazel")
+            for repo in all_repo_names
+        ],
+    ),
+    "_rule_name": attr.string(default = "whl_library"),
+}, **ATTRS)
+whl_library_attrs.update(AUTH_ATTRS)
+
+whl_library = repository_rule(
+    attrs = whl_library_attrs,
+    doc = """
+Download and extracts a single wheel based into a bazel repo based on the requirement string passed in.
+Instantiated from pip_repository and inherits config options from there.""",
+    implementation = _whl_library_impl,
+    environ = [
+        "RULES_PYTHON_PIP_ISOLATED",
+        REPO_DEBUG_ENV_VAR,
+    ],
+)
diff --git a/python/private/pypi/whl_library_alias.bzl b/python/private/pypi/whl_library_alias.bzl
new file mode 100644
index 0000000..263d7ec
--- /dev/null
+++ b/python/private/pypi/whl_library_alias.bzl
@@ -0,0 +1,99 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""whl_library aliases for multi_pip_parse."""
+
+load("//python/private:full_version.bzl", "full_version")
+load(":render_pkg_aliases.bzl", "NO_MATCH_ERROR_MESSAGE_TEMPLATE")
+
+def _whl_library_alias_impl(rctx):
+    rules_python = rctx.attr._rules_python_workspace.workspace_name
+    if rctx.attr.default_version:
+        default_repo_prefix = rctx.attr.version_map[rctx.attr.default_version]
+    else:
+        default_repo_prefix = None
+    version_map = rctx.attr.version_map.items()
+    build_content = ["# Generated by python/pip.bzl"]
+    for alias_name in ["pkg", "whl", "data", "dist_info"]:
+        build_content.append(_whl_library_render_alias_target(
+            alias_name = alias_name,
+            default_repo_prefix = default_repo_prefix,
+            rules_python = rules_python,
+            version_map = version_map,
+            wheel_name = rctx.attr.wheel_name,
+        ))
+    rctx.file("BUILD.bazel", "\n".join(build_content))
+
+def _whl_library_render_alias_target(
+        alias_name,
+        default_repo_prefix,
+        rules_python,
+        version_map,
+        wheel_name):
+    alias = ["""\
+alias(
+    name = "{alias_name}",
+    actual = select({{""".format(alias_name = alias_name)]
+    for [python_version, repo_prefix] in version_map:
+        alias.append("""\
+        "@{rules_python}//python/config_settings:is_python_{full_python_version}": "{actual}",""".format(
+            full_python_version = full_version(python_version),
+            actual = "@{repo_prefix}{wheel_name}//:{alias_name}".format(
+                repo_prefix = repo_prefix,
+                wheel_name = wheel_name,
+                alias_name = alias_name,
+            ),
+            rules_python = rules_python,
+        ))
+    if default_repo_prefix:
+        default_actual = "@{repo_prefix}{wheel_name}//:{alias_name}".format(
+            repo_prefix = default_repo_prefix,
+            wheel_name = wheel_name,
+            alias_name = alias_name,
+        )
+        alias.append('        "//conditions:default": "{default_actual}",'.format(
+            default_actual = default_actual,
+        ))
+
+    alias.append("    },")  # Close select expression condition dict
+    if not default_repo_prefix:
+        supported_versions = sorted([python_version for python_version, _ in version_map])
+        alias.append('    no_match_error="""{}""",'.format(
+            NO_MATCH_ERROR_MESSAGE_TEMPLATE.format(
+                supported_versions = ", ".join(supported_versions),
+                rules_python = rules_python,
+            ),
+        ))
+    alias.append("    ),")  # Close the select expression
+    alias.append('    visibility = ["//visibility:public"],')
+    alias.append(")")  # Close the alias() expression
+    return "\n".join(alias)
+
+whl_library_alias = repository_rule(
+    _whl_library_alias_impl,
+    attrs = {
+        "default_version": attr.string(
+            mandatory = False,
+            doc = "Optional Python version in major.minor format, e.g. '3.10'." +
+                  "The Python version of the wheel to use when the versions " +
+                  "from `version_map` don't match. This allows the default " +
+                  "(version unaware) rules to match and select a wheel. If " +
+                  "not specified, then the default rules won't be able to " +
+                  "resolve a wheel and an error will occur.",
+        ),
+        "version_map": attr.string_dict(mandatory = True),
+        "wheel_name": attr.string(mandatory = True),
+        "_rules_python_workspace": attr.label(default = Label("//:WORKSPACE")),
+    },
+)
diff --git a/python/private/pypi/whl_repo_name.bzl b/python/private/pypi/whl_repo_name.bzl
new file mode 100644
index 0000000..295f5a4
--- /dev/null
+++ b/python/private/pypi/whl_repo_name.bzl
@@ -0,0 +1,52 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A function to convert a dist name to a valid bazel repo name.
+"""
+
+load("//python/private:normalize_name.bzl", "normalize_name")
+load(":parse_whl_name.bzl", "parse_whl_name")
+
+def whl_repo_name(prefix, filename, sha256):
+    """Return a valid whl_library repo name given a distribution filename.
+
+    Args:
+        prefix: str, the prefix of the whl_library.
+        filename: str, the filename of the distribution.
+        sha256: str, the sha256 of the distribution.
+
+    Returns:
+        a string that can be used in `whl_library`.
+    """
+    parts = [prefix]
+
+    if not filename.endswith(".whl"):
+        # Then the filename is basically foo-3.2.1.<ext>
+        parts.append(normalize_name(filename.rpartition("-")[0]))
+        parts.append("sdist")
+    else:
+        parsed = parse_whl_name(filename)
+        name = normalize_name(parsed.distribution)
+        python_tag, _, _ = parsed.python_tag.partition(".")
+        abi_tag, _, _ = parsed.abi_tag.partition(".")
+        platform_tag, _, _ = parsed.platform_tag.partition(".")
+
+        parts.append(name)
+        parts.append(python_tag)
+        parts.append(abi_tag)
+        parts.append(platform_tag)
+
+    parts.append(sha256[:8])
+
+    return "_".join(parts)
diff --git a/python/private/pypi/whl_target_platforms.bzl b/python/private/pypi/whl_target_platforms.bzl
new file mode 100644
index 0000000..bdc44c6
--- /dev/null
+++ b/python/private/pypi/whl_target_platforms.bzl
@@ -0,0 +1,241 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+A starlark implementation of the wheel platform tag parsing to get the target platform.
+"""
+
+load(":parse_whl_name.bzl", "parse_whl_name")
+
+# The order of the dictionaries is to keep definitions with their aliases next to each
+# other
+_CPU_ALIASES = {
+    "x86_32": "x86_32",
+    "i386": "x86_32",
+    "i686": "x86_32",
+    "x86": "x86_32",
+    "x86_64": "x86_64",
+    "amd64": "x86_64",
+    "aarch64": "aarch64",
+    "arm64": "aarch64",
+    "ppc": "ppc",
+    "ppc64": "ppc",
+    "ppc64le": "ppc",
+    "s390x": "s390x",
+    "arm": "arm",
+    "armv6l": "arm",
+    "armv7l": "arm",
+}  # buildifier: disable=unsorted-dict-items
+
+_OS_PREFIXES = {
+    "linux": "linux",
+    "manylinux": "linux",
+    "musllinux": "linux",
+    "macos": "osx",
+    "win": "windows",
+}  # buildifier: disable=unsorted-dict-items
+
+def select_whls(*, whls, want_platforms = [], logger = None):
+    """Select a subset of wheels suitable for target platforms from a list.
+
+    Args:
+        whls(list[struct]): A list of candidates which have a `filename`
+            attribute containing the `whl` filename.
+        want_platforms(str): The platforms in "{abi}_{os}_{cpu}" or "{os}_{cpu}" format.
+        logger: A logger for printing diagnostic messages.
+
+    Returns:
+        A filtered list of items from the `whls` arg where `filename` matches
+        the selected criteria. If no match is found, an empty list is returned.
+    """
+    if not whls:
+        return []
+
+    want_abis = {
+        "abi3": None,
+        "none": None,
+    }
+
+    _want_platforms = {}
+    version_limit = None
+
+    for p in want_platforms:
+        if not p.startswith("cp3"):
+            fail("expected all platforms to start with ABI, but got: {}".format(p))
+
+        abi, _, os_cpu = p.partition("_")
+        _want_platforms[os_cpu] = None
+        _want_platforms[p] = None
+
+        version_limit_candidate = int(abi[3:])
+        if not version_limit:
+            version_limit = version_limit_candidate
+        if version_limit and version_limit != version_limit_candidate:
+            fail("Only a single python version is supported for now")
+
+        # For some legacy implementations the wheels may target the `cp3xm` ABI
+        _want_platforms["{}m_{}".format(abi, os_cpu)] = None
+        want_abis[abi] = None
+        want_abis[abi + "m"] = None
+
+    want_platforms = sorted(_want_platforms)
+
+    candidates = {}
+    for whl in whls:
+        parsed = parse_whl_name(whl.filename)
+
+        if logger:
+            logger.trace(lambda: "Deciding whether to use '{}'".format(whl.filename))
+
+        supported_implementations = {}
+        whl_version_min = 0
+        for tag in parsed.python_tag.split("."):
+            supported_implementations[tag[:2]] = None
+
+            if tag.startswith("cp3") or tag.startswith("py3"):
+                version = int(tag[len("..3"):] or 0)
+            else:
+                # In this case it should be eithor "cp2" or "py2" and we will default
+                # to `whl_version_min` = 0
+                continue
+
+            if whl_version_min == 0 or version < whl_version_min:
+                whl_version_min = version
+
+        if not ("cp" in supported_implementations or "py" in supported_implementations):
+            if logger:
+                logger.trace(lambda: "Discarding the whl because the whl does not support CPython, whl supported implementations are: {}".format(supported_implementations))
+            continue
+
+        if want_abis and parsed.abi_tag not in want_abis:
+            # Filter out incompatible ABIs
+            if logger:
+                logger.trace(lambda: "Discarding the whl because the whl abi did not match")
+            continue
+
+        if whl_version_min > version_limit:
+            if logger:
+                logger.trace(lambda: "Discarding the whl because the whl supported python version is too high")
+            continue
+
+        compatible = False
+        if parsed.platform_tag == "any":
+            compatible = True
+        else:
+            for p in whl_target_platforms(parsed.platform_tag, abi_tag = parsed.abi_tag.strip("m") if parsed.abi_tag.startswith("cp") else None):
+                if p.target_platform in want_platforms:
+                    compatible = True
+                    break
+
+        if not compatible:
+            if logger:
+                logger.trace(lambda: "Discarding the whl because the whl does not support the desired platforms: {}".format(want_platforms))
+            continue
+
+        for implementation in supported_implementations:
+            candidates.setdefault(
+                (
+                    parsed.abi_tag,
+                    parsed.platform_tag,
+                ),
+                {},
+            ).setdefault(
+                (
+                    # prefer cp implementation
+                    implementation == "cp",
+                    # prefer higher versions
+                    whl_version_min,
+                    # prefer abi3 over none
+                    parsed.abi_tag != "none",
+                    # prefer cpx abi over abi3
+                    parsed.abi_tag != "abi3",
+                ),
+                [],
+            ).append(whl)
+
+    return [
+        candidates[key][sorted(v)[-1]][-1]
+        for key, v in candidates.items()
+    ]
+
+def whl_target_platforms(platform_tag, abi_tag = ""):
+    """Parse the wheel abi and platform tags and return (os, cpu) tuples.
+
+    Args:
+        platform_tag (str): The platform_tag part of the wheel name. See
+            ./parse_whl_name.bzl for more details.
+        abi_tag (str): The abi tag that should be used for parsing.
+
+    Returns:
+        A list of structs, with attributes:
+        * os: str, one of the _OS_PREFIXES values
+        * cpu: str, one of the _CPU_PREFIXES values
+        * abi: str, the ABI that the interpreter should have if it is passed.
+        * target_platform: str, the target_platform that can be given to the
+          wheel_installer for parsing whl METADATA.
+    """
+    cpus = _cpu_from_tag(platform_tag)
+
+    abi = None
+    if abi_tag not in ["", "none", "abi3"]:
+        abi = abi_tag
+
+    # TODO @aignas 2024-05-29: this code is present in many places, I think
+    _, _, tail = platform_tag.partition("_")
+    maybe_arch = tail
+    major, _, tail = tail.partition("_")
+    minor, _, tail = tail.partition("_")
+    if not tail or not major.isdigit() or not minor.isdigit():
+        tail = maybe_arch
+        major = 0
+        minor = 0
+
+    for prefix, os in _OS_PREFIXES.items():
+        if platform_tag.startswith(prefix):
+            return [
+                struct(
+                    os = os,
+                    cpu = cpu,
+                    abi = abi,
+                    version = (int(major), int(minor)),
+                    target_platform = "_".join([abi, os, cpu] if abi else [os, cpu]),
+                )
+                for cpu in cpus
+            ]
+
+    print("WARNING: ignoring unknown platform_tag os: {}".format(platform_tag))  # buildifier: disable=print
+    return []
+
+def _cpu_from_tag(tag):
+    candidate = [
+        cpu
+        for input, cpu in _CPU_ALIASES.items()
+        if tag.endswith(input)
+    ]
+    if candidate:
+        return candidate
+
+    if tag == "win32":
+        return ["x86_32"]
+    elif tag == "win_ia64":
+        return []
+    elif tag.startswith("macosx"):
+        if tag.endswith("universal2"):
+            return ["x86_64", "aarch64"]
+        elif tag.endswith("universal"):
+            return ["x86_64", "aarch64"]
+        elif tag.endswith("intel"):
+            return ["x86_32"]
+
+    return []
diff --git a/python/private/bzlmod/python.bzl b/python/private/python.bzl
similarity index 63%
rename from python/private/bzlmod/python.bzl
rename to python/private/python.bzl
index 3b59d5b..6a265d1 100644
--- a/python/private/bzlmod/python.bzl
+++ b/python/private/python.bzl
@@ -14,43 +14,28 @@
 
 "Python toolchain module extensions for use with bzlmod"
 
+load("@bazel_features//:features.bzl", "bazel_features")
 load("//python:repositories.bzl", "python_register_toolchains")
-load("//python/private:toolchains_repo.bzl", "multi_toolchain_aliases")
+load("//python:versions.bzl", "TOOL_VERSIONS")
+load("//python/private:repo_utils.bzl", "repo_utils")
 load(":pythons_hub.bzl", "hub_repo")
+load(":text_util.bzl", "render")
+load(":toolchains_repo.bzl", "multi_toolchain_aliases")
+load(":util.bzl", "IS_BAZEL_6_4_OR_HIGHER")
 
 # This limit can be increased essentially arbitrarily, but doing so will cause a rebuild of all
 # targets using any of these toolchains due to the changed repository name.
 _MAX_NUM_TOOLCHAINS = 9999
 _TOOLCHAIN_INDEX_PAD_LENGTH = len(str(_MAX_NUM_TOOLCHAINS))
 
-def _toolchain_prefix(index, name):
-    """Prefixes the given name with the index, padded with zeros to ensure lexicographic sorting.
-
-    Examples:
-      _toolchain_prefix(   2, "foo") == "_0002_foo_"
-      _toolchain_prefix(2000, "foo") == "_2000_foo_"
-    """
-    return "_{}_{}_".format(_left_pad_zero(index, _TOOLCHAIN_INDEX_PAD_LENGTH), name)
-
-def _left_pad_zero(index, length):
-    if index < 0:
-        fail("index must be non-negative")
-    return ("0" * length + str(index))[-length:]
-
-# Printing a warning msg not debugging, so we have to disable
-# the buildifier check.
-# buildifier: disable=print
-def _print_warn(msg):
-    print("WARNING:", msg)
-
-def _python_register_toolchains(name, toolchain_attr, module):
+def _python_register_toolchains(name, toolchain_attr, module, ignore_root_user_error):
     """Calls python_register_toolchains and returns a struct used to collect the toolchains.
     """
     python_register_toolchains(
         name = name,
         python_version = toolchain_attr.python_version,
         register_coverage_tool = toolchain_attr.configure_coverage_tool,
-        ignore_root_user_error = toolchain_attr.ignore_root_user_error,
+        ignore_root_user_error = ignore_root_user_error,
     )
     return struct(
         python_version = toolchain_attr.python_version,
@@ -59,6 +44,13 @@
     )
 
 def _python_impl(module_ctx):
+    if module_ctx.os.environ.get("RULES_PYTHON_BZLMOD_DEBUG", "0") == "1":
+        debug_info = {
+            "toolchains_registered": [],
+        }
+    else:
+        debug_info = None
+
     # The toolchain_info structs to register, in the order to register them in.
     # NOTE: The last element is special: it is treated as the default toolchain,
     # so there is special handling to ensure the last entry is the correct one.
@@ -72,10 +64,21 @@
     # Map of string Major.Minor to the toolchain_info struct
     global_toolchain_versions = {}
 
+    ignore_root_user_error = None
+
+    logger = repo_utils.logger(module_ctx, "python")
+
+    # if the root module does not register any toolchain then the
+    # ignore_root_user_error takes its default value: False
+    if not module_ctx.modules[0].tags.toolchain:
+        ignore_root_user_error = False
+
     for mod in module_ctx.modules:
         module_toolchain_versions = []
 
-        for toolchain_attr in mod.tags.toolchain:
+        toolchain_attr_structs = _create_toolchain_attr_structs(mod)
+
+        for toolchain_attr in toolchain_attr_structs:
             toolchain_version = toolchain_attr.python_version
             toolchain_name = "python_" + toolchain_version.replace(".", "_")
 
@@ -84,16 +87,25 @@
                 _fail_duplicate_module_toolchain_version(toolchain_version, mod.name)
             module_toolchain_versions.append(toolchain_version)
 
-            # Only the root module and rules_python are allowed to specify the default
-            # toolchain for a couple reasons:
-            # * It prevents submodules from specifying different defaults and only
-            #   one of them winning.
-            # * rules_python needs to set a soft default in case the root module doesn't,
-            #   e.g. if the root module doesn't use Python itself.
-            # * The root module is allowed to override the rules_python default.
             if mod.is_root:
-                # A single toolchain is treated as the default because it's unambiguous.
-                is_default = toolchain_attr.is_default or len(mod.tags.toolchain) == 1
+                # Only the root module and rules_python are allowed to specify the default
+                # toolchain for a couple reasons:
+                # * It prevents submodules from specifying different defaults and only
+                #   one of them winning.
+                # * rules_python needs to set a soft default in case the root module doesn't,
+                #   e.g. if the root module doesn't use Python itself.
+                # * The root module is allowed to override the rules_python default.
+                is_default = toolchain_attr.is_default
+
+                # Also only the root module should be able to decide ignore_root_user_error.
+                # Modules being depended upon don't know the final environment, so they aren't
+                # in the right position to know or decide what the correct setting is.
+
+                # If an inconsistency in the ignore_root_user_error among multiple toolchains is detected, fail.
+                if ignore_root_user_error != None and toolchain_attr.ignore_root_user_error != ignore_root_user_error:
+                    fail("Toolchains in the root module must have consistent 'ignore_root_user_error' attributes")
+
+                ignore_root_user_error = toolchain_attr.ignore_root_user_error
             elif mod.name == "rules_python" and not default_toolchain:
                 # We don't do the len() check because we want the default that rules_python
                 # sets to be clearly visible.
@@ -116,11 +128,14 @@
                 # version that rules_python provides as default.
                 first = global_toolchain_versions[toolchain_version]
                 if mod.name != "rules_python" or not first.module.is_root:
+                    # The warning can be enabled by setting the verbosity:
+                    # env RULES_PYTHON_REPO_DEBUG_VERBOSITY=INFO bazel build //...
                     _warn_duplicate_global_toolchain_version(
                         toolchain_version,
                         first = first,
                         second_toolchain_name = toolchain_name,
                         second_module_name = mod.name,
+                        logger = logger,
                     )
                 toolchain_info = None
             else:
@@ -128,8 +143,14 @@
                     toolchain_name,
                     toolchain_attr,
                     module = mod,
+                    ignore_root_user_error = ignore_root_user_error,
                 )
                 global_toolchain_versions[toolchain_version] = toolchain_info
+                if debug_info:
+                    debug_info["toolchains_registered"].append({
+                        "ignore_root_user_error": ignore_root_user_error,
+                        "name": toolchain_name,
+                    })
 
             if is_default:
                 # This toolchain is setting the default, but the actual
@@ -169,7 +190,7 @@
         name = "pythons_hub",
         default_python_version = default_toolchain.python_version,
         toolchain_prefixes = [
-            _toolchain_prefix(index, toolchain.name)
+            render.toolchain_prefix(index, toolchain.name, _TOOLCHAIN_INDEX_PAD_LENGTH)
             for index, toolchain in enumerate(toolchains)
         ],
         toolchain_python_versions = [t.python_version for t in toolchains],
@@ -192,6 +213,17 @@
         },
     )
 
+    if debug_info != None:
+        _debug_repo(
+            name = "rules_python_bzlmod_debug",
+            debug_info = json.encode_indent(debug_info),
+        )
+
+    if bazel_features.external_deps.extension_metadata_has_reproducible:
+        return module_ctx.extension_metadata(reproducible = True)
+    else:
+        return None
+
 def _fail_duplicate_module_toolchain_version(version, module):
     fail(("Duplicate module toolchain version: module '{module}' attempted " +
           "to use version '{version}' multiple times in itself").format(
@@ -199,11 +231,11 @@
         module = module,
     ))
 
-def _warn_duplicate_global_toolchain_version(version, first, second_toolchain_name, second_module_name):
-    _print_warn((
+def _warn_duplicate_global_toolchain_version(version, first, second_toolchain_name, second_module_name, logger):
+    logger.info(lambda: (
         "Ignoring toolchain '{second_toolchain}' from module '{second_module}': " +
         "Toolchain '{first_toolchain}' from module '{first_module}' " +
-        "already registered Python version {version} and has precedence"
+        "already registered Python version {version} and has precedence."
     ).format(
         first_toolchain = first.name,
         first_module = first.module.name,
@@ -220,11 +252,61 @@
         second = second,
     ))
 
+def _create_toolchain_attr_structs(mod):
+    arg_structs = []
+    seen_versions = {}
+    for tag in mod.tags.toolchain:
+        arg_structs.append(_create_toolchain_attrs_struct(tag = tag, toolchain_tag_count = len(mod.tags.toolchain)))
+        seen_versions[tag.python_version] = True
+
+    if mod.is_root:
+        register_all = False
+        for tag in mod.tags.rules_python_private_testing:
+            if tag.register_all_versions:
+                register_all = True
+                break
+        if register_all:
+            arg_structs.extend([
+                _create_toolchain_attrs_struct(python_version = v)
+                for v in TOOL_VERSIONS.keys()
+                if v not in seen_versions
+            ])
+    return arg_structs
+
+def _create_toolchain_attrs_struct(*, tag = None, python_version = None, toolchain_tag_count = None):
+    if tag and python_version:
+        fail("Only one of tag and python version can be specified")
+    if tag:
+        # A single toolchain is treated as the default because it's unambiguous.
+        is_default = tag.is_default or toolchain_tag_count == 1
+    else:
+        is_default = False
+
+    return struct(
+        is_default = is_default,
+        python_version = python_version if python_version else tag.python_version,
+        configure_coverage_tool = getattr(tag, "configure_coverage_tool", False),
+        ignore_root_user_error = getattr(tag, "ignore_root_user_error", False),
+    )
+
+def _get_bazel_version_specific_kwargs():
+    kwargs = {}
+
+    if IS_BAZEL_6_4_OR_HIGHER:
+        kwargs["environ"] = ["RULES_PYTHON_BZLMOD_DEBUG"]
+
+    return kwargs
+
 python = module_extension(
     doc = """Bzlmod extension that is used to register Python toolchains.
 """,
     implementation = _python_impl,
     tag_classes = {
+        "rules_python_private_testing": tag_class(
+            attrs = {
+                "register_all_versions": attr.bool(default = False),
+            },
+        ),
         "toolchain": tag_class(
             doc = """Tag class used to register Python toolchains.
 Use this tag class to register one or more Python toolchains. This class
@@ -263,7 +345,16 @@
                 ),
                 "ignore_root_user_error": attr.bool(
                     default = False,
-                    doc = "Whether the check for root should be ignored or not. This causes cache misses with .pyc files.",
+                    doc = """\
+If False, the Python runtime installation will be made read only. This improves
+the ability for Bazel to cache it, but prevents the interpreter from creating
+pyc files for the standard library dynamically at runtime as they are loaded.
+
+If True, the Python runtime installation is read-write. This allows the
+interpreter to create pyc files for the standard library, but, because they are
+created as needed, it adversely affects Bazel's ability to cache the runtime and
+can result in spurious build failures.
+""",
                     mandatory = False,
                 ),
                 "is_default": attr.bool(
@@ -279,4 +370,23 @@
             },
         ),
     },
+    **_get_bazel_version_specific_kwargs()
+)
+
+_DEBUG_BUILD_CONTENT = """
+package(
+    default_visibility = ["//visibility:public"],
+)
+exports_files(["debug_info.json"])
+"""
+
+def _debug_repo_impl(repo_ctx):
+    repo_ctx.file("BUILD.bazel", _DEBUG_BUILD_CONTENT)
+    repo_ctx.file("debug_info.json", repo_ctx.attr.debug_info)
+
+_debug_repo = repository_rule(
+    implementation = _debug_repo_impl,
+    attrs = {
+        "debug_info": attr.string(),
+    },
 )
diff --git a/python/private/python_bootstrap_template.txt b/python/private/python_bootstrap_template.txt
index 92dd6b8..0f9c90b 100644
--- a/python/private/python_bootstrap_template.txt
+++ b/python/private/python_bootstrap_template.txt
@@ -20,6 +20,7 @@
 
 import os
 import subprocess
+import uuid
 
 def IsRunningFromZip():
   return %is_zipfile%
@@ -88,11 +89,19 @@
   """Finds the real Python binary if it's not a normal absolute path."""
   return FindBinary(module_space, PYTHON_BINARY)
 
+def PrintVerbose(*args):
+  if os.environ.get("RULES_PYTHON_BOOTSTRAP_VERBOSE"):
+    print("bootstrap:", *args, file=sys.stderr, flush=True)
+
 def PrintVerboseCoverage(*args):
   """Print output if VERBOSE_COVERAGE is non-empty in the environment."""
   if os.environ.get("VERBOSE_COVERAGE"):
     print(*args, file=sys.stderr)
 
+def IsVerboseCoverage():
+  """Returns True if VERBOSE_COVERAGE is non-empty in the environment."""
+  return os.environ.get("VERBOSE_COVERAGE")
+
 def FindCoverageEntryPoint(module_space):
   cov_tool = '%coverage_tool%'
   if cov_tool:
@@ -371,7 +380,10 @@
   # type: (str, str, list[str], dict[str, str]) -> ...
   """Executes the given Python file using the various environment settings."""
   os.environ.update(env)
-  os.execv(python_program, [python_program, main_filename] + args)
+  PrintVerbose("RunExecv: environ:", os.environ)
+  argv = [python_program, main_filename] + args
+  PrintVerbose("RunExecv: argv:", python_program, argv)
+  os.execv(python_program, argv)
 
 def _RunForCoverage(python_program, main_filename, args, env,
                     coverage_entrypoint, workspace):
@@ -389,8 +401,8 @@
         runfiles directory if set.
   """
   # We need for coveragepy to use relative paths.  This can only be configured
-  # via an rc file, so we need to make one.
-  rcfile_name = os.path.join(os.environ['COVERAGE_DIR'], '.coveragerc')
+  unique_id = uuid.uuid4()
+  rcfile_name = os.path.join(os.environ['COVERAGE_DIR'], ".coveragerc_{}".format(unique_id))
   with open(rcfile_name, "w") as rcfile:
     rcfile.write('''[run]
 relative_files = True
@@ -415,18 +427,20 @@
 
   PrintVerboseCoverage('Converting coveragepy database to lcov:', output_filename)
   # Run coveragepy again to convert its .coverage database file into lcov.
+  # Under normal conditions running lcov outputs to stdout/stderr, which causes problems for `coverage`.
+  params = [python_program, coverage_entrypoint, "lcov", "--rcfile=" + rcfile_name, "-o", output_filename, "--quiet"]
+  kparams = {"env": env, "cwd": workspace, "stdout": subprocess.DEVNULL, "stderr": subprocess.DEVNULL}
+  if IsVerboseCoverage():
+    # reconnect stdout/stderr to lcov generation.  Should be useful for debugging `coverage` issues.
+    params.remove("--quiet")
+    kparams['stdout'] = sys.stderr
+    kparams['stderr'] = sys.stderr
+
   ret_code = subprocess.call(
-    [
-      python_program,
-      coverage_entrypoint,
-      "lcov",
-      "--rcfile=" + rcfile_name,
-      "-o",
-      output_filename
-    ],
-    env=env,
-    cwd=workspace
+    params,
+    **kparams
   ) or ret_code
+
   try:
     os.unlink(rcfile_name)
   except OSError as err:
@@ -528,6 +542,13 @@
   else:
     cov_tool = None
 
+  # Some older Python versions on macOS (namely Python 3.7) may unintentionally
+  # leave this environment variable set after starting the interpreter, which
+  # causes problems with Python subprocesses correctly locating sys.executable,
+  # which subsequently causes failure to launch on Python 3.11 and later.
+  if '__PYVENV_LAUNCHER__' in os.environ:
+    del os.environ['__PYVENV_LAUNCHER__']
+
   new_env.update((key, val) for key, val in os.environ.items() if key not in new_env)
 
   workspace = None
diff --git a/python/private/python_repositories.bzl b/python/private/python_repositories.bzl
new file mode 100644
index 0000000..25d8a96
--- /dev/null
+++ b/python/private/python_repositories.bzl
@@ -0,0 +1,749 @@
+# Copyright 2022 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This file contains macros to be called during WORKSPACE evaluation.
+
+For historic reasons, pip_repositories() is defined in //python:pip.bzl.
+"""
+
+load("@bazel_tools//tools/build_defs/repo:http.bzl", _http_archive = "http_archive")
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+load(
+    "//python:versions.bzl",
+    "DEFAULT_RELEASE_BASE_URL",
+    "PLATFORMS",
+    "TOOL_VERSIONS",
+    "get_release_info",
+)
+load("//python/private/pypi:deps.bzl", "pypi_deps")
+load(":auth.bzl", "get_auth")
+load(":bzlmod_enabled.bzl", "BZLMOD_ENABLED")
+load(":coverage_deps.bzl", "coverage_dep")
+load(":full_version.bzl", "full_version")
+load(":internal_config_repo.bzl", "internal_config_repo")
+load(":repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "repo_utils")
+load(
+    ":toolchains_repo.bzl",
+    "host_toolchain",
+    "multi_toolchain_aliases",
+    "toolchain_aliases",
+    "toolchains_repo",
+)
+
+def http_archive(**kwargs):
+    maybe(_http_archive, **kwargs)
+
+def py_repositories():
+    """Runtime dependencies that users must install.
+
+    This function should be loaded and called in the user's WORKSPACE.
+    With bzlmod enabled, this function is not needed since MODULE.bazel handles transitive deps.
+    """
+    maybe(
+        internal_config_repo,
+        name = "rules_python_internal",
+    )
+    http_archive(
+        name = "bazel_skylib",
+        sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
+        urls = [
+            "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+            "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
+        ],
+    )
+    http_archive(
+        name = "rules_cc",
+        urls = ["https://github.com/bazelbuild/rules_cc/releases/download/0.0.9/rules_cc-0.0.9.tar.gz"],
+        sha256 = "2037875b9a4456dce4a79d112a8ae885bbc4aad968e6587dca6e64f3a0900cdf",
+        strip_prefix = "rules_cc-0.0.9",
+    )
+    pypi_deps()
+
+########
+# Remaining content of the file is only used to support toolchains.
+########
+
+STANDALONE_INTERPRETER_FILENAME = "STANDALONE_INTERPRETER"
+
+def is_standalone_interpreter(rctx, python_interpreter_path, *, logger = None):
+    """Query a python interpreter target for whether or not it's a rules_rust provided toolchain
+
+    Args:
+        rctx (repository_ctx): The repository rule's context object.
+        python_interpreter_path (path): A path representing the interpreter.
+        logger: Optional logger to use for operations.
+
+    Returns:
+        bool: Whether or not the target is from a rules_python generated toolchain.
+    """
+
+    # Only update the location when using a hermetic toolchain.
+    if not python_interpreter_path:
+        return False
+
+    # This is a rules_python provided toolchain.
+    return repo_utils.execute_unchecked(
+        rctx,
+        op = "IsStandaloneInterpreter",
+        arguments = [
+            "ls",
+            "{}/{}".format(
+                python_interpreter_path.dirname,
+                STANDALONE_INTERPRETER_FILENAME,
+            ),
+        ],
+        logger = logger,
+    ).return_code == 0
+
+def _python_repository_impl(rctx):
+    if rctx.attr.distutils and rctx.attr.distutils_content:
+        fail("Only one of (distutils, distutils_content) should be set.")
+    if bool(rctx.attr.url) == bool(rctx.attr.urls):
+        fail("Exactly one of (url, urls) must be set.")
+
+    logger = repo_utils.logger(rctx)
+
+    platform = rctx.attr.platform
+    python_version = rctx.attr.python_version
+    python_version_info = python_version.split(".")
+    python_short_version = "{0}.{1}".format(*python_version_info)
+    release_filename = rctx.attr.release_filename
+    urls = rctx.attr.urls or [rctx.attr.url]
+    auth = get_auth(rctx, urls)
+
+    if release_filename.endswith(".zst"):
+        rctx.download(
+            url = urls,
+            sha256 = rctx.attr.sha256,
+            output = release_filename,
+            auth = auth,
+        )
+        unzstd = rctx.which("unzstd")
+        if not unzstd:
+            url = rctx.attr.zstd_url.format(version = rctx.attr.zstd_version)
+            rctx.download_and_extract(
+                url = url,
+                sha256 = rctx.attr.zstd_sha256,
+                auth = auth,
+            )
+            working_directory = "zstd-{version}".format(version = rctx.attr.zstd_version)
+
+            repo_utils.execute_checked(
+                rctx,
+                op = "python_repository.MakeZstd",
+                arguments = [
+                    repo_utils.which_checked(rctx, "make"),
+                    "--jobs=4",
+                ],
+                timeout = 600,
+                quiet = True,
+                working_directory = working_directory,
+                logger = logger,
+            )
+            zstd = "{working_directory}/zstd".format(working_directory = working_directory)
+            unzstd = "./unzstd"
+            rctx.symlink(zstd, unzstd)
+
+        repo_utils.execute_checked(
+            rctx,
+            op = "python_repository.ExtractRuntime",
+            arguments = [
+                repo_utils.which_checked(rctx, "tar"),
+                "--extract",
+                "--strip-components=2",
+                "--use-compress-program={unzstd}".format(unzstd = unzstd),
+                "--file={}".format(release_filename),
+            ],
+            logger = logger,
+        )
+    else:
+        rctx.download_and_extract(
+            url = urls,
+            sha256 = rctx.attr.sha256,
+            stripPrefix = rctx.attr.strip_prefix,
+            auth = auth,
+        )
+
+    patches = rctx.attr.patches
+    if patches:
+        for patch in patches:
+            # Should take the strip as an attr, but this is fine for the moment
+            rctx.patch(patch, strip = 1)
+
+    # Write distutils.cfg to the Python installation.
+    if "windows" in platform:
+        distutils_path = "Lib/distutils/distutils.cfg"
+    else:
+        distutils_path = "lib/python{}/distutils/distutils.cfg".format(python_short_version)
+    if rctx.attr.distutils:
+        rctx.file(distutils_path, rctx.read(rctx.attr.distutils))
+    elif rctx.attr.distutils_content:
+        rctx.file(distutils_path, rctx.attr.distutils_content)
+
+    if "darwin" in platform and "osx" == repo_utils.get_platforms_os_name(rctx):
+        # Fix up the Python distribution's LC_ID_DYLIB field.
+        # It points to a build directory local to the GitHub Actions
+        # host machine used in the Python standalone build, which causes
+        # dyld lookup errors. To fix, set the full path to the dylib as
+        # it appears in the Bazel workspace as its LC_ID_DYLIB using
+        # the `install_name_tool` bundled with macOS.
+        dylib = "lib/libpython{}.dylib".format(python_short_version)
+        full_dylib_path = rctx.path(dylib)
+        repo_utils.execute_checked(
+            rctx,
+            op = "python_repository.FixUpDyldIdPath",
+            arguments = [repo_utils.which_checked(rctx, "install_name_tool"), "-id", full_dylib_path, dylib],
+            logger = logger,
+        )
+
+    # Make the Python installation read-only. This is to prevent issues due to
+    # pycs being generated at runtime:
+    # * The pycs are not deterministic (they contain timestamps)
+    # * Multiple processes trying to write the same pycs can result in errors.
+    if not rctx.attr.ignore_root_user_error:
+        if "windows" not in platform:
+            lib_dir = "lib" if "windows" not in platform else "Lib"
+
+            repo_utils.execute_checked(
+                rctx,
+                op = "python_repository.MakeReadOnly",
+                arguments = [repo_utils.which_checked(rctx, "chmod"), "-R", "ugo-w", lib_dir],
+                logger = logger,
+            )
+            exec_result = repo_utils.execute_unchecked(
+                rctx,
+                op = "python_repository.TestReadOnly",
+                arguments = [repo_utils.which_checked(rctx, "touch"), "{}/.test".format(lib_dir)],
+                logger = logger,
+            )
+
+            # The issue with running as root is the installation is no longer
+            # read-only, so the problems due to pyc can resurface.
+            if exec_result.return_code == 0:
+                stdout = repo_utils.execute_checked_stdout(
+                    rctx,
+                    op = "python_repository.GetUserId",
+                    arguments = [repo_utils.which_checked(rctx, "id"), "-u"],
+                    logger = logger,
+                )
+                uid = int(stdout.strip())
+                if uid == 0:
+                    fail("The current user is root, please run as non-root when using the hermetic Python interpreter. See https://github.com/bazelbuild/rules_python/pull/713.")
+                else:
+                    fail("The current user has CAP_DAC_OVERRIDE set, please drop this capability when using the hermetic Python interpreter. See https://github.com/bazelbuild/rules_python/pull/713.")
+
+    python_bin = "python.exe" if ("windows" in platform) else "bin/python3"
+
+    glob_include = []
+    glob_exclude = [
+        "**/* *",  # Bazel does not support spaces in file names.
+        # Unused shared libraries. `python` executable and the `:libpython` target
+        # depend on `libpython{python_version}.so.1.0`.
+        "lib/libpython{python_version}.so".format(python_version = python_short_version),
+        # static libraries
+        "lib/**/*.a",
+        # tests for the standard libraries.
+        "lib/python{python_version}/**/test/**".format(python_version = python_short_version),
+        "lib/python{python_version}/**/tests/**".format(python_version = python_short_version),
+        "**/__pycache__/*.pyc.*",  # During pyc creation, temp files named *.pyc.NNN are created
+    ]
+
+    if "linux" in platform:
+        # Workaround around https://github.com/indygreg/python-build-standalone/issues/231
+        for url in urls:
+            head_and_release, _, _ = url.rpartition("/")
+            _, _, release = head_and_release.rpartition("/")
+            if not release.isdigit():
+                # Maybe this is some custom toolchain, so skip this
+                break
+
+            if int(release) >= 20240224:
+                # Starting with this release the Linux toolchains have infinite symlink loop
+                # on host platforms that are not Linux. Delete the files no
+                # matter the host platform so that the cross-built artifacts
+                # are the same irrespective of the host platform we are
+                # building on.
+                #
+                # Link to the first affected release:
+                # https://github.com/indygreg/python-build-standalone/releases/tag/20240224
+                rctx.delete("share/terminfo")
+                break
+
+    if rctx.attr.ignore_root_user_error or "windows" in platform:
+        glob_exclude += [
+            # These pycache files are created on first use of the associated python files.
+            # Exclude them from the glob because otherwise between the first time and second time a python toolchain is used,"
+            # the definition of this filegroup will change, and depending rules will get invalidated."
+            # See https://github.com/bazelbuild/rules_python/issues/1008 for unconditionally adding these to toolchains so we can stop ignoring them."
+            "**/__pycache__/*.pyc",
+            "**/__pycache__/*.pyo",
+        ]
+
+    if "windows" in platform:
+        glob_include += [
+            "*.exe",
+            "*.dll",
+            "bin/**",
+            "DLLs/**",
+            "extensions/**",
+            "include/**",
+            "Lib/**",
+            "libs/**",
+            "Scripts/**",
+            "share/**",
+            "tcl/**",
+        ]
+    else:
+        glob_include += [
+            "bin/**",
+            "extensions/**",
+            "include/**",
+            "lib/**",
+            "libs/**",
+            "share/**",
+        ]
+
+    if rctx.attr.coverage_tool:
+        if "windows" in platform:
+            coverage_tool = None
+        else:
+            coverage_tool = '"{}"'.format(rctx.attr.coverage_tool)
+
+        coverage_attr_text = """\
+    coverage_tool = select({{
+        ":coverage_enabled": {coverage_tool},
+        "//conditions:default": None
+    }}),
+""".format(coverage_tool = coverage_tool)
+    else:
+        coverage_attr_text = "    # coverage_tool attribute not supported by this Bazel version"
+
+    build_content = """\
+# Generated by python/repositories.bzl
+
+load("@rules_python//python:py_runtime.bzl", "py_runtime")
+load("@rules_python//python:py_runtime_pair.bzl", "py_runtime_pair")
+load("@rules_python//python/cc:py_cc_toolchain.bzl", "py_cc_toolchain")
+load("@rules_python//python/private:py_exec_tools_toolchain.bzl", "py_exec_tools_toolchain")
+
+package(default_visibility = ["//visibility:public"])
+
+filegroup(
+    name = "files",
+    srcs = glob(
+        include = {glob_include},
+        # Platform-agnostic filegroup can't match on all patterns.
+        allow_empty = True,
+        exclude = {glob_exclude},
+    ),
+)
+
+cc_import(
+    name = "interface",
+    interface_library = "libs/python{python_version_nodot}.lib",
+    system_provided = True,
+)
+
+filegroup(
+    name = "includes",
+    srcs = glob(["include/**/*.h"]),
+)
+
+cc_library(
+    name = "python_headers",
+    deps = select({{
+        "@bazel_tools//src/conditions:windows": [":interface"],
+        "//conditions:default": None,
+    }}),
+    hdrs = [":includes"],
+    includes = [
+        "include",
+        "include/python{python_version}",
+        "include/python{python_version}m",
+    ],
+)
+
+cc_library(
+    name = "libpython",
+    hdrs = [":includes"],
+    srcs = select({{
+        "@platforms//os:windows": ["python3.dll", "libs/python{python_version_nodot}.lib"],
+        "@platforms//os:macos": ["lib/libpython{python_version}.dylib"],
+        "@platforms//os:linux": ["lib/libpython{python_version}.so", "lib/libpython{python_version}.so.1.0"],
+    }}),
+)
+
+exports_files(["python", "{python_path}"])
+
+# Used to only download coverage toolchain when the coverage is collected by
+# bazel.
+config_setting(
+    name = "coverage_enabled",
+    values = {{"collect_code_coverage": "true"}},
+    visibility = ["//visibility:private"],
+)
+
+py_runtime(
+    name = "py3_runtime",
+    files = [":files"],
+{coverage_attr}
+    interpreter = "{python_path}",
+    interpreter_version_info = {{
+        "major": "{interpreter_version_info_major}",
+        "minor": "{interpreter_version_info_minor}",
+        "micro": "{interpreter_version_info_micro}",
+    }},
+    python_version = "PY3",
+    implementation_name = 'cpython',
+    pyc_tag = "cpython-{interpreter_version_info_major}{interpreter_version_info_minor}",
+)
+
+py_runtime_pair(
+    name = "python_runtimes",
+    py2_runtime = None,
+    py3_runtime = ":py3_runtime",
+)
+
+py_cc_toolchain(
+    name = "py_cc_toolchain",
+    headers = ":python_headers",
+    libs = ":libpython",
+    python_version = "{python_version}",
+)
+
+py_exec_tools_toolchain(
+    name = "py_exec_tools_toolchain",
+    precompiler = "@rules_python//tools/precompiler:precompiler",
+)
+""".format(
+        glob_exclude = repr(glob_exclude),
+        glob_include = repr(glob_include),
+        python_path = python_bin,
+        python_version = python_short_version,
+        python_version_nodot = python_short_version.replace(".", ""),
+        coverage_attr = coverage_attr_text,
+        interpreter_version_info_major = python_version_info[0],
+        interpreter_version_info_minor = python_version_info[1],
+        interpreter_version_info_micro = python_version_info[2],
+    )
+    rctx.delete("python")
+    rctx.symlink(python_bin, "python")
+    rctx.file(STANDALONE_INTERPRETER_FILENAME, "# File intentionally left blank. Indicates that this is an interpreter repo created by rules_python.")
+    rctx.file("BUILD.bazel", build_content)
+
+    attrs = {
+        "auth_patterns": rctx.attr.auth_patterns,
+        "coverage_tool": rctx.attr.coverage_tool,
+        "distutils": rctx.attr.distutils,
+        "distutils_content": rctx.attr.distutils_content,
+        "ignore_root_user_error": rctx.attr.ignore_root_user_error,
+        "name": rctx.attr.name,
+        "netrc": rctx.attr.netrc,
+        "patches": rctx.attr.patches,
+        "platform": platform,
+        "python_version": python_version,
+        "release_filename": release_filename,
+        "sha256": rctx.attr.sha256,
+        "strip_prefix": rctx.attr.strip_prefix,
+    }
+
+    if rctx.attr.url:
+        attrs["url"] = rctx.attr.url
+    else:
+        attrs["urls"] = urls
+
+    return attrs
+
# Repository rule that fetches and configures one standalone Python interpreter
# for a single (python_version, platform) pair. The implementation downloads
# the release archive and writes a BUILD.bazel exposing the runtime and
# toolchain targets; it returns its attrs so the repository is reproducible.
python_repository = repository_rule(
    _python_repository_impl,
    doc = "Fetches the external tools needed for the Python toolchain.",
    attrs = {
        "auth_patterns": attr.string_dict(
            doc = "Override mapping of hostnames to authorization patterns; mirrors the eponymous attribute from http_archive",
        ),
        "coverage_tool": attr.string(
            # Mirrors the definition at
            # https://github.com/bazelbuild/bazel/blob/master/src/main/starlark/builtins_bzl/common/python/py_runtime_rule.bzl
            doc = """
This is a target to use for collecting code coverage information from `py_binary`
and `py_test` targets.

If set, the target must either produce a single file or be an executable target.
The path to the single file, or the executable if the target is executable,
determines the entry point for the python coverage tool.  The target and its
runfiles will be added to the runfiles when coverage is enabled.

The entry point for the tool must be loadable by a Python interpreter (e.g. a
`.py` or `.pyc` file).  It must accept the command line arguments
of coverage.py (https://coverage.readthedocs.io), at least including
the `run` and `lcov` subcommands.

The target is accepted as a string by the python_repository and evaluated within
the context of the toolchain repository.

For more information see the official bazel docs
(https://bazel.build/reference/be/python#py_runtime.coverage_tool).
""",
        ),
        "distutils": attr.label(
            allow_single_file = True,
            doc = "A distutils.cfg file to be included in the Python installation. " +
                  "Either distutils or distutils_content can be specified, but not both.",
            mandatory = False,
        ),
        "distutils_content": attr.string(
            doc = "A distutils.cfg file content to be included in the Python installation. " +
                  "Either distutils or distutils_content can be specified, but not both.",
            mandatory = False,
        ),
        "ignore_root_user_error": attr.bool(
            default = False,
            doc = "Whether the check for root should be ignored or not. This causes cache misses with .pyc files.",
            mandatory = False,
        ),
        "netrc": attr.string(
            doc = ".netrc file to use for authentication; mirrors the eponymous attribute from http_archive",
        ),
        "patches": attr.label_list(
            doc = "A list of patch files to apply to the unpacked interpreter",
            mandatory = False,
        ),
        "platform": attr.string(
            doc = "The platform name for the Python interpreter tarball.",
            mandatory = True,
            values = PLATFORMS.keys(),
        ),
        "python_version": attr.string(
            doc = "The Python version.",
            mandatory = True,
        ),
        "release_filename": attr.string(
            doc = "The filename of the interpreter to be downloaded",
            mandatory = True,
        ),
        "sha256": attr.string(
            doc = "The SHA256 integrity hash for the Python interpreter tarball.",
            mandatory = True,
        ),
        "strip_prefix": attr.string(
            doc = "A directory prefix to strip from the extracted files.",
        ),
        "url": attr.string(
            doc = "The URL of the interpreter to download. Exactly one of url and urls must be set.",
        ),
        "urls": attr.string_list(
            doc = "The URL of the interpreter to download. Exactly one of url and urls must be set.",
        ),
        # NOTE(review): the zstd attributes presumably configure fetching/building
        # zstd to decompress `.tar.zst` release archives — confirm in the
        # implementation above this window.
        "zstd_sha256": attr.string(
            default = "7c42d56fac126929a6a85dbc73ff1db2411d04f104fae9bdea51305663a83fd0",
        ),
        "zstd_url": attr.string(
            default = "https://github.com/facebook/zstd/releases/download/v{version}/zstd-{version}.tar.gz",
        ),
        "zstd_version": attr.string(
            default = "1.5.2",
        ),
        # Consumed by the repo-phase logger (repo_utils.bzl) to identify this
        # rule in debug output.
        "_rule_name": attr.string(default = "python_repository"),
    },
    environ = [REPO_DEBUG_ENV_VAR],
)
+
# Wrapper macro around everything above, this is the primary API.
def python_register_toolchains(
        name,
        python_version,
        distutils = None,
        distutils_content = None,
        register_toolchains = True,
        register_coverage_tool = False,
        set_python_version_constraint = False,
        tool_versions = TOOL_VERSIONS,
        **kwargs):
    """Convenience macro for users which does typical setup.

    - Create a repository for each built-in platform like "python_linux_amd64" -
      this repository is lazily fetched when Python is needed for that platform.
    - Create a repository exposing toolchains for each platform like
      "python_platforms".
    - Register a toolchain pointing at each platform.
    Users can avoid this macro and do these steps themselves, if they want more
    control.
    Args:
        name: base name for all created repos, like "python38".
        python_version: the Python version.
        distutils: see the distutils attribute in the python_repository repository rule.
        distutils_content: see the distutils_content attribute in the python_repository repository rule.
        register_toolchains: Whether or not to register the downloaded toolchains.
        register_coverage_tool: Whether or not to register the downloaded coverage tool to the toolchains.
            NOTE: Coverage support using the toolchain is only supported in Bazel 6 and higher.

        set_python_version_constraint: When set to true, target_compatible_with for the toolchains will include a version constraint.
        tool_versions: a dict containing a mapping of version with SHASUM and platform info. If not supplied, the defaults
            in python/versions.bzl will be used.
        **kwargs: passed to each python_repository call.
    """

    if BZLMOD_ENABLED:
        # native.register_toolchains cannot be used under bzlmod; registration
        # happens via MODULE.bazel instead.
        register_toolchains = False

    base_url = kwargs.pop("base_url", DEFAULT_RELEASE_BASE_URL)

    python_version = full_version(python_version)

    toolchain_repo_name = "{name}_toolchains".format(name = name)

    # When using unreleased Bazel versions, the version is an empty string
    if native.bazel_version:
        bazel_major = int(native.bazel_version.split(".")[0])
        if bazel_major < 6:
            # Toolchain-based coverage needs Bazel 6+; warn and disable rather
            # than fail so builds on older Bazel keep working.
            if register_coverage_tool:
                # buildifier: disable=print
                print((
                    "WARNING: ignoring register_coverage_tool=True when " +
                    "registering @{name}: Bazel 6+ required, got {version}"
                ).format(
                    name = name,
                    version = native.bazel_version,
                ))
            register_coverage_tool = False

    loaded_platforms = []
    for platform in PLATFORMS.keys():
        sha256 = tool_versions[python_version]["sha256"].get(platform, None)
        if not sha256:
            # No release artifact exists for this (version, platform) pair.
            continue

        loaded_platforms.append(platform)
        (release_filename, urls, strip_prefix, patches) = get_release_info(platform, python_version, base_url, tool_versions)

        # An explicit coverage tool in tool_versions takes precedence over the
        # auto-registered coverage dependency.
        coverage_tool = tool_versions[python_version].get("coverage_tool", {}).get(platform, None)
        if register_coverage_tool and coverage_tool == None:
            coverage_tool = coverage_dep(
                name = "{name}_{platform}_coverage".format(
                    name = name,
                    platform = platform,
                ),
                python_version = python_version,
                platform = platform,
                visibility = ["@{name}_{platform}//:__subpackages__".format(
                    name = name,
                    platform = platform,
                )],
            )

        python_repository(
            name = "{name}_{platform}".format(
                name = name,
                platform = platform,
            ),
            sha256 = sha256,
            patches = patches,
            platform = platform,
            python_version = python_version,
            release_filename = release_filename,
            urls = urls,
            distutils = distutils,
            distutils_content = distutils_content,
            strip_prefix = strip_prefix,
            coverage_tool = coverage_tool,
            **kwargs
        )
        if register_toolchains:
            native.register_toolchains("@{toolchain_repo_name}//:{platform}_toolchain".format(
                toolchain_repo_name = toolchain_repo_name,
                platform = platform,
            ))
            native.register_toolchains("@{toolchain_repo_name}//:{platform}_py_cc_toolchain".format(
                toolchain_repo_name = toolchain_repo_name,
                platform = platform,
            ))
            native.register_toolchains("@{toolchain_repo_name}//:{platform}_py_exec_tools_toolchain".format(
                toolchain_repo_name = toolchain_repo_name,
                platform = platform,
            ))

    host_toolchain(
        name = name + "_host",
        python_version = python_version,
        user_repository_name = name,
        platforms = loaded_platforms,
    )

    toolchain_aliases(
        name = name,
        python_version = python_version,
        user_repository_name = name,
        platforms = loaded_platforms,
    )

    # in bzlmod we write out our own toolchain repos
    if BZLMOD_ENABLED:
        return

    toolchains_repo(
        name = toolchain_repo_name,
        python_version = python_version,
        set_python_version_constraint = set_python_version_constraint,
        user_repository_name = name,
    )
+
def python_register_multi_toolchains(
        name,
        python_versions,
        default_version = None,
        **kwargs):
    """Convenience macro for registering multiple Python toolchains.

    Args:
        name: base name for each name in python_register_toolchains call.
        python_versions: the Python versions to register toolchains for.
        default_version: the default Python version. If not set, the first version in
            python_versions is used.
        **kwargs: passed to each python_register_toolchains call.
    """
    if len(python_versions) == 0:
        fail("python_versions must not be empty")

    if not default_version:
        # NOTE: index rather than pop(0) — the previous implementation mutated
        # the caller's list in place.
        default_version = python_versions[0]
    for python_version in python_versions:
        if python_version == default_version:
            # We register the default version lastly so that it's not picked first when --platforms
            # is set with a constraint during toolchain resolution. This is due to the fact that
            # Bazel will match the unconstrained toolchain if we register it before the constrained
            # ones.
            continue
        python_register_toolchains(
            name = name + "_" + python_version.replace(".", "_"),
            python_version = python_version,
            set_python_version_constraint = True,
            **kwargs
        )

    # The default version is registered last and without a version constraint
    # (see the comment in the loop above).
    python_register_toolchains(
        name = name + "_" + default_version.replace(".", "_"),
        python_version = default_version,
        set_python_version_constraint = False,
        **kwargs
    )

    multi_toolchain_aliases(
        name = name,
        python_versions = {
            # Dict construction dedupes default_version if it already appears
            # in python_versions.
            python_version: name + "_" + python_version.replace(".", "_")
            for python_version in (python_versions + [default_version])
        },
    )
diff --git a/python/private/bzlmod/pythons_hub.bzl b/python/private/pythons_hub.bzl
similarity index 79%
rename from python/private/bzlmod/pythons_hub.bzl
rename to python/private/pythons_hub.bzl
index 5f536f3..7a8c874 100644
--- a/python/private/bzlmod/pythons_hub.bzl
+++ b/python/private/pythons_hub.bzl
@@ -14,13 +14,9 @@
 
 "Repo rule used by bzlmod extension to create a repo that has a map of Python interpreters and their labels"
 
-load("//python:versions.bzl", "WINDOWS_NAME")
 load("//python/private:full_version.bzl", "full_version")
 load(
     "//python/private:toolchains_repo.bzl",
-    "get_host_os_arch",
-    "get_host_platform",
-    "get_repository_name",
     "python_toolchain_build_file_content",
 )
 
@@ -31,6 +27,7 @@
 
 _HUB_BUILD_FILE_TEMPLATE = """\
 load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("@@{rules_python}//python/private:py_toolchain_suite.bzl", "py_toolchain_suite")
 
 bzl_library(
     name = "interpreters_bzl",
@@ -56,19 +53,24 @@
     if not _have_same_length(python_versions, set_python_version_constraints, user_repository_names):
         fail("all lists must have the same length")
 
-    rules_python = get_repository_name(workspace_location)
-
     # Iterate over the length of python_versions and call
     # build the toolchain content by calling python_toolchain_build_file_content
-    toolchains = "\n".join([python_toolchain_build_file_content(
-        prefix = prefixes[i],
-        python_version = full_version(python_versions[i]),
-        set_python_version_constraint = set_python_version_constraints[i],
-        user_repository_name = user_repository_names[i],
-        rules_python = rules_python,
-    ) for i in range(len(python_versions))])
+    toolchains = "\n".join(
+        [
+            python_toolchain_build_file_content(
+                prefix = prefixes[i],
+                python_version = full_version(python_versions[i]),
+                set_python_version_constraint = set_python_version_constraints[i],
+                user_repository_name = user_repository_names[i],
+            )
+            for i in range(len(python_versions))
+        ],
+    )
 
-    return _HUB_BUILD_FILE_TEMPLATE.format(toolchains = toolchains)
+    return _HUB_BUILD_FILE_TEMPLATE.format(
+        toolchains = toolchains,
+        rules_python = workspace_location.workspace_name,
+    )
 
 _interpreters_bzl_template = """
 INTERPRETER_LABELS = {{
@@ -78,7 +80,7 @@
 """
 
 _line_for_hub_template = """\
-    "{name}": Label("@{name}_{platform}//:{path}"),
+    "{name}_host": Label("@{name}_host//:python"),
 """
 
 def _hub_repo_impl(rctx):
@@ -96,18 +98,12 @@
         executable = False,
     )
 
-    (os, arch) = get_host_os_arch(rctx)
-    platform = get_host_platform(os, arch)
-    is_windows = (os == WINDOWS_NAME)
-    path = "python.exe" if is_windows else "bin/python3"
-
     # Create a dict that is later used to create
     # a symlink to a interpreter.
-    interpreter_labels = "".join([_line_for_hub_template.format(
-        name = name,
-        platform = platform,
-        path = path,
-    ) for name in rctx.attr.toolchain_user_repository_names])
+    interpreter_labels = "".join([
+        _line_for_hub_template.format(name = name)
+        for name in rctx.attr.toolchain_user_repository_names
+    ])
 
     rctx.file(
         "interpreters.bzl",
diff --git a/python/private/render_pkg_aliases.bzl b/python/private/render_pkg_aliases.bzl
deleted file mode 100644
index 9ebbc36..0000000
--- a/python/private/render_pkg_aliases.bzl
+++ /dev/null
@@ -1,190 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""render_pkg_aliases is a function to generate BUILD.bazel contents used to create user-friendly aliases.
-
-This is used in bzlmod and non-bzlmod setups."""
-
-load("//python/private:normalize_name.bzl", "normalize_name")
-load(":text_util.bzl", "render")
-load(":version_label.bzl", "version_label")
-
-NO_MATCH_ERROR_MESSAGE_TEMPLATE = """\
-No matching wheel for current configuration's Python version.
-
-The current build configuration's Python version doesn't match any of the Python
-versions available for this wheel. This wheel supports the following Python versions:
-    {supported_versions}
-
-As matched by the `@{rules_python}//python/config_settings:is_python_<version>`
-configuration settings.
-
-To determine the current configuration's Python version, run:
-    `bazel config <config id>` (shown further below)
-and look for
-    {rules_python}//python/config_settings:python_version
-
-If the value is missing, then the "default" Python version is being used,
-which has a "null" version value and will not match version constraints.
-"""
-
-def _render_whl_library_alias(
-        *,
-        name,
-        repo_name,
-        dep,
-        target,
-        default_version,
-        versions,
-        rules_python):
-    """Render an alias for common targets
-
-    If the versions is passed, then the `rules_python` must be passed as well and
-    an alias with a select statement based on the python version is going to be
-    generated.
-    """
-    if versions == None:
-        return render.alias(
-            name = name,
-            actual = repr("@{repo_name}_{dep}//:{target}".format(
-                repo_name = repo_name,
-                dep = dep,
-                target = target,
-            )),
-        )
-
-    # Create the alias repositories which contains different select
-    # statements  These select statements point to the different pip
-    # whls that are based on a specific version of Python.
-    selects = {}
-    for full_version in versions:
-        condition = "@@{rules_python}//python/config_settings:is_python_{full_python_version}".format(
-            rules_python = rules_python,
-            full_python_version = full_version,
-        )
-        actual = "@{repo_name}_{version}_{dep}//:{target}".format(
-            repo_name = repo_name,
-            version = version_label(full_version),
-            dep = dep,
-            target = target,
-        )
-        selects[condition] = actual
-
-    if default_version:
-        no_match_error = None
-        default_actual = "@{repo_name}_{version}_{dep}//:{target}".format(
-            repo_name = repo_name,
-            version = version_label(default_version),
-            dep = dep,
-            target = target,
-        )
-        selects["//conditions:default"] = default_actual
-    else:
-        no_match_error = "_NO_MATCH_ERROR"
-
-    return render.alias(
-        name = name,
-        actual = render.select(
-            selects,
-            no_match_error = no_match_error,
-        ),
-    )
-
-def _render_common_aliases(repo_name, name, versions = None, default_version = None, rules_python = None):
-    lines = [
-        """package(default_visibility = ["//visibility:public"])""",
-    ]
-
-    if versions:
-        versions = sorted(versions)
-
-    if not versions:
-        pass
-    elif default_version in versions:
-        pass
-    else:
-        error_msg = NO_MATCH_ERROR_MESSAGE_TEMPLATE.format(
-            supported_versions = ", ".join(versions),
-            rules_python = rules_python,
-        )
-
-        lines.append("_NO_MATCH_ERROR = \"\"\"\\\n{error_msg}\"\"\"".format(
-            error_msg = error_msg,
-        ))
-
-        # This is to simplify the code in _render_whl_library_alias and to ensure
-        # that we don't pass a 'default_version' that is not in 'versions'.
-        default_version = None
-
-    lines.append(
-        render.alias(
-            name = name,
-            actual = repr(":pkg"),
-        ),
-    )
-    lines.extend(
-        [
-            _render_whl_library_alias(
-                name = target,
-                repo_name = repo_name,
-                dep = name,
-                target = target,
-                versions = versions,
-                default_version = default_version,
-                rules_python = rules_python,
-            )
-            for target in ["pkg", "whl", "data", "dist_info"]
-        ],
-    )
-
-    return "\n\n".join(lines)
-
-def render_pkg_aliases(*, repo_name, bzl_packages = None, whl_map = None, rules_python = None, default_version = None):
-    """Create alias declarations for each PyPI package.
-
-    The aliases should be appended to the pip_repository BUILD.bazel file. These aliases
-    allow users to use requirement() without needed a corresponding `use_repo()` for each dep
-    when using bzlmod.
-
-    Args:
-        repo_name: the repository name of the hub repository that is visible to the users that is
-            also used as the prefix for the spoke repo names (e.g. "pip", "pypi").
-        bzl_packages: the list of packages to setup, if not specified, whl_map.keys() will be used instead.
-        whl_map: the whl_map for generating Python version aware aliases.
-        default_version: the default version to be used for the aliases.
-        rules_python: the name of the rules_python workspace.
-
-    Returns:
-        A dict of file paths and their contents.
-    """
-    if not bzl_packages and whl_map:
-        bzl_packages = list(whl_map.keys())
-
-    contents = {}
-    for name in bzl_packages:
-        versions = None
-        if whl_map != None:
-            versions = whl_map[name]
-        name = normalize_name(name)
-
-        filename = "{}/BUILD.bazel".format(name)
-        contents[filename] = _render_common_aliases(
-            repo_name = repo_name,
-            name = name,
-            versions = versions,
-            rules_python = rules_python,
-            default_version = default_version,
-        ).strip()
-
-    return contents
diff --git a/python/private/repo_utils.bzl b/python/private/repo_utils.bzl
new file mode 100644
index 0000000..aab0325
--- /dev/null
+++ b/python/private/repo_utils.bzl
@@ -0,0 +1,430 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Functionality shared only by repository rule phase code.
+
+This code should only be loaded and used during the repository phase.
+"""
+
# Set to "1" to enable debug-level logging from repository-phase code.
REPO_DEBUG_ENV_VAR = "RULES_PYTHON_REPO_DEBUG"

# Optional explicit log verbosity level name (e.g. "TRACE", "DEBUG", "INFO");
# overrides the default derived from REPO_DEBUG_ENV_VAR.
REPO_VERBOSITY_ENV_VAR = "RULES_PYTHON_REPO_DEBUG_VERBOSITY"
+
def _is_repo_debug_enabled(mrctx):
    """Tells if debugging output is requested during repo operations.

    Args:
        mrctx: repository_ctx or module_ctx object

    Returns:
        True if enabled, False if not.
    """
    return _getenv(mrctx, REPO_DEBUG_ENV_VAR) == "1"
+
def _logger(mrctx, name = None):
    """Creates a logger instance for printing messages.

    Args:
        mrctx: repository_ctx or module_ctx object. If the attribute
            `_rule_name` is present, it will be included in log messages.
        name: name for the logger. Optional for repository_ctx usage.

    Returns:
        A struct with attributes logging: trace, debug, info, warn, fail.
    """
    # Default verbosity: DEBUG when the repo-debug env var is set, else WARN.
    if _is_repo_debug_enabled(mrctx):
        verbosity_level = "DEBUG"
    else:
        verbosity_level = "WARN"

    # An explicit verbosity env var takes precedence over the debug flag.
    env_var_verbosity = _getenv(mrctx, REPO_VERBOSITY_ENV_VAR)
    verbosity_level = env_var_verbosity or verbosity_level

    # Map the level name to a numeric threshold. Unknown names (including
    # "WARN") map to 0, so warn() and fail() below always print.
    verbosity = {
        "DEBUG": 2,
        "INFO": 1,
        "TRACE": 3,
    }.get(verbosity_level, 0)

    # repository_ctx exposes `attr`; module_ctx does not, so a name must be
    # supplied explicitly when logging from a module extension.
    if hasattr(mrctx, "attr"):
        rctx = mrctx  # This is `repository_ctx`.
        name = name or "{}(@@{})".format(getattr(rctx.attr, "_rule_name", "?"), rctx.name)
    elif not name:
        fail("The name has to be specified when using the logger with `module_ctx`")

    def _log(enabled_on_verbosity, level, message_cb_or_str, printer = print):
        if verbosity < enabled_on_verbosity:
            return

        # Accept either a plain string or a zero-arg callable; the callable
        # form defers building expensive messages until they will be printed.
        if type(message_cb_or_str) == "string":
            message = message_cb_or_str
        else:
            message = message_cb_or_str()

        # NOTE: printer may be the `fail` function.
        printer("\nrules_python:{} {}:".format(
            name,
            level.upper(),
        ), message)  # buildifier: disable=print

    return struct(
        trace = lambda message_cb: _log(3, "TRACE", message_cb),
        debug = lambda message_cb: _log(2, "DEBUG", message_cb),
        info = lambda message_cb: _log(1, "INFO", message_cb),
        warn = lambda message_cb: _log(0, "WARNING", message_cb),
        # Level -1 is below every threshold, so fail() is never filtered out.
        fail = lambda message_cb: _log(-1, "FAIL", message_cb, fail),
    )
+
def _execute_internal(
        mrctx,
        *,
        op,
        fail_on_error = False,
        arguments,
        environment = {},
        logger = None,
        **kwargs):
    """Execute a subprocess with debugging instrumentation.

    Args:
        mrctx: module_ctx or repository_ctx object
        op: string, brief description of the operation this command
            represents. Used to succinctly describe it in logging and
            error messages.
        fail_on_error: bool, True if fail() should be called if the command
            fails (non-zero exit code), False if not.
        arguments: list of arguments; see module_ctx.execute#arguments or
            repository_ctx#arguments.
        environment: optional dict of the environment to run the command
            in; see module_ctx.execute#environment or
            repository_ctx.execute#environment.
        logger: optional `Logger` to use for logging execution details. Must be
            specified when using module_ctx. If not specified, a default will
            be created.
        **kwargs: additional kwargs to pass onto rctx.execute

    Returns:
        exec_result object, see repository_ctx.execute return type.
    """
    # A default logger can only be derived from repository_ctx (its attrs
    # carry the rule name); module_ctx callers must pass one explicitly.
    if not logger and hasattr(mrctx, "attr"):
        rctx = mrctx
        logger = _logger(rctx)
    elif not logger:
        fail("logger must be specified when using 'module_ctx'")

    # Lambda defers building the (potentially large) message until the logger
    # decides debug output is enabled.
    logger.debug(lambda: (
        "repo.execute: {op}: start\n" +
        "  command: {cmd}\n" +
        "  working dir: {cwd}\n" +
        "  timeout: {timeout}\n" +
        "  environment:{env_str}\n"
    ).format(
        op = op,
        cmd = _args_to_str(arguments),
        cwd = _cwd_to_str(mrctx, kwargs),
        timeout = _timeout_to_str(kwargs),
        env_str = _env_to_str(environment),
    ))

    mrctx.report_progress("Running {}".format(op))
    result = mrctx.execute(arguments, environment = environment, **kwargs)

    if fail_on_error and result.return_code != 0:
        logger.fail((
            "repo.execute: {op}: end: failure:\n" +
            "  command: {cmd}\n" +
            "  return code: {return_code}\n" +
            "  working dir: {cwd}\n" +
            "  timeout: {timeout}\n" +
            "  environment:{env_str}\n" +
            "{output}"
        ).format(
            op = op,
            cmd = _args_to_str(arguments),
            return_code = result.return_code,
            cwd = _cwd_to_str(mrctx, kwargs),
            timeout = _timeout_to_str(kwargs),
            env_str = _env_to_str(environment),
            output = _outputs_to_str(result),
        ))
    elif _is_repo_debug_enabled(mrctx):
        logger.debug((
            "repo.execute: {op}: end: {status}\n" +
            "  return code: {return_code}\n" +
            "{output}"
        ).format(
            op = op,
            status = "success" if result.return_code == 0 else "failure",
            return_code = result.return_code,
            output = _outputs_to_str(result),
        ))

    # Copy every field of the exec_result onto a fresh struct so that a
    # `describe_failure` callable can be attached alongside the original
    # attributes (structs are immutable).
    result_kwargs = {k: getattr(result, k) for k in dir(result)}
    return struct(
        describe_failure = lambda: _execute_describe_failure(
            op = op,
            arguments = arguments,
            result = result,
            mrctx = mrctx,
            kwargs = kwargs,
            environment = environment,
        ),
        **result_kwargs
    )
+
def _execute_unchecked(*args, **kwargs):
    """Execute a subprocess.

    Additional information will be printed if debug output is enabled.

    Args:
        *args: see _execute_internal
        **kwargs: see _execute_internal

    Returns:
        exec_result object, see repository_ctx.execute return type.
    """

    # Keyword args belong after *args; `f(kw = x, *args)` is confusing and
    # linter-flagged, though semantically identical.
    return _execute_internal(*args, fail_on_error = False, **kwargs)
+
def _execute_checked(*args, **kwargs):
    """Execute a subprocess, failing for a non-zero exit code.

    If the command fails, then fail() is called with detailed information
    about the command and its failure.

    Args:
        *args: see _execute_internal
        **kwargs: see _execute_internal

    Returns:
        exec_result object, see repository_ctx.execute return type.
    """

    # Keyword args belong after *args; `f(kw = x, *args)` is confusing and
    # linter-flagged, though semantically identical.
    return _execute_internal(*args, fail_on_error = True, **kwargs)
+
def _execute_checked_stdout(*args, **kwargs):
    """Run a checked subprocess and return only its stdout text."""
    result = _execute_checked(*args, **kwargs)
    return result.stdout
+
def _execute_describe_failure(*, op, arguments, result, mrctx, kwargs, environment):
    # Render a detailed, human-readable report for a failed subprocess.
    template = "\n".join([
        "repo.execute: {op}: failure:",
        "  command: {cmd}",
        "  return code: {return_code}",
        "  working dir: {cwd}",
        "  timeout: {timeout}",
        "  environment:{env_str}",
        "{output}",
    ])
    return template.format(
        op = op,
        cmd = _args_to_str(arguments),
        return_code = result.return_code,
        cwd = _cwd_to_str(mrctx, kwargs),
        timeout = _timeout_to_str(kwargs),
        env_str = _env_to_str(environment),
        output = _outputs_to_str(result),
    )
+
def _which_checked(mrctx, binary_name):
    """Find a binary on PATH, failing loudly when it is absent.

    Args:
        mrctx: module_ctx or repository_ctx.
        binary_name: name of the binary to find.

    Returns:
        mrctx.Path for the binary.
    """
    found = _which_unchecked(mrctx, binary_name)
    if found.binary != None:
        return found.binary
    fail(found.describe_failure())
+
def _which_unchecked(mrctx, binary_name):
    """Tests to see if a binary exists.

    This also reads the `PATH` environment variable (which, on Bazel 7+,
    registers it for change tracking) and watches the resolved binary.

    Args:
        binary_name: name of the binary to find.
        mrctx: repository context.

    Returns:
        `struct` with attributes:
        * `binary`: `repository_ctx.Path`
        * `describe_failure`: `Callable | None`; takes no args. If the
          binary couldn't be found, provides a detailed error description.
    """
    # Capture PATH up front so the failure message reports what was searched.
    path = _getenv(mrctx, "PATH", "")
    binary = mrctx.which(binary_name)
    if binary:
        # Watch the resolved binary so content changes re-trigger the rule
        # (no-op on Bazel versions without ctx.watch).
        _watch(mrctx, binary)
        describe_failure = None
    else:
        describe_failure = lambda: _which_describe_failure(binary_name, path)

    return struct(
        binary = binary,
        describe_failure = describe_failure,
    )
+
def _which_describe_failure(binary_name, path):
    # Build the error text shown when a PATH lookup for a binary fails.
    lines = [
        "Unable to find the binary '{binary_name}' on PATH.",
        "  PATH = {path}",
    ]
    return "\n".join(lines).format(
        binary_name = binary_name,
        path = path,
    )
+
def _getenv(mrctx, name, default = None):
    # Bazel 7+ has (repository|module)_ctx.getenv; older versions only
    # expose os.environ. NOTE: environ.get is looked up unconditionally to
    # preserve the original evaluation order.
    environ_get = mrctx.os.environ.get
    return getattr(mrctx, "getenv", environ_get)(name, default)
+
def _args_to_str(arguments):
    # Render an argument vector as a single shell-like command line.
    return " ".join([_arg_repr(arg) for arg in arguments])

def _arg_repr(value):
    # Quote (via repr) only arguments containing shell-special characters.
    return repr(value) if _arg_should_be_quoted(value) else str(value)

_SPECIAL_SHELL_CHARS = [" ", "'", '"', "{", "$", "("]

def _arg_should_be_quoted(value):
    # `value` may be non-str, such as mrctx.path objects
    text = str(value)
    return any([char in text for char in _SPECIAL_SHELL_CHARS])
+
def _cwd_to_str(mrctx, kwargs):
    # Show the effective working directory; fall back to the repo root.
    explicit = kwargs.get("working_directory")
    if explicit:
        return explicit
    return "<default: {}>".format(mrctx.path(""))
+
def _env_to_str(environment):
    # One "KEY='value'" pair per line, or a placeholder when empty/None.
    if not environment:
        return " <default environment>"
    pairs = ["{}={}".format(key, repr(value)) for key, value in environment.items()]
    return "\n" + "\n".join(pairs)
+
def _timeout_to_str(kwargs):
    # The execute() timeout, or a placeholder when the default applies.
    if "timeout" in kwargs:
        return kwargs["timeout"]
    return "<default timeout>"
+
def _outputs_to_str(result):
    # Format stdout/stderr with banners so they stand out in logs.
    pieces = []
    for label, text in [("stdout", result.stdout), ("stderr", result.stderr)]:
        if not text:
            pieces.append("<{} empty>".format(label))
            continue
        pieces.append("===== {} start =====".format(label))

        # Strip a single trailing newline so the closing banner isn't
        # pushed down by a blank line.
        pieces.append(text[:-1] if text.endswith("\n") else text)
        pieces.append("===== {} end =====".format(label))
    return "\n".join(pieces)
+
+# This includes the vendored _translate_cpu and _translate_os from
+# @platforms//host:extension.bzl at version 0.0.9 so that we don't
+# force the users to depend on it.
+
def _get_platforms_os_name(mrctx):
    """Return the name in @platforms//os for the host os.

    Args:
        mrctx: module_ctx or repository_ctx.

    Returns:
        `str`. The target name.
    """
    os = mrctx.os.name.lower()

    # Map well-known os.name prefixes onto @platforms//os target names;
    # anything unrecognized is returned verbatim.
    for prefix, target in [
        ("mac os", "osx"),
        ("freebsd", "freebsd"),
        ("openbsd", "openbsd"),
        ("linux", "linux"),
        ("windows", "windows"),
    ]:
        if os.startswith(prefix):
            return target
    return os
+
def _get_platforms_cpu_name(mrctx):
    """Return the name in @platforms//cpu for the host arch.

    Args:
        mrctx: module_ctx or repository_ctx.

    Returns:
        `str`. The target name.
    """
    arch = mrctx.os.arch.lower()

    # os.arch alias -> @platforms//cpu target name. Unknown values are
    # returned verbatim.
    aliases = {
        "i386": "x86_32",
        "i486": "x86_32",
        "i586": "x86_32",
        "i686": "x86_32",
        "i786": "x86_32",
        "x86": "x86_32",
        "amd64": "x86_64",
        "x86_64": "x86_64",
        "x64": "x86_64",
        "ppc": "ppc",
        "ppc64": "ppc",
        "ppc64le": "ppc",
        "arm": "arm",
        "armv7l": "arm",
        "aarch64": "aarch64",
        "s390x": "s390x",
        "s390": "s390x",
        "mips64el": "mips64",
        "mips64": "mips64",
        "riscv64": "riscv64",
    }
    return aliases.get(arch, arch)
+
# TODO: Remove after Bazel 6 support dropped
def _watch(mrctx, *args, **kwargs):
    """Forwards to mrctx.watch when the Bazel version provides it."""
    watch = getattr(mrctx, "watch", None)
    if watch:
        watch(*args, **kwargs)
+
# TODO: Remove after Bazel 6 support dropped
def _watch_tree(mrctx, *args, **kwargs):
    """Forwards to mrctx.watch_tree when the Bazel version provides it."""
    watch_tree = getattr(mrctx, "watch_tree", None)
    if watch_tree:
        watch_tree(*args, **kwargs)
+
# Public export: helpers shared by repository rules and module extensions.
# These wrap (module_ctx | repository_ctx) APIs with debug logging, detailed
# error reporting, and compatibility shims for pre-Bazel-7 versions.
repo_utils = struct(
    # keep sorted
    execute_checked = _execute_checked,
    execute_checked_stdout = _execute_checked_stdout,
    execute_unchecked = _execute_unchecked,
    get_platforms_cpu_name = _get_platforms_cpu_name,
    get_platforms_os_name = _get_platforms_os_name,
    getenv = _getenv,
    is_repo_debug_enabled = _is_repo_debug_enabled,
    logger = _logger,
    watch = _watch,
    watch_tree = _watch_tree,
    which_checked = _which_checked,
    which_unchecked = _which_unchecked,
)
diff --git a/python/private/runtime_env_toolchain.bzl b/python/private/runtime_env_toolchain.bzl
new file mode 100644
index 0000000..1601926
--- /dev/null
+++ b/python/private/runtime_env_toolchain.bzl
@@ -0,0 +1,112 @@
+# Copyright 2019 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Definitions related to the Python toolchain."""
+
+load("@rules_cc//cc:defs.bzl", "cc_library")
+load("//python:py_runtime.bzl", "py_runtime")
+load("//python:py_runtime_pair.bzl", "py_runtime_pair")
+load("//python/cc:py_cc_toolchain.bzl", "py_cc_toolchain")
+load(":py_exec_tools_toolchain.bzl", "py_exec_tools_toolchain")
+load(":toolchain_types.bzl", "EXEC_TOOLS_TOOLCHAIN_TYPE", "PY_CC_TOOLCHAIN_TYPE", "TARGET_TOOLCHAIN_TYPE")
+
# Config setting that gates registration of the exec-tools toolchain below.
_IS_EXEC_TOOLCHAIN_ENABLED = Label("//python/config_settings:is_exec_tools_toolchain_enabled")

def define_runtime_env_toolchain(name):
    """Defines the runtime_env Python toolchain.

    This is a minimal suite of toolchains that provided limited functionality.
    They're mostly only useful to aid migration off the builtin
    `@bazel_tools//tools/python:autodetecting_toolchain` toolchain.

    NOTE: This was previously called the "autodetecting" toolchain, but was
    renamed to better reflect its behavior, since it doesn't autodetect
    anything.

    Args:
        name: The name of the toolchain to introduce.
    """
    base_name = name.replace("_toolchain", "")

    # Runtime whose "interpreter" is a shell script resolved at execution
    # time rather than a checked-in/downloaded interpreter.
    py_runtime(
        name = "_runtime_env_py3_runtime",
        interpreter = "//python/private:runtime_env_toolchain_interpreter.sh",
        python_version = "PY3",
        stub_shebang = "#!/usr/bin/env python3",
        visibility = ["//visibility:private"],
        tags = ["manual"],
    )

    # This is a dummy runtime whose interpreter_path triggers the native rule
    # logic to use the legacy behavior on Windows.
    # TODO(#7844): Remove this target.
    py_runtime(
        name = "_magic_sentinel_runtime",
        interpreter_path = "/_magic_pyruntime_sentinel_do_not_use",
        python_version = "PY3",
        visibility = ["//visibility:private"],
        tags = ["manual"],
    )

    py_runtime_pair(
        name = "_runtime_env_py_runtime_pair",
        py3_runtime = select({
            # If we're on windows, inject the sentinel to tell native rule logic
            # that we attempted to use the runtime_env toolchain and need to
            # switch back to legacy behavior.
            # TODO(#7844): Remove this hack.
            "@platforms//os:windows": ":_magic_sentinel_runtime",
            "//conditions:default": ":_runtime_env_py3_runtime",
        }),
        visibility = ["//visibility:public"],
        tags = ["manual"],
    )

    native.toolchain(
        name = name,
        toolchain = ":_runtime_env_py_runtime_pair",
        toolchain_type = TARGET_TOOLCHAIN_TYPE,
        visibility = ["//visibility:public"],
    )

    # Exec-tools toolchain supplying the precompiler; only selected when
    # _IS_EXEC_TOOLCHAIN_ENABLED matches (see target_settings below).
    py_exec_tools_toolchain(
        name = "_runtime_env_py_exec_tools_toolchain_impl",
        precompiler = Label("//tools/precompiler:precompiler"),
        visibility = ["//visibility:private"],
        tags = ["manual"],
    )
    native.toolchain(
        name = base_name + "_py_exec_tools_toolchain",
        toolchain = "_runtime_env_py_exec_tools_toolchain_impl",
        toolchain_type = EXEC_TOOLS_TOOLCHAIN_TYPE,
        target_settings = [_IS_EXEC_TOOLCHAIN_ENABLED],
        visibility = ["//visibility:public"],
    )

    # Empty stand-in so py_cc_toolchain's headers/libs attributes are
    # satisfied even though this toolchain provides no real C API.
    cc_library(
        name = "_empty_cc_lib",
        visibility = ["//visibility:private"],
        tags = ["manual"],
    )
    py_cc_toolchain(
        name = "_runtime_env_py_cc_toolchain_impl",
        headers = ":_empty_cc_lib",
        libs = ":_empty_cc_lib",
        # NOTE(review): "0.0" looks like a deliberate placeholder version for
        # "no real runtime available" — confirm consumers treat it that way.
        python_version = "0.0",
        tags = ["manual"],
    )
    native.toolchain(
        name = base_name + "_py_cc_toolchain",
        toolchain = ":_runtime_env_py_cc_toolchain_impl",
        toolchain_type = PY_CC_TOOLCHAIN_TYPE,
        visibility = ["//visibility:public"],
    )
diff --git a/python/private/autodetecting_toolchain_interpreter.sh b/python/private/runtime_env_toolchain_interpreter.sh
old mode 100644
new mode 100755
similarity index 93%
rename from python/private/autodetecting_toolchain_interpreter.sh
rename to python/private/runtime_env_toolchain_interpreter.sh
index 5c8a10d..2cb7cc7
--- a/python/private/autodetecting_toolchain_interpreter.sh
+++ b/python/private/runtime_env_toolchain_interpreter.sh
@@ -8,8 +8,8 @@
 # We do lose the ability to set -o pipefail.
 
 FAILURE_HEADER="\
-Error occurred while attempting to use the default Python toolchain \
-(@rules_python//python:autodetecting_toolchain)."
+Error occurred while attempting to use the deprecated Python toolchain \
+(@rules_python//python/runtime_env_toolchain:all)."
 
 die() {
   echo "$FAILURE_HEADER" 1>&2
diff --git a/python/private/stage1_bootstrap_template.sh b/python/private/stage1_bootstrap_template.sh
new file mode 100644
index 0000000..959e7ba
--- /dev/null
+++ b/python/private/stage1_bootstrap_template.sh
@@ -0,0 +1,148 @@
+#!/bin/bash
+
+set -e
+
+if [[ -n "${RULES_PYTHON_BOOTSTRAP_VERBOSE:-}" ]]; then
+  set -x
+fi
+
+# runfiles-relative path
+STAGE2_BOOTSTRAP="%stage2_bootstrap%"
+
+# runfiles-relative path, absolute path, or single word
+PYTHON_BINARY='%python_binary%'
+
+# 0 or 1
+IS_ZIPFILE="%is_zipfile%"
+
+if [[ "$IS_ZIPFILE" == "1" ]]; then
+  # NOTE: Macs have an old version of mktemp, so we must use only the
+  # minimal functionality of it.
+  zip_dir=$(mktemp -d)
+
+  if [[ -n "$zip_dir" && -z "${RULES_PYTHON_BOOTSTRAP_VERBOSE:-}" ]]; then
+    trap 'rm -fr "$zip_dir"' EXIT
+  fi
+  # unzip emits a warning and exits with code 1 when there is extraneous data,
+  # like this bootstrap prelude code, but otherwise successfully extracts, so
+  # we have to ignore its exit code and suppress stderr.
+  # The alternative requires having to copy ourselves elsewhere with the prelude
+  # stripped (because zip can't extract from a stream). We avoid that because
+  # it's wasteful.
+  ( unzip -q -d "$zip_dir" "$0" 2>/dev/null || true )
+
+  RUNFILES_DIR="$zip_dir/runfiles"
+  if [[ ! -d "$RUNFILES_DIR" ]]; then
+    echo "Runfiles dir not found: zip extraction likely failed"
+    echo "Run with RULES_PYTHON_BOOTSTRAP_VERBOSE=1 to aid debugging"
+    exit 1
+  fi
+
+else
+  function find_runfiles_root() {
+    if [[ -n "${RUNFILES_DIR:-}" ]]; then
+      echo "$RUNFILES_DIR"
+      return 0
+    elif [[ "${RUNFILES_MANIFEST_FILE:-}" = *".runfiles_manifest" ]]; then
+      echo "${RUNFILES_MANIFEST_FILE%%.runfiles_manifest}"
+      return 0
+    elif [[ "${RUNFILES_MANIFEST_FILE:-}" = *".runfiles/MANIFEST" ]]; then
+      echo "${RUNFILES_MANIFEST_FILE%%.runfiles/MANIFEST}"
+      return 0
+    fi
+
+    stub_filename="$1"
+    # A relative path to our executable, as happens with
+    # a build action or bazel-bin/ invocation
+    if [[ "$stub_filename" != /* ]]; then
+      stub_filename="$PWD/$stub_filename"
+    fi
+
+    while true; do
+      module_space="${stub_filename}.runfiles"
+      if [[ -d "$module_space" ]]; then
+        echo "$module_space"
+        return 0
+      fi
+      if [[ "$stub_filename" == *.runfiles/* ]]; then
+        echo "${stub_filename%.runfiles*}.runfiles"
+        return 0
+      fi
+      if [[ ! -L "$stub_filename" ]]; then
+        break
+      fi
+      target=$(realpath $maybe_runfiles_root)
+      stub_filename="$target"
+    done
+    echo >&2 "Unable to find runfiles directory for $1"
+    exit 1
+  }
+  RUNFILES_DIR=$(find_runfiles_root $0)
+fi
+
+
# Resolve the interpreter setting to something executable:
#   /abs/path   -> used as-is (platform runtime)
#   some/rel    -> resolved inside the runfiles tree
#   plain-word  -> left for $PATH lookup (e.g. "python3")
function find_python_interpreter() {
  runfiles_root="$1"
  interpreter_path="$2"
  case "$interpreter_path" in
    /*)
      # An absolute path, i.e. platform runtime
      echo "$interpreter_path"
      ;;
    */*)
      # A runfiles-relative path
      echo "$runfiles_root/$interpreter_path"
      ;;
    *)
      # A plain word, e.g. "python3". Rely on searching PATH
      echo "$interpreter_path"
      ;;
  esac
}
+
# BUGFIX: quote the arguments — unquoted $RUNFILES_DIR / $PYTHON_BINARY are
# word-split if either path contains spaces.
python_exe=$(find_python_interpreter "$RUNFILES_DIR" "$PYTHON_BINARY")
stage2_bootstrap="$RUNFILES_DIR/$STAGE2_BOOTSTRAP"

declare -a interpreter_env
declare -a interpreter_args

# Don't prepend a potentially unsafe path to sys.path
# See: https://docs.python.org/3.11/using/cmdline.html#envvar-PYTHONSAFEPATH
# NOTE: Only works for 3.11+
# We inherit the value from the outer environment in case the user wants to
# opt-out of using PYTHONSAFEPATH. To opt-out, they have to set
# `PYTHONSAFEPATH=` (empty string). This is because Python treats the empty
# value as false, and any non-empty value as true.
# ${FOO+WORD} expands to empty if $FOO is undefined, and WORD otherwise.
if [[ -z "${PYTHONSAFEPATH+x}" ]]; then
  # ${FOO-WORD} expands to WORD if $FOO is undefined, and $FOO otherwise
  interpreter_env+=("PYTHONSAFEPATH=${PYTHONSAFEPATH-1}")
fi

if [[ "$IS_ZIPFILE" == "1" ]]; then
  # Tell stage 2 where the zip was extracted so it can locate runfiles.
  interpreter_args+=("-XRULES_PYTHON_ZIP_DIR=$zip_dir")
fi


export RUNFILES_DIR

command=(
  env
  "${interpreter_env[@]}"
  "$python_exe"
  "${interpreter_args[@]}"
  "$stage2_bootstrap"
  "$@"
)

# We use `exec` instead of a child process so that signals sent directly (e.g.
# using `kill`) to this process (the PID seen by the calling process) are
# received by the Python process. Otherwise, this process receives the signal
# and would have to manually propagate it.
# See https://github.com/bazelbuild/rules_python/issues/2043#issuecomment-2215469971
# for more information.
#
# However, when running a zip file, we need to clean up the workspace after the
# process finishes so control must return here.
if [[ "$IS_ZIPFILE" == "1" ]]; then
  "${command[@]}"
  exit $?
else
  exec "${command[@]}"
fi
diff --git a/python/private/stage2_bootstrap_template.py b/python/private/stage2_bootstrap_template.py
new file mode 100644
index 0000000..29f59d2
--- /dev/null
+++ b/python/private/stage2_bootstrap_template.py
@@ -0,0 +1,530 @@
+# This is a "stage 2" bootstrap. We can assume we've running under the desired
+# interpreter, with some of the basic interpreter options/envvars set.
+# However, more setup is required to make the app's real main file runnable.
+
+import sys
+
+# The Python interpreter unconditionally prepends the directory containing this
+# script (following symlinks) to the import path. This is the cause of #9239,
+# and is a special case of #7091. We therefore explicitly delete that entry.
+# TODO(#7091): Remove this hack when no longer necessary.
+# TODO: Use sys.flags.safe_path to determine whether this removal should be
+# performed
+del sys.path[0]
+
+import contextlib
+import os
+import re
+import runpy
+import subprocess
+import uuid
+
# ===== Template substitutions start =====
# We just put them in one place so it's easy to tell which are used.

# Runfiles-relative path to the main Python source file.
MAIN = "%main%"
# Colon-delimited string of runfiles-relative import paths to add
IMPORTS_STR = "%imports%"
# Name of the main workspace; used as the default repository import root.
WORKSPACE_NAME = "%workspace_name%"
# Though the import all value is the correct literal, we quote it
# so this file is parsable by tools.
IMPORT_ALL = True if "%import_all%" == "True" else False
# Runfiles-relative path to the coverage tool entry point, if any.
COVERAGE_TOOL = "%coverage_tool%"

# ===== Template substitutions end =====
+
+
def is_windows():
    """Return True when running on Windows (``os.name == "nt"``)."""
    return os.name == "nt"
+
+
def get_windows_path_with_unc_prefix(path):
    """Return *path* (stripped) with a Windows long-path prefix when needed.

    On non-Windows hosts, Windows 10 1607+, and mingw, the path is returned
    unchanged. On older Windows, the path is made absolute and prefixed with
    the extended-length prefix ``\\\\?\\`` so Win32 APIs accept paths longer
    than MAX_PATH.
    """
    path = path.strip()

    # No need to add prefix for non-Windows platforms.
    if not is_windows() or sys.version_info[0] < 3:
        return path

    # Starting in Windows 10, version 1607(OS build 14393), MAX_PATH limitations have been
    # removed from common Win32 file and directory functions.
    # Related doc: https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=cmd#enable-long-paths-in-windows-10-version-1607-and-later
    import platform

    if platform.win32_ver()[1] >= "10.0.14393":
        return path

    # import sysconfig only now to maintain python 2.6 compatibility
    import sysconfig

    if sysconfig.get_platform() == "mingw":
        return path

    # BUGFIX: `unicode_prefix` was referenced below but never defined, which
    # raised NameError on this legacy-Windows code path. The extended-length
    # prefix is the literal string \\?\.
    unicode_prefix = "\\\\?\\"

    # Lets start the unicode fun
    if path.startswith(unicode_prefix):
        return path

    # os.path.abspath returns a normalized absolute path
    return unicode_prefix + os.path.abspath(path)
+
+
+def search_path(name):
+    """Finds a file in a given search path."""
+    search_path = os.getenv("PATH", os.defpath).split(os.pathsep)
+    for directory in search_path:
+        if directory:
+            path = os.path.join(directory, name)
+            if os.path.isfile(path) and os.access(path, os.X_OK):
+                return path
+    return None
+
+
def is_verbose():
    """True when RULES_PYTHON_BOOTSTRAP_VERBOSE is set to a non-empty value."""
    value = os.environ.get("RULES_PYTHON_BOOTSTRAP_VERBOSE")
    return bool(value)
+
+
def print_verbose(*args, mapping=None, values=None):
    """Print stage-2 bootstrap debug info to stderr when verbose mode is on.

    When `mapping` is given, its items are printed one per line in key
    order; when `values` is given, items are printed one per line with
    their index; otherwise `args` are printed as a single line.
    """
    if not is_verbose():
        return

    def _emit(*parts):
        print("bootstrap: stage 2:", *parts, file=sys.stderr, flush=True)

    if mapping is not None:
        for key in sorted(mapping or {}):
            _emit(*args, f"{key}={mapping[key]!r}")
    elif values is not None:
        for index, item in enumerate(values):
            _emit(*args, f"[{index}] {item!r}")
    else:
        _emit(*args)
+
+
def print_verbose_coverage(*args):
    """Print output if VERBOSE_COVERAGE is non-empty in the environment."""
    if not os.environ.get("VERBOSE_COVERAGE"):
        return
    print(*args, file=sys.stderr, flush=True)
+
+
def is_verbose_coverage():
    """Truthy when either coverage- or bootstrap-verbose mode is enabled.

    NOTE: returns the raw env value (not a bool) when VERBOSE_COVERAGE is
    set, matching how callers use it as a truthy flag.
    """
    coverage_flag = os.environ.get("VERBOSE_COVERAGE")
    return coverage_flag or is_verbose()
+
+
def find_coverage_entry_point(module_space):
    """Resolve the coverage tool to a concrete binary path, or None.

    The toolchain-provided COVERAGE_TOOL takes precedence; otherwise the
    PYTHON_COVERAGE environment variable is consulted.
    """
    cov_tool = COVERAGE_TOOL
    if cov_tool:
        print_verbose_coverage("Using toolchain coverage_tool %r" % cov_tool)
    else:
        cov_tool = os.environ.get("PYTHON_COVERAGE")
        if cov_tool:
            print_verbose_coverage("PYTHON_COVERAGE: %r" % cov_tool)
    if not cov_tool:
        return None
    return find_binary(module_space, cov_tool)
+
+
def find_binary(module_space, bin_name):
    """Finds the real binary if it's not a normal absolute path."""
    if not bin_name:
        return None
    if bin_name.startswith("//"):
        # Case 1: Path is a label. Not supported yet.
        raise AssertionError(
            "Bazel does not support execution of Python interpreters via labels yet"
        )
    if os.path.isabs(bin_name):
        # Case 2: Absolute path.
        return bin_name
    # Use normpath() to convert slashes to os.sep on Windows.
    if os.sep in os.path.normpath(bin_name):
        # Case 3: Path is relative to the repo root.
        return os.path.join(module_space, bin_name)
    # Case 4: Path has to be looked up in the search path.
    return search_path(bin_name)
+
+
def create_python_path_entries(python_imports, module_space):
    """Expand a colon-delimited import string into runfiles-rooted entries."""
    entries = [module_space]
    for rel in python_imports.split(":"):
        entries.append("%s/%s" % (module_space, rel))
    return entries
+
+
def find_runfiles_root(main_rel_path):
    """Finds the runfiles tree."""
    # When the calling process used the runfiles manifest to resolve the
    # location of this stub script, the path may be expanded. This means
    # argv[0] may no longer point to a location inside the runfiles
    # directory. We should therefore respect RUNFILES_DIR and
    # RUNFILES_MANIFEST_FILE set by the caller.
    runfiles_dir = os.environ.get("RUNFILES_DIR", None)
    if not runfiles_dir:
        runfiles_manifest_file = os.environ.get("RUNFILES_MANIFEST_FILE", "")
        if runfiles_manifest_file.endswith(
            ".runfiles_manifest"
        ) or runfiles_manifest_file.endswith(".runfiles/MANIFEST"):
            # Both "_manifest" and "/MANIFEST" are 9 characters, so slicing
            # them off leaves "<prefix>.runfiles" in either case.
            runfiles_dir = runfiles_manifest_file[:-9]
    # Be defensive: the runfiles dir should contain our main entry point. If
    # it doesn't, then it must not be our runfiles directory.
    if runfiles_dir and os.path.exists(os.path.join(runfiles_dir, main_rel_path)):
        return runfiles_dir

    stub_filename = sys.argv[0]
    if not os.path.isabs(stub_filename):
        stub_filename = os.path.join(os.getcwd(), stub_filename)

    # Walk the stub's symlink chain looking for a sibling "<stub>.runfiles"
    # directory, or detect that the stub already lives inside one.
    while True:
        module_space = stub_filename + (".exe" if is_windows() else "") + ".runfiles"
        if os.path.isdir(module_space):
            return module_space

        runfiles_pattern = r"(.*\.runfiles)" + (r"\\" if is_windows() else "/") + ".*"
        matchobj = re.match(runfiles_pattern, stub_filename)
        if matchobj:
            return matchobj.group(1)

        if not os.path.islink(stub_filename):
            break
        # Follow one symlink hop, resolving relative targets against the
        # link's own directory.
        target = os.readlink(stub_filename)
        if os.path.isabs(target):
            stub_filename = target
        else:
            stub_filename = os.path.join(os.path.dirname(stub_filename), target)

    raise AssertionError("Cannot find .runfiles directory for %s" % sys.argv[0])
+
+
+# Returns repository roots to add to the import path.
+def get_repositories_imports(module_space, import_all):
+    if import_all:
+        repo_dirs = [os.path.join(module_space, d) for d in os.listdir(module_space)]
+        repo_dirs.sort()
+        return [d for d in repo_dirs if os.path.isdir(d)]
+    return [os.path.join(module_space, WORKSPACE_NAME)]
+
+
+def runfiles_envvar(module_space):
+    """Finds the runfiles manifest or the runfiles directory.
+
+    Returns:
+      A tuple of (var_name, var_value) where var_name is either 'RUNFILES_DIR' or
+      'RUNFILES_MANIFEST_FILE' and var_value is the path to that directory or
+      file, or (None, None) if runfiles couldn't be found.
+    """
+    # If this binary is the data-dependency of another one, the other sets
+    # RUNFILES_MANIFEST_FILE or RUNFILES_DIR for our sake.
+    runfiles = os.environ.get("RUNFILES_MANIFEST_FILE", None)
+    if runfiles:
+        return ("RUNFILES_MANIFEST_FILE", runfiles)
+
+    runfiles = os.environ.get("RUNFILES_DIR", None)
+    if runfiles:
+        return ("RUNFILES_DIR", runfiles)
+
+    # Look for the runfiles "output" manifest, argv[0] + ".runfiles_manifest"
+    runfiles = module_space + "_manifest"
+    if os.path.exists(runfiles):
+        return ("RUNFILES_MANIFEST_FILE", runfiles)
+
+    # Look for the runfiles "input" manifest, argv[0] + ".runfiles/MANIFEST"
+    # Normally .runfiles_manifest and MANIFEST are both present, but the
+    # former will be missing for zip-based builds or if someone copies the
+    # runfiles tree elsewhere.
+    runfiles = os.path.join(module_space, "MANIFEST")
+    if os.path.exists(runfiles):
+        return ("RUNFILES_MANIFEST_FILE", runfiles)
+
+    # If running in a sandbox and no environment variables are set, then
+    # Look for the runfiles  next to the binary.
+    if module_space.endswith(".runfiles") and os.path.isdir(module_space):
+        return ("RUNFILES_DIR", module_space)
+
+    return (None, None)
+
+
def deduplicate(items):
    """Efficiently filter out duplicates, keeping the first element only."""
    seen = set()
    for item in items:
        if item in seen:
            continue
        seen.add(item)
        yield item
+
+
def instrumented_file_paths():
    """Yields tuples of realpath of each instrumented file with the relative path."""
    manifest_filename = os.environ.get("COVERAGE_MANIFEST")
    if not manifest_filename:
        return
    with open(manifest_filename, "r") as manifest:
        for raw_line in manifest:
            filename = raw_line.strip()
            if not filename:
                continue
            try:
                realpath = os.path.realpath(filename)
            except OSError:
                print(
                    "Could not find instrumented file {}".format(filename),
                    file=sys.stderr,
                    flush=True,
                )
                continue
            # Only symlinked entries (realpath differs) need fixing up.
            if realpath == filename:
                continue
            print_verbose_coverage("Fixing up {} -> {}".format(realpath, filename))
            yield (realpath, filename)
+
+
def unresolve_symlinks(output_filename):
    # type: (str) -> None
    """Replace realpath of instrumented files with the relative path in the lcov output.

    Though we are asking coveragepy to use relative file names, it currently
    ignores that for purposes of generating the lcov report (and other reports
    which are not the XML report), so we need to go and fix up the report.

    This function is a workaround for that issue. Once that issue is fixed
    upstream and the updated version is widely in use, this should be removed.

    See https://github.com/nedbat/coveragepy/issues/963.
    """
    substitutions = list(instrumented_file_paths())
    if not substitutions:
        return
    unfixed_file = output_filename + ".tmp"
    os.rename(output_filename, unfixed_file)
    with open(unfixed_file, "r") as unfixed, open(output_filename, "w") as output_file:
        for line in unfixed:
            # Only source-file records ("SF:...") contain paths to rewrite.
            if line.startswith("SF:"):
                for realpath, filename in substitutions:
                    line = line.replace(realpath, filename)
            output_file.write(line)
    os.unlink(unfixed_file)
+
+
def _run_py(main_filename, *, args, cwd=None):
    """Executes the given Python file using the various environment settings.

    Args:
        main_filename: path of the Python file to run as ``__main__``.
        args: list of strings exposed as ``sys.argv[1:]``.
        cwd: optional directory to chdir into for the duration of the run.
    """

    # Save global state so it can be restored even if the program raises.
    orig_argv = sys.argv
    orig_cwd = os.getcwd()
    try:
        sys.argv = [main_filename] + args
        if cwd:
            os.chdir(cwd)
        print_verbose("run_py: cwd:", os.getcwd())
        print_verbose("run_py: sys.argv: ", values=sys.argv)
        print_verbose("run_py: os.environ:", mapping=os.environ)
        print_verbose("run_py: sys.path:", values=sys.path)
        runpy.run_path(main_filename, run_name="__main__")
    finally:
        # Restore argv/cwd so later teardown (e.g. coverage) sees the
        # original process state.
        os.chdir(orig_cwd)
        sys.argv = orig_argv
+
+
[email protected]
+def _maybe_collect_coverage(enable):
+    if not enable:
+        yield
+        return
+
+    import uuid
+
+    import coverage
+
+    coverage_dir = os.environ["COVERAGE_DIR"]
+    unique_id = uuid.uuid4()
+
+    # We need for coveragepy to use relative paths.  This can only be configured
+    rcfile_name = os.path.join(coverage_dir, ".coveragerc_{}".format(unique_id))
+    with open(rcfile_name, "w") as rcfile:
+        rcfile.write(
+            """[run]
+relative_files = True
+"""
+        )
+    try:
+        cov = coverage.Coverage(
+            config_file=rcfile_name,
+            branch=True,
+            # NOTE: The messages arg controls what coverage prints to stdout/stderr,
+            # which can interfere with the Bazel coverage command. Enabling message
+            # output is only useful for debugging coverage support.
+            messages=is_verbose_coverage(),
+            omit=[
+                # Pipes can't be read back later, which can cause coverage to
+                # throw an error when trying to get its source code.
+                "/dev/fd/*",
+            ],
+        )
+        cov.start()
+        try:
+            yield
+        finally:
+            cov.stop()
+            lcov_path = os.path.join(coverage_dir, "pylcov.dat")
+            cov.lcov_report(
+                outfile=lcov_path,
+                # Ignore errors because sometimes instrumented files aren't
+                # readable afterwards. e.g. if they come from /dev/fd or if
+                # they were transient code-under-test in /tmp
+                ignore_errors=True,
+            )
+            if os.path.isfile(lcov_path):
+                unresolve_symlinks(lcov_path)
+    finally:
+        try:
+            os.unlink(rcfile_name)
+        except OSError as err:
+            # It's possible that the profiled program might execute another Python
+            # binary through a wrapper that would then delete the rcfile.  Not much
+            # we can do about that, besides ignore the failure here.
+            print_verbose_coverage("Error removing temporary coverage rc file:", err)
+
+
+def main():
+    print_verbose("initial argv:", values=sys.argv)
+    print_verbose("initial cwd:", os.getcwd())
+    print_verbose("initial environ:", mapping=os.environ)
+    print_verbose("initial sys.path:", values=sys.path)
+
+    main_rel_path = MAIN
+    if is_windows():
+        main_rel_path = main_rel_path.replace("/", os.sep)
+
+    module_space = find_runfiles_root(main_rel_path)
+    print_verbose("runfiles root:", module_space)
+
+    # Recreate the "add main's dir to sys.path[0]" behavior to match the
+    # system-python bootstrap / typical Python behavior.
+    #
+    # Without safe path enabled, when `python foo/bar.py` is run, python will
+    # resolve the foo/bar.py symlink to its real path, then add the directory
+    # of that path to sys.path. But, the resolved directory for the symlink
+    # depends on if the file is generated or not.
+    #
+    # When foo/bar.py is a source file, then it's a symlink pointing
+    # back to the client source directory. This means anything from that source
+    # directory becomes importable, i.e. most code is importable.
+    #
+    # When foo/bar.py is a generated file, then it's a symlink pointing to
+    # somewhere under bazel-out/.../bin, i.e. where generated files are. This
+    # means only other generated files are importable (not source files).
+    #
+    # To replicate this behavior, we add main's directory within the runfiles
+    # when safe path isn't enabled.
+    if not getattr(sys.flags, "safe_path", False):
+        prepend_path_entries = [
+            os.path.join(module_space, os.path.dirname(main_rel_path))
+        ]
+    else:
+        prepend_path_entries = []
+    python_path_entries = create_python_path_entries(IMPORTS_STR, module_space)
+    python_path_entries += get_repositories_imports(module_space, IMPORT_ALL)
+    python_path_entries = [
+        get_windows_path_with_unc_prefix(d) for d in python_path_entries
+    ]
+
+    # Remove duplicates to avoid overly long PYTHONPATH (#10977). Preserve order,
+    # keep first occurrence only.
+    python_path_entries = deduplicate(python_path_entries)
+
+    if is_windows():
+        python_path_entries = [p.replace("/", os.sep) for p in python_path_entries]
+    else:
+        # deduplicate returns a generator, but we need a list after this.
+        python_path_entries = list(python_path_entries)
+
+    # We're emulating PYTHONPATH being set, so we insert at the start
+    # This isn't a great idea (it can shadow the stdlib), but is the historical
+    # behavior.
+    runfiles_envkey, runfiles_envvalue = runfiles_envvar(module_space)
+    if runfiles_envkey:
+        os.environ[runfiles_envkey] = runfiles_envvalue
+
+    main_filename = os.path.join(module_space, main_rel_path)
+    main_filename = get_windows_path_with_unc_prefix(main_filename)
+    assert os.path.exists(main_filename), (
+        "Cannot exec() %r: file not found." % main_filename
+    )
+    assert os.access(main_filename, os.R_OK), (
+        "Cannot exec() %r: file not readable." % main_filename
+    )
+
+    # COVERAGE_DIR is set if coverage is enabled and instrumentation is configured
+    # for something, though it could be another program executing this one or
+    # one executed by this one (e.g. an extension module).
+    if os.environ.get("COVERAGE_DIR"):
+        cov_tool = find_coverage_entry_point(module_space)
+        if cov_tool is None:
+            print_verbose_coverage(
+                "Coverage was enabled, but python coverage tool was not configured."
+                + "To enable coverage, consult the docs at "
+                + "https://rules-python.readthedocs.io/en/latest/coverage.html"
+            )
+        else:
+            # Inhibit infinite recursion:
+            if "PYTHON_COVERAGE" in os.environ:
+                del os.environ["PYTHON_COVERAGE"]
+
+            if not os.path.exists(cov_tool):
+                raise EnvironmentError(
+                    "Python coverage tool %r not found. "
+                    "Try running with VERBOSE_COVERAGE=1 to collect more information."
+                    % cov_tool
+                )
+
+            # coverage library expects sys.path[0] to contain the library, and replaces
+            # it with the directory of the program it starts. Our actual sys.path[0] is
+            # the runfiles directory, which must not be replaced.
+            # CoverageScript.do_execute() undoes this sys.path[0] setting.
+            #
+            # Update sys.path such that python finds the coverage package. The coverage
+            # entry point is coverage.coverage_main, so we need to do twice the dirname.
+            coverage_dir = os.path.dirname(os.path.dirname(cov_tool))
+            print_verbose("coverage: adding to sys.path:", coverage_dir)
+            python_path_entries.append(coverage_dir)
+            python_path_entries = deduplicate(python_path_entries)
+    else:
+        cov_tool = None
+
+    sys.stdout.flush()
+
+    # Add the user imports after the stdlib, but before the runtime's
+    # site-packages directory. This gives the stdlib precedence, while allowing
+    # users to override non-stdlib packages that may have been bundled with
+    # the runtime (usually pip).
+    # NOTE: There isn't a good way to identify the stdlib paths, so we just
+    # expect site-packages comes after it, per
+    # https://docs.python.org/3/library/sys_path_init.html#sys-path-init
+    for i, path in enumerate(sys.path):
+        # dist-packages is a debian convention, see
+        # https://wiki.debian.org/Python#Deviations_from_upstream
+        if os.path.basename(path) in ("site-packages", "dist-packages"):
+            sys.path[i:i] = python_path_entries
+            break
+    else:
+        # Otherwise, no site-packages directory was found, which is odd but ok.
+        sys.path.extend(python_path_entries)
+
+    # NOTE: The sys.path must be modified before coverage is imported/activated
+    # NOTE: Perform this after the user imports are appended. This avoids a
+    # user import accidentally triggering the site-packages logic above.
+    sys.path[0:0] = prepend_path_entries
+
+    with _maybe_collect_coverage(enable=cov_tool is not None):
+        # The first arg is this bootstrap, so drop that for the re-invocation.
+        _run_py(main_filename, args=sys.argv[1:])
+        sys.exit(0)
+
+
+main()
diff --git a/python/private/text_util.bzl b/python/private/text_util.bzl
index 78f62be..38f2b0e 100644
--- a/python/private/text_util.bzl
+++ b/python/private/text_util.bzl
@@ -20,6 +20,15 @@
 
     return "\n".join([indent + line for line in text.splitlines()])
 
+def _hanging_indent(text, indent = " " * 4):
+    if "\n" not in text:
+        return text
+
+    lines = text.splitlines()
+    for i, line in enumerate(lines):
+        lines[i] = (indent if i != 0 else "") + line
+    return "\n".join(lines)
+
 def _render_alias(name, actual, *, visibility = None):
     args = [
         "name = \"{}\",".format(name),
@@ -35,18 +44,21 @@
         ")",
     ])
 
-def _render_dict(d, *, value_repr = repr):
+def _render_dict(d, *, key_repr = repr, value_repr = repr):
+    if not d:
+        return "{}"
+
     return "\n".join([
         "{",
         _indent("\n".join([
-            "{}: {},".format(repr(k), value_repr(v))
+            "{}: {},".format(key_repr(k), value_repr(v))
             for k, v in d.items()
         ])),
         "}",
     ])
 
-def _render_select(selects, *, no_match_error = None, value_repr = repr):
-    dict_str = _render_dict(selects, value_repr = value_repr) + ","
+def _render_select(selects, *, no_match_error = None, key_repr = repr, value_repr = repr, name = "select"):
+    dict_str = _render_dict(selects, key_repr = key_repr, value_repr = value_repr) + ","
 
     if no_match_error:
         args = "\n".join([
@@ -62,16 +74,26 @@
             "",
         ])
 
-    return "select({})".format(args)
+    return "{}({})".format(name, args)
 
-def _render_list(items):
+def _render_list(items, *, hanging_indent = ""):
+    """Convert a list to formatted text.
+
+    Args:
+        items: list of items.
+        hanging_indent: str, indent to apply to second and following lines of
+            the formatted text.
+
+    Returns:
+        The list pretty formatted as a string.
+    """
     if not items:
         return "[]"
 
     if len(items) == 1:
         return "[{}]".format(repr(items[0]))
 
-    return "\n".join([
+    text = "\n".join([
         "[",
         _indent("\n".join([
             "{},".format(repr(item))
@@ -79,11 +101,52 @@
         ])),
         "]",
     ])
+    if hanging_indent:
+        text = _hanging_indent(text, hanging_indent)
+    return text
+
+def _render_str(value):
+    return repr(value)
+
+def _render_tuple(items, *, value_repr = repr):
+    if not items:
+        return "tuple()"
+
+    if len(items) == 1:
+        return "({},)".format(value_repr(items[0]))
+
+    return "\n".join([
+        "(",
+        _indent("\n".join([
+            "{},".format(value_repr(item))
+            for item in items
+        ])),
+        ")",
+    ])
+
+def _toolchain_prefix(index, name, pad_length):
+    """Prefixes the given name with the index, padded with zeros to ensure lexicographic sorting.
+
+    Examples:
+      toolchain_prefix(   2, "foo", 4) == "_0002_foo_"
+      toolchain_prefix(2000, "foo", 4) == "_2000_foo_"
+    """
+    return "_{}_{}_".format(_left_pad_zero(index, pad_length), name)
+
+def _left_pad_zero(index, length):
+    if index < 0:
+        fail("index must be non-negative")
+    return ("0" * length + str(index))[-length:]
 
 render = struct(
     alias = _render_alias,
     dict = _render_dict,
+    hanging_indent = _hanging_indent,
     indent = _indent,
+    left_pad_zero = _left_pad_zero,
     list = _render_list,
     select = _render_select,
+    str = _render_str,
+    toolchain_prefix = _toolchain_prefix,
+    tuple = _render_tuple,
 )
diff --git a/python/private/toolchain_types.bzl b/python/private/toolchain_types.bzl
new file mode 100644
index 0000000..ef81bf3
--- /dev/null
+++ b/python/private/toolchain_types.bzl
@@ -0,0 +1,23 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Labels to identify toolchain types.
+
+This is a separate file because things needing the toolchain types (in
+particular, toolchain() registrations) shouldn't need to load the entire
+implementation of the toolchain.
+"""
+
+TARGET_TOOLCHAIN_TYPE = Label("//python:toolchain_type")
+EXEC_TOOLS_TOOLCHAIN_TYPE = Label("//python:exec_tools_toolchain_type")
+PY_CC_TOOLCHAIN_TYPE = Label("//python/cc:toolchain_type")
diff --git a/python/private/toolchains_repo.bzl b/python/private/toolchains_repo.bzl
index 4b6bd11..df16fb8 100644
--- a/python/private/toolchains_repo.bzl
+++ b/python/private/toolchains_repo.bzl
@@ -30,7 +30,8 @@
     "PLATFORMS",
     "WINDOWS_NAME",
 )
-load(":which.bzl", "which_with_fail")
+load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "repo_utils")
+load("//python/private:text_util.bzl", "render")
 
 def get_repository_name(repository_workspace):
     dummy_label = "//:_"
@@ -40,8 +41,7 @@
         prefix,
         python_version,
         set_python_version_constraint,
-        user_repository_name,
-        rules_python):
+        user_repository_name):
     """Creates the content for toolchain definitions for a build file.
 
     Args:
@@ -51,58 +51,34 @@
             have the Python version constraint added as a requirement for
             matching the toolchain, "False" if not.
         user_repository_name: names for the user repos
-        rules_python: rules_python label
 
     Returns:
         build_content: Text containing toolchain definitions
     """
-    if set_python_version_constraint == "True":
-        constraint = "{rules_python}//python/config_settings:is_python_{python_version}".format(
-            rules_python = rules_python,
-            python_version = python_version,
-        )
-        target_settings = '["{}"]'.format(constraint)
-    elif set_python_version_constraint == "False":
-        target_settings = "[]"
-    else:
-        fail(("Invalid set_python_version_constraint value: got {} {}, wanted " +
-              "either the string 'True' or the string 'False'; " +
-              "(did you convert bool to string?)").format(
-            type(set_python_version_constraint),
-            repr(set_python_version_constraint),
-        ))
 
     # We create a list of toolchain content from iterating over
     # the enumeration of PLATFORMS.  We enumerate PLATFORMS in
     # order to get us an index to increment the increment.
-    return "".join([
-        """
-toolchain(
-    name = "{prefix}{platform}_toolchain",
+    return "\n\n".join([
+        """\
+py_toolchain_suite(
+    user_repository_name = "{user_repository_name}_{platform}",
+    prefix = "{prefix}{platform}",
     target_compatible_with = {compatible_with},
-    target_settings = {target_settings},
-    toolchain = "@{user_repository_name}_{platform}//:python_runtimes",
-    toolchain_type = "@bazel_tools//tools/python:toolchain_type",
-)
-
-toolchain(
-    name = "{prefix}{platform}_py_cc_toolchain",
-    target_compatible_with = {compatible_with},
-    target_settings = {target_settings},
-    toolchain = "@{user_repository_name}_{platform}//:py_cc_toolchain",
-    toolchain_type = "@rules_python//python/cc:toolchain_type",
-
-)
-""".format(
-            compatible_with = meta.compatible_with,
+    flag_values = {flag_values},
+    python_version = "{python_version}",
+    set_python_version_constraint = "{set_python_version_constraint}",
+)""".format(
+            compatible_with = render.indent(render.list(meta.compatible_with)).lstrip(),
+            flag_values = render.indent(render.dict(
+                meta.flag_values,
+                key_repr = lambda x: repr(str(x)),  # this is to correctly display labels
+            )).lstrip(),
             platform = platform,
-            # We have to use a String value here because bzlmod is passing in a
-            # string as we cannot have list of bools in build rule attribues.
-            # This if statement does not appear to work unless it is in the
-            # toolchain file.
-            target_settings = target_settings,
+            set_python_version_constraint = set_python_version_constraint,
             user_repository_name = user_repository_name,
             prefix = prefix,
+            python_version = python_version,
         )
         for platform, meta in PLATFORMS.items()
     ])
@@ -116,17 +92,17 @@
 # python_register_toolchains macro so you don't normally need to interact with
 # these targets.
 
-"""
+load("@{rules_python}//python/private:py_toolchain_suite.bzl", "py_toolchain_suite")
 
-    # Get the repository name
-    rules_python = get_repository_name(rctx.attr._rules_python_workspace)
+""".format(
+        rules_python = rctx.attr._rules_python_workspace.workspace_name,
+    )
 
     toolchains = python_toolchain_build_file_content(
         prefix = "",
         python_version = rctx.attr.python_version,
         set_python_version_constraint = str(rctx.attr.set_python_version_constraint),
         user_repository_name = rctx.attr.user_repository_name,
-        rules_python = rules_python,
     )
 
     rctx.file("BUILD.bazel", build_content + toolchains)
@@ -144,9 +120,10 @@
 )
 
 def _toolchain_aliases_impl(rctx):
-    (os_name, arch) = get_host_os_arch(rctx)
+    logger = repo_utils.logger(rctx)
+    (os_name, arch) = _get_host_os_arch(rctx, logger)
 
-    host_platform = get_host_platform(os_name, arch)
+    host_platform = _get_host_platform(os_name, arch)
 
     is_windows = (os_name == WINDOWS_NAME)
     python3_binary_path = "python.exe" if is_windows else "bin/python3"
@@ -240,8 +217,101 @@
 
 toolchain_aliases = repository_rule(
     _toolchain_aliases_impl,
-    doc = """Creates a repository with a shorter name meant for the host platform, which contains
-    a BUILD.bazel file declaring aliases to the host platform's targets.
+    doc = """\
+Creates a repository with a shorter name only referencing the python version,
+it contains a BUILD.bazel file declaring aliases to the host platform's targets
+and is a great fit for any usage related to setting up toolchains for build
+actions.""",
+    attrs = {
+        "platforms": attr.string_list(
+            doc = "List of platforms for which aliases shall be created",
+        ),
+        "python_version": attr.string(doc = "The Python version."),
+        "user_repository_name": attr.string(
+            mandatory = True,
+            doc = "The base name for all created repositories, like 'python38'.",
+        ),
+        "_rules_python_workspace": attr.label(default = Label("//:WORKSPACE")),
+    },
+    environ = [REPO_DEBUG_ENV_VAR],
+)
+
+def _host_toolchain_impl(rctx):
+    logger = repo_utils.logger(rctx)
+    rctx.file("BUILD.bazel", """\
+# Generated by python/private/toolchains_repo.bzl
+
+exports_files(["python"], visibility = ["//visibility:public"])
+""")
+
+    (os_name, arch) = _get_host_os_arch(rctx, logger)
+    host_platform = _get_host_platform(os_name, arch)
+    repo = "@@{py_repository}_{host_platform}".format(
+        py_repository = rctx.attr.name[:-len("_host")],
+        host_platform = host_platform,
+    )
+
+    rctx.report_progress("Symlinking interpreter files to the target platform")
+    host_python_repo = rctx.path(Label("{repo}//:BUILD.bazel".format(repo = repo)))
+
+    # The interpreter might not work on platfroms that don't have symlink support if
+    # we just symlink the interpreter itself. rctx.symlink does a copy in such cases
+    # so we can just attempt to symlink all of the directories in the host interpreter
+    # repo, which should be faster than re-downloading it.
+    for p in host_python_repo.dirname.readdir():
+        if p.basename in [
+            # ignore special files created by the repo rule automatically
+            "BUILD.bazel",
+            "MODULE.bazel",
+            "REPO.bazel",
+            "WORKSPACE",
+            "WORKSPACE.bazel",
+            "WORKSPACE.bzlmod",
+        ]:
+            continue
+
+        # symlink works on all platforms that bazel supports, so it should work on
+        # UNIX and Windows with and without symlink support. For better performance
+        # users should enable the symlink startup option, however that requires admin
+        # privileges.
+        rctx.symlink(p, p.basename)
+
+    is_windows = (os_name == WINDOWS_NAME)
+    python_binary = "python.exe" if is_windows else "python"
+
+    # Ensure that we can run the interpreter and check that we are not
+    # using the host interpreter.
+    python_tester_contents = """\
+from pathlib import Path
+import sys
+
+python = Path(sys.executable)
+want_python = str(Path("{python}").resolve())
+got_python = str(Path(sys.executable).resolve())
+
+assert want_python == got_python, \
+    "Expected to use a different interpreter:\\nwant: '{{}}'\\n got: '{{}}'".format(
+        want_python,
+        got_python,
+    )
+""".format(repo = repo.strip("@"), python = python_binary)
+    python_tester = rctx.path("python_tester.py")
+    rctx.file(python_tester, python_tester_contents)
+    repo_utils.execute_checked(
+        rctx,
+        op = "CheckHostInterpreter",
+        arguments = [rctx.path(python_binary), python_tester],
+    )
+    if not rctx.delete(python_tester):
+        fail("Failed to delete the python tester")
+
+host_toolchain = repository_rule(
+    _host_toolchain_impl,
+    doc = """\
+Creates a repository with a shorter name meant to be used in the repository_ctx,
+which needs to have `symlinks` for the interpreter. This is separate from the
+toolchain_aliases repo because referencing the `python` interpreter target from
+this repo causes an eager fetch of the toolchain for the host platform.
     """,
     attrs = {
         "platforms": attr.string_list(
@@ -252,6 +322,7 @@
             mandatory = True,
             doc = "The base name for all created repositories, like 'python38'.",
         ),
+        "_rule_name": attr.string(default = "host_toolchain"),
         "_rules_python_workspace": attr.label(default = Label("//:WORKSPACE")),
     },
 )
@@ -316,7 +387,7 @@
 def sanitize_platform_name(platform):
     return platform.replace("-", "_")
 
-def get_host_platform(os_name, arch):
+def _get_host_platform(os_name, arch):
     """Gets the host platform.
 
     Args:
@@ -333,11 +404,13 @@
         fail("No platform declared for host OS {} on arch {}".format(os_name, arch))
     return host_platform
 
-def get_host_os_arch(rctx):
+def _get_host_os_arch(rctx, logger):
     """Infer the host OS name and arch from a repository context.
 
     Args:
         rctx: Bazel's repository_ctx.
+        logger: Logger to use for operations.
+
     Returns:
         A tuple with the host OS name and arch.
     """
@@ -351,7 +424,12 @@
         os_name = WINDOWS_NAME
     else:
         # This is not ideal, but bazel doesn't directly expose arch.
-        arch = rctx.execute([which_with_fail("uname", rctx), "-m"]).stdout.strip()
+        arch = repo_utils.execute_unchecked(
+            rctx,
+            op = "GetUname",
+            arguments = [repo_utils.which_checked(rctx, "uname"), "-m"],
+            logger = logger,
+        ).stdout.strip()
 
         # Normalize the os_name.
         if "mac" in os_name.lower():
diff --git a/python/private/util.bzl b/python/private/util.bzl
index 71476f9..16b8ff8 100644
--- a/python/private/util.bzl
+++ b/python/private/util.bzl
@@ -89,3 +89,16 @@
 # Bazel 5.4 has a bug where every access of testing.ExecutionInfo is a
 # different object that isn't equal to any other. This is fixed in bazel 6+.
 IS_BAZEL_6_OR_HIGHER = testing.ExecutionInfo == testing.ExecutionInfo
+
+_marker_rule_to_detect_bazel_6_4_or_higher = rule(implementation = lambda ctx: None)
+
+# Bazel 6.4 and higher have a bug fix where rule names show up in the str()
+# of a rule. See
+# https://github.com/bazelbuild/bazel/commit/002490b9a2376f0b2ea4a37102c5e94fc50a65ba
+# https://github.com/bazelbuild/bazel/commit/443cbcb641e17f7337ccfdecdfa5e69bc16cae55
+# This technique is done instead of using native.bazel_version because,
+# under stardoc, the native.bazel_version attribute is entirely missing, which
+# prevents doc generation from being able to correctly generate docs.
+IS_BAZEL_6_4_OR_HIGHER = "_marker_rule_to_detect_bazel_6_4_or_higher" in str(
+    _marker_rule_to_detect_bazel_6_4_or_higher,
+)
diff --git a/python/private/whl_filegroup/BUILD.bazel b/python/private/whl_filegroup/BUILD.bazel
new file mode 100644
index 0000000..398b9af
--- /dev/null
+++ b/python/private/whl_filegroup/BUILD.bazel
@@ -0,0 +1,20 @@
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("//python:defs.bzl", "py_binary")
+
+filegroup(
+    name = "distribution",
+    srcs = glob(["**"]),
+    visibility = ["//python/private:__pkg__"],
+)
+
+bzl_library(
+    name = "whl_filegroup_bzl",
+    srcs = ["whl_filegroup.bzl"],
+    visibility = ["//:__subpackages__"],
+)
+
+py_binary(
+    name = "extract_wheel_files",
+    srcs = ["extract_wheel_files.py"],
+    visibility = ["//visibility:public"],
+)
diff --git a/python/private/whl_filegroup/extract_wheel_files.py b/python/private/whl_filegroup/extract_wheel_files.py
new file mode 100644
index 0000000..e81e6a3
--- /dev/null
+++ b/python/private/whl_filegroup/extract_wheel_files.py
@@ -0,0 +1,62 @@
+"""Extract files from a wheel's RECORD."""
+
+import re
+import sys
+import zipfile
+from collections.abc import Iterable
+from pathlib import Path
+
+WhlRecord = dict[str, tuple[str, int]]
+
+
+def get_record(whl_path: Path) -> WhlRecord:
+    try:
+        zipf = zipfile.ZipFile(whl_path)
+    except zipfile.BadZipFile as ex:
+        raise RuntimeError(f"{whl_path} is not a valid zip file") from ex
+    files = zipf.namelist()
+    try:
+        (record_file,) = [name for name in files if name.endswith(".dist-info/RECORD")]
+    except ValueError:
+        raise RuntimeError(f"{whl_path} doesn't contain exactly one .dist-info/RECORD")
+    record_lines = zipf.read(record_file).decode().splitlines()
+    return {
+        file: (filehash, int(filelen))
+        for line in record_lines
+        for file, filehash, filelen in [line.split(",")]
+        if filehash  # Skip RECORD itself, which has no hash or length
+    }
+
+
+def get_files(whl_record: WhlRecord, regex_pattern: str) -> list[str]:
+    """Get files in a wheel that match a regex pattern."""
+    p = re.compile(regex_pattern)
+    return [filepath for filepath in whl_record.keys() if re.match(p, filepath)]
+
+
+def extract_files(whl_path: Path, files: Iterable[str], outdir: Path) -> None:
+    """Extract files from whl_path to outdir."""
+    zipf = zipfile.ZipFile(whl_path)
+    for file in files:
+        zipf.extract(file, outdir)
+
+
+def main() -> None:
+    if len(sys.argv) not in {3, 4}:
+        print(
+            f"Usage: {sys.argv[0]} <wheel> <out_dir> [regex_pattern]",
+            file=sys.stderr,
+        )
+        sys.exit(1)
+
+    whl_path = Path(sys.argv[1]).resolve()
+    outdir = Path(sys.argv[2])
+    regex_pattern = sys.argv[3] if len(sys.argv) == 4 else ""
+
+    whl_record = get_record(whl_path)
+    files = get_files(whl_record, regex_pattern)
+    extract_files(whl_path, files, outdir)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python/private/whl_filegroup/whl_filegroup.bzl b/python/private/whl_filegroup/whl_filegroup.bzl
new file mode 100644
index 0000000..c5f97e6
--- /dev/null
+++ b/python/private/whl_filegroup/whl_filegroup.bzl
@@ -0,0 +1,57 @@
+"""Implementation of whl_filegroup rule."""
+
+def _whl_filegroup_impl(ctx):
+    out_dir = ctx.actions.declare_directory(ctx.attr.name)
+    ctx.actions.run(
+        outputs = [out_dir],
+        inputs = [ctx.file.whl],
+        arguments = [
+            ctx.file.whl.path,
+            out_dir.path,
+            ctx.attr.pattern,
+        ],
+        executable = ctx.executable._extract_wheel_files_tool,
+        mnemonic = "PyExtractWheelFiles",
+        progress_message = "Extracting %s files from %s" % (ctx.attr.pattern, ctx.file.whl.short_path),
+    )
+    return [DefaultInfo(
+        files = depset([out_dir]),
+        runfiles = ctx.runfiles(files = [out_dir] if ctx.attr.runfiles else []),
+    )]
+
+whl_filegroup = rule(
+    _whl_filegroup_impl,
+    doc = """Extract files matching a regular expression from a wheel file.
+
+An empty pattern will match all files.
+
+Example usage:
+```starlark
+load("@rules_cc//cc:defs.bzl", "cc_library")
+load("@rules_python//python:pip.bzl", "whl_filegroup")
+
+whl_filegroup(
+    name = "numpy_includes",
+    pattern = "numpy/core/include/numpy",
+    whl = "@pypi//numpy:whl",
+)
+
+cc_library(
+    name = "numpy_headers",
+    hdrs = [":numpy_includes"],
+    includes = ["numpy_includes/numpy/core/include"],
+    deps = ["@rules_python//python/cc:current_py_cc_headers"],
+)
+```
+""",
+    attrs = {
+        "pattern": attr.string(default = "", doc = "Only file paths matching this regex pattern will be extracted."),
+        "runfiles": attr.bool(default = False, doc = "Whether to include the output TreeArtifact in this target's runfiles."),
+        "whl": attr.label(mandatory = True, allow_single_file = True, doc = "The wheel to extract files from."),
+        "_extract_wheel_files_tool": attr.label(
+            default = Label("//python/private/whl_filegroup:extract_wheel_files"),
+            cfg = "exec",
+            executable = True,
+        ),
+    },
+)
diff --git a/python/private/whl_target_platforms.bzl b/python/private/whl_target_platforms.bzl
deleted file mode 100644
index 2c63efe..0000000
--- a/python/private/whl_target_platforms.bzl
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-A starlark implementation of the wheel platform tag parsing to get the target platform.
-"""
-
-# The order of the dictionaries is to keep definitions with their aliases next to each
-# other
-_CPU_ALIASES = {
-    "x86_32": "x86_32",
-    "i386": "x86_32",
-    "i686": "x86_32",
-    "x86": "x86_32",
-    "x86_64": "x86_64",
-    "amd64": "x86_64",
-    "aarch64": "aarch64",
-    "arm64": "aarch64",
-    "ppc": "ppc",
-    "ppc64le": "ppc",
-    "s390x": "s390x",
-}  # buildifier: disable=unsorted-dict-items
-
-_OS_PREFIXES = {
-    "linux": "linux",
-    "manylinux": "linux",
-    "musllinux": "linux",
-    "macos": "osx",
-    "win": "windows",
-}  # buildifier: disable=unsorted-dict-items
-
-def whl_target_platforms(tag):
-    """Parse the wheel platform tag and return (os, cpu) tuples.
-
-    Args:
-        tag (str): The platform_tag part of the wheel name. See
-            ./parse_whl_name.bzl for more details.
-
-    Returns:
-        A list of structs, with attributes:
-        * os: str, one of the _OS_PREFIXES values
-        * cpu: str, one of the _CPU_PREFIXES values
-    """
-    cpus = _cpu_from_tag(tag)
-
-    for prefix, os in _OS_PREFIXES.items():
-        if tag.startswith(prefix):
-            return [
-                struct(os = os, cpu = cpu)
-                for cpu in cpus
-            ]
-
-    fail("unknown tag os: {}".format(tag))
-
-def _cpu_from_tag(tag):
-    candidate = [
-        cpu
-        for input, cpu in _CPU_ALIASES.items()
-        if tag.endswith(input)
-    ]
-    if candidate:
-        return candidate
-
-    if tag == "win32":
-        return ["x86_32"]
-    elif tag.endswith("universal2") and tag.startswith("macosx"):
-        return ["x86_64", "aarch64"]
-    else:
-        fail("Unrecognized tag: '{}': cannot determine CPU".format(tag))
diff --git a/python/private/zip_main_template.py b/python/private/zip_main_template.py
new file mode 100644
index 0000000..2d3aea7
--- /dev/null
+++ b/python/private/zip_main_template.py
@@ -0,0 +1,292 @@
+# Template for the __main__.py file inserted into zip files
+#
+# NOTE: This file is a "stage 1" bootstrap, so it's responsible for locating the
+# desired runtime and having it run the stage 2 bootstrap. This means it can't
+# assume much about the current runtime and environment. e.g., the current
+# runtime may not be the correct one, the zip may not have been extracted, the
+# runfiles env vars may not be set, etc.
+#
+# NOTE: This program must retain compatibility with a wide variety of Python
+# versions since it is run by an unknown Python interpreter.
+
+import sys
+
+# The Python interpreter unconditionally prepends the directory containing this
+# script (following symlinks) to the import path. This is the cause of #9239,
+# and is a special case of #7091. We therefore explicitly delete that entry.
+# TODO(#7091): Remove this hack when no longer necessary.
+del sys.path[0]
+
+import os
+import shutil
+import subprocess
+import tempfile
+import zipfile
+
+_STAGE2_BOOTSTRAP = "%stage2_bootstrap%"
+_PYTHON_BINARY = "%python_binary%"
+_WORKSPACE_NAME = "%workspace_name%"
+
+
+# Return True if running on Windows
+def is_windows():
+    # os.name is "nt" on every Windows interpreter build (CPython docs).
+    return os.name == "nt"
+
+
+def get_windows_path_with_unc_prefix(path):
+    """Adds UNC prefix after getting a normalized absolute Windows path.
+
+    No-op for non-Windows platforms or if running under python2.
+
+    Args:
+        path: (str) The path to normalize.
+
+    Returns:
+        str: The path unchanged, or prefixed with \\\\?\\ so Win32 APIs accept
+        paths longer than MAX_PATH on older Windows versions.
+    """
+    path = path.strip()
+
+    # No need to add prefix for non-Windows platforms.
+    # And \\?\ doesn't work in python 2 or on mingw
+    if not is_windows() or sys.version_info[0] < 3:
+        return path
+
+    # Starting in Windows 10, version 1607(OS build 14393), MAX_PATH limitations have been
+    # removed from common Win32 file and directory functions.
+    # Related doc: https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=cmd#enable-long-paths-in-windows-10-version-1607-and-later
+    import platform
+
+    # NOTE(review): this is a lexicographic *string* comparison, not a version
+    # compare — e.g. "10.0.9600" >= "10.0.14393" is True, so some older builds
+    # would skip the prefix. Confirm whether a real version compare is intended.
+    if platform.win32_ver()[1] >= "10.0.14393":
+        return path
+
+    # import sysconfig only now to maintain python 2.6 compatibility
+    import sysconfig
+
+    if sysconfig.get_platform() == "mingw":
+        return path
+
+    # Lets start the unicode fun
+    unicode_prefix = "\\\\?\\"
+    if path.startswith(unicode_prefix):
+        return path
+
+    # os.path.abspath returns a normalized absolute path
+    return unicode_prefix + os.path.abspath(path)
+
+
+def has_windows_executable_extension(path):
+    """Returns True if `path` ends with a Windows executable extension (.exe/.com/.bat)."""
+    return path.endswith(".exe") or path.endswith(".com") or path.endswith(".bat")
+
+
+# On Windows, the templated interpreter path may omit the extension; append
+# ".exe" so later existence/executable checks resolve the real binary.
+if is_windows() and not has_windows_executable_extension(_PYTHON_BINARY):
+    _PYTHON_BINARY = _PYTHON_BINARY + ".exe"
+
+
+def search_path(name):
+    """Finds a file in a given search path.
+
+    Args:
+        name: (str) Basename of the file to look for.
+
+    Returns:
+        str | None: Full path of the first executable regular file named
+        `name` found on $PATH (falling back to os.defpath), or None.
+    """
+    search_path = os.getenv("PATH", os.defpath).split(os.pathsep)
+    for directory in search_path:
+        if directory:
+            path = os.path.join(directory, name)
+            if os.path.isfile(path) and os.access(path, os.X_OK):
+                return path
+    return None
+
+
+def find_python_binary(module_space):
+    """Finds the real Python binary if it's not a normal absolute path.
+
+    Thin wrapper over find_binary() using the templated _PYTHON_BINARY value.
+
+    Args:
+        module_space: (str) Path to the extracted runfiles tree.
+
+    Returns:
+        str | None: Resolved interpreter path, or None if not found.
+    """
+    return find_binary(module_space, _PYTHON_BINARY)
+
+
+def print_verbose(*args, mapping=None, values=None):
+    """Prints a stage-1 debug message to stderr when verbose mode is enabled.
+
+    Output is produced only if the RULES_PYTHON_BOOTSTRAP_VERBOSE environment
+    variable is set to a truthy (non-empty) value.
+
+    Args:
+        *args: Message parts printed after the "bootstrap: stage 1:" prefix.
+        mapping: (dict | None) If given, one line per key=value pair, sorted by key.
+        values: (iterable | None) If given, one line per item with its index.
+    """
+    if bool(os.environ.get("RULES_PYTHON_BOOTSTRAP_VERBOSE")):
+        if mapping is not None:
+            for key, value in sorted((mapping or {}).items()):
+                print(
+                    "bootstrap: stage 1:",
+                    *args,
+                    f"{key}={value!r}",
+                    file=sys.stderr,
+                    flush=True,
+                )
+        elif values is not None:
+            for i, v in enumerate(values):
+                print(
+                    "bootstrap: stage 1:",
+                    *args,
+                    f"[{i}] {v!r}",
+                    file=sys.stderr,
+                    flush=True,
+                )
+        else:
+            print("bootstrap: stage 1:", *args, file=sys.stderr, flush=True)
+
+
+def find_binary(module_space, bin_name):
+    """Finds the real binary if it's not a normal absolute path.
+
+    Args:
+        module_space: (str) Path to the extracted runfiles tree; used to
+            resolve repo-root-relative paths.
+        bin_name: (str) Label, absolute path, relative path, or bare name.
+
+    Returns:
+        str | None: Resolved path, or None if bin_name is empty or not found
+        on the search path.
+
+    Raises:
+        AssertionError: If bin_name is a //-style label (unsupported).
+    """
+    if not bin_name:
+        return None
+    if bin_name.startswith("//"):
+        # Case 1: Path is a label. Not supported yet.
+        raise AssertionError(
+            "Bazel does not support execution of Python interpreters via labels yet"
+        )
+    elif os.path.isabs(bin_name):
+        # Case 2: Absolute path.
+        return bin_name
+    # Use normpath() to convert slashes to os.sep on Windows.
+    elif os.sep in os.path.normpath(bin_name):
+        # Case 3: Path is relative to the repo root.
+        return os.path.join(module_space, bin_name)
+    else:
+        # Case 4: Path has to be looked up in the search path.
+        return search_path(bin_name)
+
+
+def extract_zip(zip_path, dest_dir):
+    """Extracts the contents of a zip file, preserving the unix file mode bits.
+
+    These include the permission bits, and in particular, the executable bit.
+
+    Ideally the zipfile module should set these bits, but it doesn't. See:
+    https://bugs.python.org/issue15795.
+
+    Args:
+        zip_path: The path to the zip file to extract
+        dest_dir: The path to the destination directory
+    """
+    zip_path = get_windows_path_with_unc_prefix(zip_path)
+    dest_dir = get_windows_path_with_unc_prefix(dest_dir)
+    with zipfile.ZipFile(zip_path) as zf:
+        for info in zf.infolist():
+            # extract() writes the file with default permissions; the original
+            # mode bits are restored by the chmod below.
+            zf.extract(info, dest_dir)
+            # UNC-prefixed paths must be absolute/normalized. See
+            # https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file#maximum-path-length-limitation
+            file_path = os.path.abspath(os.path.join(dest_dir, info.filename))
+            # The Unix st_mode bits (see "man 7 inode") are stored in the upper 16
+            # bits of external_attr. Of those, we set the lower 12 bits, which are the
+            # file mode bits (since the file type bits can't be set by chmod anyway).
+            attrs = info.external_attr >> 16
+            if attrs != 0:  # Rumor has it these can be 0 for zips created on Windows.
+                os.chmod(file_path, attrs & 0o7777)
+
+
+# Create the runfiles tree by extracting the zip file
+def create_module_space():
+    """Extracts this zip's runfiles into a fresh temp dir and returns the tree path.
+
+    Returns:
+        str: Path to the "runfiles" subdirectory of the new temp directory.
+    """
+    temp_dir = tempfile.mkdtemp("", "Bazel.runfiles_")
+    # dirname(__file__) — when this __main__.py runs from inside the zip, its
+    # containing "directory" is the zip archive itself, so this extracts the zip.
+    extract_zip(os.path.dirname(__file__), temp_dir)
+    # IMPORTANT: Later code does `rm -fr` on dirname(module_space) -- it's
+    # important that deletion code be in sync with this directory structure
+    return os.path.join(temp_dir, "runfiles")
+
+
+def execute_file(
+    python_program,
+    main_filename,
+    args,
+    env,
+    module_space,
+    workspace,
+):
+    # type: (str, str, list[str], dict[str, str], str, str|None) -> ...
+    """Executes the given Python file using the various environment settings.
+
+    This will not return, and acts much like os.execv, except is much
+    more restricted, and handles Bazel-related edge cases.
+
+    Args:
+      python_program: (str) Path to the Python binary to use for execution
+      main_filename: (str) The Python file to execute
+      args: (list[str]) Additional args to pass to the Python file
+      env: (dict[str, str]) A dict of environment variables to set for the execution
+      module_space: (str) Path to the module space/runfiles tree directory
+      workspace: (str|None) Name of the workspace to execute in. This is expected to be a
+          directory under the runfiles tree.
+    """
+    # We want to use os.execv instead of subprocess.call, which causes
+    # problems with signal passing (making it difficult to kill
+    # Bazel). However, these conditions force us to run via
+    # subprocess.call instead:
+    #
+    # - On Windows, os.execv doesn't handle arguments with spaces
+    #   correctly, and it actually starts a subprocess just like
+    #   subprocess.call.
+    # - When running in a workspace or zip file, we need to clean up the
+    #   workspace after the process finishes so control must return here.
+    try:
+        subprocess_argv = [python_program, main_filename] + args
+        print_verbose("subprocess argv:", values=subprocess_argv)
+        print_verbose("subprocess env:", mapping=env)
+        print_verbose("subprocess cwd:", workspace)
+        ret_code = subprocess.call(subprocess_argv, env=env, cwd=workspace)
+        sys.exit(ret_code)
+    finally:
+        # This runs even on the sys.exit() path above: SystemExit propagates
+        # through the finally clause, so cleanup always happens.
+        # NOTE: dirname() is called because create_module_space() creates a
+        # sub-directory within a temporary directory, and we want to remove the
+        # whole temporary directory.
+        shutil.rmtree(os.path.dirname(module_space), True)
+
+
+def main():
+    """Stage 1 bootstrap: extract this zip's runfiles, then run stage 2.
+
+    Locates the stage 2 bootstrap script and the Python interpreter inside the
+    extracted runfiles tree, builds the subprocess environment, and hands off
+    via execute_file() (which does not return).
+    """
+    print_verbose("running zip main bootstrap")
+    print_verbose("initial argv:", values=sys.argv)
+    print_verbose("initial environ:", mapping=os.environ)
+    print_verbose("initial sys.executable", sys.executable)
+    print_verbose("initial sys.version", sys.version)
+
+    args = sys.argv[1:]
+
+    new_env = {}
+
+    # Runfiles-relative path of the stage 2 bootstrap script; the
+    # %stage2_bootstrap% placeholder is substituted when this template is
+    # expanded into the zip's __main__.py.
+    main_rel_path = _STAGE2_BOOTSTRAP
+    if is_windows():
+        main_rel_path = main_rel_path.replace("/", os.sep)
+
+    module_space = create_module_space()
+    print_verbose("extracted runfiles to:", module_space)
+
+    new_env["RUNFILES_DIR"] = module_space
+
+    # Don't prepend a potentially unsafe path to sys.path
+    # See: https://docs.python.org/3.11/using/cmdline.html#envvar-PYTHONSAFEPATH
+    new_env["PYTHONSAFEPATH"] = "1"
+
+    main_filename = os.path.join(module_space, main_rel_path)
+    main_filename = get_windows_path_with_unc_prefix(main_filename)
+    # NOTE(review): asserts are stripped under `python -O`; consider raising
+    # explicitly if these checks must always run.
+    assert os.path.exists(main_filename), (
+        "Cannot exec() %r: file not found." % main_filename
+    )
+    assert os.access(main_filename, os.R_OK), (
+        "Cannot exec() %r: file not readable." % main_filename
+    )
+
+    # NOTE(review): `program` is never read after this assignment; only
+    # python_program is used below.
+    program = python_program = find_python_binary(module_space)
+    if python_program is None:
+        raise AssertionError("Could not find python binary: " + _PYTHON_BINARY)
+
+    # Some older Python versions on macOS (namely Python 3.7) may unintentionally
+    # leave this environment variable set after starting the interpreter, which
+    # causes problems with Python subprocesses correctly locating sys.executable,
+    # which subsequently causes failure to launch on Python 3.11 and later.
+    if "__PYVENV_LAUNCHER__" in os.environ:
+        del os.environ["__PYVENV_LAUNCHER__"]
+
+    # Inherit the current environment, but let the explicit settings above win.
+    new_env.update((key, val) for key, val in os.environ.items() if key not in new_env)
+
+    workspace = None
+    # If RUN_UNDER_RUNFILES equals 1, it means we need to
+    # change directory to the right runfiles directory.
+    # (So that the data files are accessible)
+    if os.environ.get("RUN_UNDER_RUNFILES") == "1":
+        workspace = os.path.join(module_space, _WORKSPACE_NAME)
+
+    sys.stdout.flush()
+    execute_file(
+        python_program,
+        main_filename,
+        args,
+        new_env,
+        module_space,
+        workspace,
+    )
+
+
+# Entry point when executed as the zip's __main__.py.
+if __name__ == "__main__":
+    main()
diff --git a/python/py_binary.bzl b/python/py_binary.bzl
index ed63ebe..f7f68e6 100644
--- a/python/py_binary.bzl
+++ b/python/py_binary.bzl
@@ -23,10 +23,21 @@
 _py_binary_impl = _starlark_py_binary if config.enable_pystar else native.py_binary
 
 def py_binary(**attrs):
-    """See the Bazel core [py_binary](https://docs.bazel.build/versions/master/be/python.html#py_binary) documentation.
+    """Creates an executable Python program.
+
+    This is the public macro wrapping the underlying rule. Args are forwarded
+    on as-is unless otherwise specified. See
+    the underlying {bzl:obj}`py_binary rule<//python/private/common:py_binary_rule_bazel.bzl%py_binary>`
+    for detailed attribute documentation.
+
+    This macro affects the following args:
+    * `python_version`: cannot be `PY2`
+    * `srcs_version`: cannot be `PY2` or `PY2ONLY`
+    * `tags`: May have special marker values added, if not already present.
 
     Args:
-      **attrs: Rule attributes
+      **attrs: Rule attributes forwarded onto the underlying
+          {bzl:obj}`py_binary rule<//python/private/common:py_binary_rule_bazel.bzl%py_binary>`
     """
     if attrs.get("python_version") == "PY2":
         fail("Python 2 is no longer supported: https://github.com/bazelbuild/rules_python/issues/886")
diff --git a/python/py_library.bzl b/python/py_library.bzl
index 2aa797a..3b9ddd1 100644
--- a/python/py_library.bzl
+++ b/python/py_library.bzl
@@ -23,10 +23,20 @@
 _py_library_impl = _starlark_py_library if config.enable_pystar else native.py_library
 
 def py_library(**attrs):
-    """See the Bazel core [py_library](https://docs.bazel.build/versions/master/be/python.html#py_library) documentation.
+    """Creates an executable Python program.
+
+    This is the public macro wrapping the underlying rule. Args are forwarded
+    on as-is unless otherwise specified. See
+    {bzl:obj}`py_library <//python/private/common:py_library_rule_bazel.bzl%py_library>`
+    for detailed attribute documentation.
+
+    This macro affects the following args:
+    * `srcs_version`: cannot be `PY2` or `PY2ONLY`
+    * `tags`: May have special marker values added, if not already present.
 
     Args:
-      **attrs: Rule attributes
+      **attrs: Rule attributes forwarded onto
+          {bzl:obj}`py_library <//python/private/common:py_library_rule_bazel.bzl%py_library>`
     """
     if attrs.get("srcs_version") in ("PY2", "PY2ONLY"):
         fail("Python 2 is no longer supported: https://github.com/bazelbuild/rules_python/issues/886")
diff --git a/python/py_runtime.bzl b/python/py_runtime.bzl
index d4b913d..9c8cd00 100644
--- a/python/py_runtime.bzl
+++ b/python/py_runtime.bzl
@@ -21,10 +21,21 @@
 _py_runtime_impl = _starlark_py_runtime if IS_BAZEL_6_OR_HIGHER else native.py_runtime
 
 def py_runtime(**attrs):
-    """See the Bazel core [py_runtime](https://docs.bazel.build/versions/master/be/python.html#py_runtime) documentation.
+    """Creates an executable Python program.
+
+    This is the public macro wrapping the underlying rule. Args are forwarded
+    on as-is unless otherwise specified. See
+    {bzl:obj}`py_runtime <//python/private/common:py_runtime_rule.bzl%py_runtime>`
+    for detailed attribute documentation.
+
+    This macro affects the following args:
+    * `python_version`: cannot be `PY2`
+    * `srcs_version`: cannot be `PY2` or `PY2ONLY`
+    * `tags`: May have special marker values added, if not already present.
 
     Args:
-      **attrs: Rule attributes
+      **attrs: Rule attributes forwarded onto
+          {bzl:obj}`py_runtime <//python/private/common:py_runtime_rule.bzl%py_runtime>`
     """
     if attrs.get("python_version") == "PY2":
         fail("Python 2 is no longer supported: see https://github.com/bazelbuild/rules_python/issues/886")
diff --git a/python/py_runtime_info.bzl b/python/py_runtime_info.bzl
index c0a9288..e88e0c0 100644
--- a/python/py_runtime_info.bzl
+++ b/python/py_runtime_info.bzl
@@ -14,8 +14,8 @@
 
 """Public entry point for PyRuntimeInfo."""
 
+load("@rules_python_internal//:rules_python_config.bzl", "config")
 load("//python/private:reexports.bzl", "BuiltinPyRuntimeInfo")
-load("//python/private:util.bzl", "IS_BAZEL_6_OR_HIGHER")
 load("//python/private/common:providers.bzl", _starlark_PyRuntimeInfo = "PyRuntimeInfo")
 
-PyRuntimeInfo = _starlark_PyRuntimeInfo if IS_BAZEL_6_OR_HIGHER else BuiltinPyRuntimeInfo
+PyRuntimeInfo = _starlark_PyRuntimeInfo if config.enable_pystar else BuiltinPyRuntimeInfo
diff --git a/python/py_test.bzl b/python/py_test.bzl
index f58f067..8f93b27 100644
--- a/python/py_test.bzl
+++ b/python/py_test.bzl
@@ -23,10 +23,21 @@
 _py_test_impl = _starlark_py_test if config.enable_pystar else native.py_test
 
 def py_test(**attrs):
-    """See the Bazel core [py_test](https://docs.bazel.build/versions/master/be/python.html#py_test) documentation.
+    """Creates an executable Python program.
+
+    This is the public macro wrapping the underlying rule. Args are forwarded
+    on as-is unless otherwise specified. See
+    {bzl:obj}`py_test <//python/private/common:py_test_rule_bazel.bzl%py_test>`
+    for detailed attribute documentation.
+
+    This macro affects the following args:
+    * `python_version`: cannot be `PY2`
+    * `srcs_version`: cannot be `PY2` or `PY2ONLY`
+    * `tags`: May have special marker values added, if not already present.
 
     Args:
-      **attrs: Rule attributes
+      **attrs: Rule attributes forwarded onto
+          {bzl:obj}`py_test <//python/private/common:py_test_rule_bazel.bzl%py_test>`
     """
     if attrs.get("python_version") == "PY2":
         fail("Python 2 is no longer supported: https://github.com/bazelbuild/rules_python/issues/886")
diff --git a/python/repositories.bzl b/python/repositories.bzl
index 21becb5..cf87234 100644
--- a/python/repositories.bzl
+++ b/python/repositories.bzl
@@ -1,4 +1,4 @@
-# Copyright 2022 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,643 +13,26 @@
 # limitations under the License.
 
 """This file contains macros to be called during WORKSPACE evaluation.
-
-For historic reasons, pip_repositories() is defined in //python:pip.bzl.
 """
 
-load("@bazel_tools//tools/build_defs/repo:http.bzl", _http_archive = "http_archive")
-load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
-load("//python/pip_install:repositories.bzl", "pip_install_dependencies")
-load("//python/private:auth.bzl", "get_auth")
-load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
-load("//python/private:coverage_deps.bzl", "coverage_dep")
-load("//python/private:full_version.bzl", "full_version")
-load("//python/private:internal_config_repo.bzl", "internal_config_repo")
 load(
-    "//python/private:toolchains_repo.bzl",
-    "multi_toolchain_aliases",
-    "toolchain_aliases",
-    "toolchains_repo",
-)
-load("//python/private:which.bzl", "which_with_fail")
-load(
-    ":versions.bzl",
-    "DEFAULT_RELEASE_BASE_URL",
-    "PLATFORMS",
-    "TOOL_VERSIONS",
-    "get_release_info",
+    "//python/private:python_repositories.bzl",
+    _STANDALONE_INTERPRETER_FILENAME = "STANDALONE_INTERPRETER_FILENAME",
+    _http_archive = "http_archive",
+    _is_standalone_interpreter = "is_standalone_interpreter",
+    _py_repositories = "py_repositories",
+    _python_register_multi_toolchains = "python_register_multi_toolchains",
+    _python_register_toolchains = "python_register_toolchains",
+    _python_repository = "python_repository",
 )
 
-def http_archive(**kwargs):
-    maybe(_http_archive, **kwargs)
+py_repositories = _py_repositories
+python_register_multi_toolchains = _python_register_multi_toolchains
+python_register_toolchains = _python_register_toolchains
 
-def py_repositories():
-    """Runtime dependencies that users must install.
-
-    This function should be loaded and called in the user's WORKSPACE.
-    With bzlmod enabled, this function is not needed since MODULE.bazel handles transitive deps.
-    """
-    maybe(
-        internal_config_repo,
-        name = "rules_python_internal",
-    )
-    http_archive(
-        name = "bazel_skylib",
-        sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
-        urls = [
-            "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
-            "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
-        ],
-    )
-    pip_install_dependencies()
-
-########
-# Remaining content of the file is only used to support toolchains.
-########
-
-STANDALONE_INTERPRETER_FILENAME = "STANDALONE_INTERPRETER"
-
-def is_standalone_interpreter(rctx, python_interpreter_path):
-    """Query a python interpreter target for whether or not it's a rules_rust provided toolchain
-
-    Args:
-        rctx (repository_ctx): The repository rule's context object.
-        python_interpreter_path (path): A path representing the interpreter.
-
-    Returns:
-        bool: Whether or not the target is from a rules_python generated toolchain.
-    """
-
-    # Only update the location when using a hermetic toolchain.
-    if not python_interpreter_path:
-        return False
-
-    # This is a rules_python provided toolchain.
-    return rctx.execute([
-        "ls",
-        "{}/{}".format(
-            python_interpreter_path.dirname,
-            STANDALONE_INTERPRETER_FILENAME,
-        ),
-    ]).return_code == 0
-
-def _python_repository_impl(rctx):
-    if rctx.attr.distutils and rctx.attr.distutils_content:
-        fail("Only one of (distutils, distutils_content) should be set.")
-    if bool(rctx.attr.url) == bool(rctx.attr.urls):
-        fail("Exactly one of (url, urls) must be set.")
-
-    platform = rctx.attr.platform
-    python_version = rctx.attr.python_version
-    python_short_version = python_version.rpartition(".")[0]
-    release_filename = rctx.attr.release_filename
-    urls = rctx.attr.urls or [rctx.attr.url]
-    auth = get_auth(rctx, urls)
-
-    if release_filename.endswith(".zst"):
-        rctx.download(
-            url = urls,
-            sha256 = rctx.attr.sha256,
-            output = release_filename,
-            auth = auth,
-        )
-        unzstd = rctx.which("unzstd")
-        if not unzstd:
-            url = rctx.attr.zstd_url.format(version = rctx.attr.zstd_version)
-            rctx.download_and_extract(
-                url = url,
-                sha256 = rctx.attr.zstd_sha256,
-                auth = auth,
-            )
-            working_directory = "zstd-{version}".format(version = rctx.attr.zstd_version)
-
-            make_result = rctx.execute(
-                [which_with_fail("make", rctx), "--jobs=4"],
-                timeout = 600,
-                quiet = True,
-                working_directory = working_directory,
-            )
-            if make_result.return_code:
-                fail_msg = (
-                    "Failed to compile 'zstd' from source for use in Python interpreter extraction. " +
-                    "'make' error message: {}".format(make_result.stderr)
-                )
-                fail(fail_msg)
-            zstd = "{working_directory}/zstd".format(working_directory = working_directory)
-            unzstd = "./unzstd"
-            rctx.symlink(zstd, unzstd)
-
-        exec_result = rctx.execute([
-            which_with_fail("tar", rctx),
-            "--extract",
-            "--strip-components=2",
-            "--use-compress-program={unzstd}".format(unzstd = unzstd),
-            "--file={}".format(release_filename),
-        ])
-        if exec_result.return_code:
-            fail_msg = (
-                "Failed to extract Python interpreter from '{}'. ".format(release_filename) +
-                "'tar' error message: {}".format(exec_result.stderr)
-            )
-            fail(fail_msg)
-    else:
-        rctx.download_and_extract(
-            url = urls,
-            sha256 = rctx.attr.sha256,
-            stripPrefix = rctx.attr.strip_prefix,
-            auth = auth,
-        )
-
-    patches = rctx.attr.patches
-    if patches:
-        for patch in patches:
-            # Should take the strip as an attr, but this is fine for the moment
-            rctx.patch(patch, strip = 1)
-
-    # Write distutils.cfg to the Python installation.
-    if "windows" in rctx.os.name:
-        distutils_path = "Lib/distutils/distutils.cfg"
-    else:
-        distutils_path = "lib/python{}/distutils/distutils.cfg".format(python_short_version)
-    if rctx.attr.distutils:
-        rctx.file(distutils_path, rctx.read(rctx.attr.distutils))
-    elif rctx.attr.distutils_content:
-        rctx.file(distutils_path, rctx.attr.distutils_content)
-
-    # Make the Python installation read-only.
-    if not rctx.attr.ignore_root_user_error:
-        if "windows" not in rctx.os.name:
-            lib_dir = "lib" if "windows" not in platform else "Lib"
-
-            exec_result = rctx.execute([which_with_fail("chmod", rctx), "-R", "ugo-w", lib_dir])
-            if exec_result.return_code != 0:
-                fail_msg = "Failed to make interpreter installation read-only. 'chmod' error msg: {}".format(
-                    exec_result.stderr,
-                )
-                fail(fail_msg)
-            exec_result = rctx.execute([which_with_fail("touch", rctx), "{}/.test".format(lib_dir)])
-            if exec_result.return_code == 0:
-                exec_result = rctx.execute([which_with_fail("id", rctx), "-u"])
-                if exec_result.return_code != 0:
-                    fail("Could not determine current user ID. 'id -u' error msg: {}".format(
-                        exec_result.stderr,
-                    ))
-                uid = int(exec_result.stdout.strip())
-                if uid == 0:
-                    fail("The current user is root, please run as non-root when using the hermetic Python interpreter. See https://github.com/bazelbuild/rules_python/pull/713.")
-                else:
-                    fail("The current user has CAP_DAC_OVERRIDE set, please drop this capability when using the hermetic Python interpreter. See https://github.com/bazelbuild/rules_python/pull/713.")
-
-    python_bin = "python.exe" if ("windows" in platform) else "bin/python3"
-
-    glob_include = []
-    glob_exclude = [
-        "**/* *",  # Bazel does not support spaces in file names.
-        # Unused shared libraries. `python` executable and the `:libpython` target
-        # depend on `libpython{python_version}.so.1.0`.
-        "lib/libpython{python_version}.so".format(python_version = python_short_version),
-        # static libraries
-        "lib/**/*.a",
-        # tests for the standard libraries.
-        "lib/python{python_version}/**/test/**".format(python_version = python_short_version),
-        "lib/python{python_version}/**/tests/**".format(python_version = python_short_version),
-        "**/__pycache__/*.pyc.*",  # During pyc creation, temp files named *.pyc.NNN are created
-    ]
-
-    if rctx.attr.ignore_root_user_error:
-        glob_exclude += [
-            # These pycache files are created on first use of the associated python files.
-            # Exclude them from the glob because otherwise between the first time and second time a python toolchain is used,"
-            # the definition of this filegroup will change, and depending rules will get invalidated."
-            # See https://github.com/bazelbuild/rules_python/issues/1008 for unconditionally adding these to toolchains so we can stop ignoring them."
-            "**/__pycache__/*.pyc",
-            "**/__pycache__/*.pyo",
-        ]
-
-    if "windows" in platform:
-        glob_include += [
-            "*.exe",
-            "*.dll",
-            "bin/**",
-            "DLLs/**",
-            "extensions/**",
-            "include/**",
-            "Lib/**",
-            "libs/**",
-            "Scripts/**",
-            "share/**",
-            "tcl/**",
-        ]
-    else:
-        glob_include += [
-            "bin/**",
-            "extensions/**",
-            "include/**",
-            "lib/**",
-            "libs/**",
-            "share/**",
-        ]
-
-    if rctx.attr.coverage_tool:
-        if "windows" in rctx.os.name:
-            coverage_tool = None
-        else:
-            coverage_tool = '"{}"'.format(rctx.attr.coverage_tool)
-
-        coverage_attr_text = """\
-    coverage_tool = select({{
-        ":coverage_enabled": {coverage_tool},
-        "//conditions:default": None
-    }}),
-""".format(coverage_tool = coverage_tool)
-    else:
-        coverage_attr_text = "    # coverage_tool attribute not supported by this Bazel version"
-
-    build_content = """\
-# Generated by python/repositories.bzl
-
-load("@rules_python//python:py_runtime.bzl", "py_runtime")
-load("@rules_python//python:py_runtime_pair.bzl", "py_runtime_pair")
-load("@rules_python//python/cc:py_cc_toolchain.bzl", "py_cc_toolchain")
-
-package(default_visibility = ["//visibility:public"])
-
-filegroup(
-    name = "files",
-    srcs = glob(
-        include = {glob_include},
-        # Platform-agnostic filegroup can't match on all patterns.
-        allow_empty = True,
-        exclude = {glob_exclude},
-    ),
-)
-
-cc_import(
-    name = "interface",
-    interface_library = "libs/python{python_version_nodot}.lib",
-    system_provided = True,
-)
-
-filegroup(
-    name = "includes",
-    srcs = glob(["include/**/*.h"]),
-)
-
-cc_library(
-    name = "python_headers",
-    deps = select({{
-        "@bazel_tools//src/conditions:windows": [":interface"],
-        "//conditions:default": None,
-    }}),
-    hdrs = [":includes"],
-    includes = [
-        "include",
-        "include/python{python_version}",
-        "include/python{python_version}m",
-    ],
-)
-
-cc_library(
-    name = "libpython",
-    hdrs = [":includes"],
-    srcs = select({{
-        "@platforms//os:windows": ["python3.dll", "libs/python{python_version_nodot}.lib"],
-        "@platforms//os:macos": ["lib/libpython{python_version}.dylib"],
-        "@platforms//os:linux": ["lib/libpython{python_version}.so", "lib/libpython{python_version}.so.1.0"],
-    }}),
-)
-
-exports_files(["python", "{python_path}"])
-
-# Used to only download coverage toolchain when the coverage is collected by
-# bazel.
-config_setting(
-    name = "coverage_enabled",
-    values = {{"collect_code_coverage": "true"}},
-    visibility = ["//visibility:private"],
-)
-
-py_runtime(
-    name = "py3_runtime",
-    files = [":files"],
-{coverage_attr}
-    interpreter = "{python_path}",
-    python_version = "PY3",
-)
-
-py_runtime_pair(
-    name = "python_runtimes",
-    py2_runtime = None,
-    py3_runtime = ":py3_runtime",
-)
-
-py_cc_toolchain(
-    name = "py_cc_toolchain",
-    headers = ":python_headers",
-    python_version = "{python_version}",
-)
-""".format(
-        glob_exclude = repr(glob_exclude),
-        glob_include = repr(glob_include),
-        python_path = python_bin,
-        python_version = python_short_version,
-        python_version_nodot = python_short_version.replace(".", ""),
-        coverage_attr = coverage_attr_text,
-    )
-    rctx.delete("python")
-    rctx.symlink(python_bin, "python")
-    rctx.file(STANDALONE_INTERPRETER_FILENAME, "# File intentionally left blank. Indicates that this is an interpreter repo created by rules_python.")
-    rctx.file("BUILD.bazel", build_content)
-
-    attrs = {
-        "auth_patterns": rctx.attr.auth_patterns,
-        "coverage_tool": rctx.attr.coverage_tool,
-        "distutils": rctx.attr.distutils,
-        "distutils_content": rctx.attr.distutils_content,
-        "ignore_root_user_error": rctx.attr.ignore_root_user_error,
-        "name": rctx.attr.name,
-        "netrc": rctx.attr.netrc,
-        "patches": rctx.attr.patches,
-        "platform": platform,
-        "python_version": python_version,
-        "release_filename": release_filename,
-        "sha256": rctx.attr.sha256,
-        "strip_prefix": rctx.attr.strip_prefix,
-    }
-
-    if rctx.attr.url:
-        attrs["url"] = rctx.attr.url
-    else:
-        attrs["urls"] = urls
-
-    return attrs
-
-python_repository = repository_rule(
-    _python_repository_impl,
-    doc = "Fetches the external tools needed for the Python toolchain.",
-    attrs = {
-        "auth_patterns": attr.string_dict(
-            doc = "Override mapping of hostnames to authorization patterns; mirrors the eponymous attribute from http_archive",
-        ),
-        "coverage_tool": attr.string(
-            # Mirrors the definition at
-            # https://github.com/bazelbuild/bazel/blob/master/src/main/starlark/builtins_bzl/common/python/py_runtime_rule.bzl
-            doc = """
-This is a target to use for collecting code coverage information from `py_binary`
-and `py_test` targets.
-
-If set, the target must either produce a single file or be an executable target.
-The path to the single file, or the executable if the target is executable,
-determines the entry point for the python coverage tool.  The target and its
-runfiles will be added to the runfiles when coverage is enabled.
-
-The entry point for the tool must be loadable by a Python interpreter (e.g. a
-`.py` or `.pyc` file).  It must accept the command line arguments
-of coverage.py (https://coverage.readthedocs.io), at least including
-the `run` and `lcov` subcommands.
-
-The target is accepted as a string by the python_repository and evaluated within
-the context of the toolchain repository.
-
-For more information see the official bazel docs
-(https://bazel.build/reference/be/python#py_runtime.coverage_tool).
-""",
-        ),
-        "distutils": attr.label(
-            allow_single_file = True,
-            doc = "A distutils.cfg file to be included in the Python installation. " +
-                  "Either distutils or distutils_content can be specified, but not both.",
-            mandatory = False,
-        ),
-        "distutils_content": attr.string(
-            doc = "A distutils.cfg file content to be included in the Python installation. " +
-                  "Either distutils or distutils_content can be specified, but not both.",
-            mandatory = False,
-        ),
-        "ignore_root_user_error": attr.bool(
-            default = False,
-            doc = "Whether the check for root should be ignored or not. This causes cache misses with .pyc files.",
-            mandatory = False,
-        ),
-        "netrc": attr.string(
-            doc = ".netrc file to use for authentication; mirrors the eponymous attribute from http_archive",
-        ),
-        "patches": attr.label_list(
-            doc = "A list of patch files to apply to the unpacked interpreter",
-            mandatory = False,
-        ),
-        "platform": attr.string(
-            doc = "The platform name for the Python interpreter tarball.",
-            mandatory = True,
-            values = PLATFORMS.keys(),
-        ),
-        "python_version": attr.string(
-            doc = "The Python version.",
-            mandatory = True,
-        ),
-        "release_filename": attr.string(
-            doc = "The filename of the interpreter to be downloaded",
-            mandatory = True,
-        ),
-        "sha256": attr.string(
-            doc = "The SHA256 integrity hash for the Python interpreter tarball.",
-            mandatory = True,
-        ),
-        "strip_prefix": attr.string(
-            doc = "A directory prefix to strip from the extracted files.",
-        ),
-        "url": attr.string(
-            doc = "The URL of the interpreter to download. Exactly one of url and urls must be set.",
-        ),
-        "urls": attr.string_list(
-            doc = "The URL of the interpreter to download. Exactly one of url and urls must be set.",
-        ),
-        "zstd_sha256": attr.string(
-            default = "7c42d56fac126929a6a85dbc73ff1db2411d04f104fae9bdea51305663a83fd0",
-        ),
-        "zstd_url": attr.string(
-            default = "https://github.com/facebook/zstd/releases/download/v{version}/zstd-{version}.tar.gz",
-        ),
-        "zstd_version": attr.string(
-            default = "1.5.2",
-        ),
-    },
-)
-
-# Wrapper macro around everything above, this is the primary API.
-def python_register_toolchains(
-        name,
-        python_version,
-        distutils = None,
-        distutils_content = None,
-        register_toolchains = True,
-        register_coverage_tool = False,
-        set_python_version_constraint = False,
-        tool_versions = TOOL_VERSIONS,
-        **kwargs):
-    """Convenience macro for users which does typical setup.
-
-    - Create a repository for each built-in platform like "python_linux_amd64" -
-      this repository is lazily fetched when Python is needed for that platform.
-    - Create a repository exposing toolchains for each platform like
-      "python_platforms".
-    - Register a toolchain pointing at each platform.
-    Users can avoid this macro and do these steps themselves, if they want more
-    control.
-    Args:
-        name: base name for all created repos, like "python38".
-        python_version: the Python version.
-        distutils: see the distutils attribute in the python_repository repository rule.
-        distutils_content: see the distutils_content attribute in the python_repository repository rule.
-        register_toolchains: Whether or not to register the downloaded toolchains.
-        register_coverage_tool: Whether or not to register the downloaded coverage tool to the toolchains.
-            NOTE: Coverage support using the toolchain is only supported in Bazel 6 and higher.
-
-        set_python_version_constraint: When set to true, target_compatible_with for the toolchains will include a version constraint.
-        tool_versions: a dict containing a mapping of version with SHASUM and platform info. If not supplied, the defaults
-            in python/versions.bzl will be used.
-        **kwargs: passed to each python_repositories call.
-    """
-
-    if BZLMOD_ENABLED:
-        # you cannot used native.register_toolchains when using bzlmod.
-        register_toolchains = False
-
-    base_url = kwargs.pop("base_url", DEFAULT_RELEASE_BASE_URL)
-
-    python_version = full_version(python_version)
-
-    toolchain_repo_name = "{name}_toolchains".format(name = name)
-
-    # When using unreleased Bazel versions, the version is an empty string
-    if native.bazel_version:
-        bazel_major = int(native.bazel_version.split(".")[0])
-        if bazel_major < 6:
-            if register_coverage_tool:
-                # buildifier: disable=print
-                print((
-                    "WARNING: ignoring register_coverage_tool=True when " +
-                    "registering @{name}: Bazel 6+ required, got {version}"
-                ).format(
-                    name = name,
-                    version = native.bazel_version,
-                ))
-            register_coverage_tool = False
-
-    loaded_platforms = []
-    for platform in PLATFORMS.keys():
-        sha256 = tool_versions[python_version]["sha256"].get(platform, None)
-        if not sha256:
-            continue
-
-        loaded_platforms.append(platform)
-        (release_filename, urls, strip_prefix, patches) = get_release_info(platform, python_version, base_url, tool_versions)
-
-        # allow passing in a tool version
-        coverage_tool = None
-        coverage_tool = tool_versions[python_version].get("coverage_tool", {}).get(platform, None)
-        if register_coverage_tool and coverage_tool == None:
-            coverage_tool = coverage_dep(
-                name = "{name}_{platform}_coverage".format(
-                    name = name,
-                    platform = platform,
-                ),
-                python_version = python_version,
-                platform = platform,
-                visibility = ["@{name}_{platform}//:__subpackages__".format(
-                    name = name,
-                    platform = platform,
-                )],
-            )
-
-        python_repository(
-            name = "{name}_{platform}".format(
-                name = name,
-                platform = platform,
-            ),
-            sha256 = sha256,
-            patches = patches,
-            platform = platform,
-            python_version = python_version,
-            release_filename = release_filename,
-            urls = urls,
-            distutils = distutils,
-            distutils_content = distutils_content,
-            strip_prefix = strip_prefix,
-            coverage_tool = coverage_tool,
-            **kwargs
-        )
-        if register_toolchains:
-            native.register_toolchains("@{toolchain_repo_name}//:{platform}_toolchain".format(
-                toolchain_repo_name = toolchain_repo_name,
-                platform = platform,
-            ))
-            native.register_toolchains("@{toolchain_repo_name}//:{platform}_py_cc_toolchain".format(
-                toolchain_repo_name = toolchain_repo_name,
-                platform = platform,
-            ))
-
-    toolchain_aliases(
-        name = name,
-        python_version = python_version,
-        user_repository_name = name,
-        platforms = loaded_platforms,
-    )
-
-    # in bzlmod we write out our own toolchain repos
-    if BZLMOD_ENABLED:
-        return
-
-    toolchains_repo(
-        name = toolchain_repo_name,
-        python_version = python_version,
-        set_python_version_constraint = set_python_version_constraint,
-        user_repository_name = name,
-    )
-
-def python_register_multi_toolchains(
-        name,
-        python_versions,
-        default_version = None,
-        **kwargs):
-    """Convenience macro for registering multiple Python toolchains.
-
-    Args:
-        name: base name for each name in python_register_toolchains call.
-        python_versions: the Python version.
-        default_version: the default Python version. If not set, the first version in
-            python_versions is used.
-        **kwargs: passed to each python_register_toolchains call.
-    """
-    if len(python_versions) == 0:
-        fail("python_versions must not be empty")
-
-    if not default_version:
-        default_version = python_versions.pop(0)
-    for python_version in python_versions:
-        if python_version == default_version:
-            # We register the default version lastly so that it's not picked first when --platforms
-            # is set with a constraint during toolchain resolution. This is due to the fact that
-            # Bazel will match the unconstrained toolchain if we register it before the constrained
-            # ones.
-            continue
-        python_register_toolchains(
-            name = name + "_" + python_version.replace(".", "_"),
-            python_version = python_version,
-            set_python_version_constraint = True,
-            **kwargs
-        )
-    python_register_toolchains(
-        name = name + "_" + default_version.replace(".", "_"),
-        python_version = default_version,
-        set_python_version_constraint = False,
-        **kwargs
-    )
-
-    multi_toolchain_aliases(
-        name = name,
-        python_versions = {
-            python_version: name + "_" + python_version.replace(".", "_")
-            for python_version in (python_versions + [default_version])
-        },
-    )
+# These symbols are of questionable public visibility. They were probably
+# not intended to be actually public.
+STANDALONE_INTERPRETER_FILENAME = _STANDALONE_INTERPRETER_FILENAME
+http_archive = _http_archive
+is_standalone_interpreter = _is_standalone_interpreter
+python_repository = _python_repository
diff --git a/python/runfiles/BUILD.bazel b/python/runfiles/BUILD.bazel
index 55c25c8..c1fc027 100644
--- a/python/runfiles/BUILD.bazel
+++ b/python/runfiles/BUILD.bazel
@@ -14,6 +14,7 @@
 
 load("//python:defs.bzl", "py_library")
 load("//python:packaging.bzl", "py_wheel")
+load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
 
 filegroup(
     name = "distribution",
@@ -46,13 +47,13 @@
         "Development Status :: 5 - Production/Stable",
         "License :: OSI Approved :: Apache Software License",
     ],
-    description_file = "README.rst",
+    description_file = "README.md",
     dist_folder = "dist",
     distribution = "bazel_runfiles",
     homepage = "https://github.com/bazelbuild/rules_python",
     python_requires = ">=3.7",
     strip_path_prefixes = ["python"],
-    twine = "@publish_deps_twine//:pkg",
+    twine = None if BZLMOD_ENABLED else "@rules_python_publish_deps_twine//:pkg",
     # this can be replaced by building with --stamp --embed_label=1.2.3
     version = "{BUILD_EMBED_LABEL}",
     visibility = ["//visibility:public"],
diff --git a/python/runfiles/README.md b/python/runfiles/README.md
new file mode 100644
index 0000000..2a57c76
--- /dev/null
+++ b/python/runfiles/README.md
@@ -0,0 +1,90 @@
+# bazel-runfiles library
+
+This is a Bazel Runfiles lookup library for Bazel-built Python binaries and tests.
+
+Learn about runfiles: read [Runfiles guide](https://bazel.build/extending/rules#runfiles)
+or watch [Fabian's BazelCon talk](https://www.youtube.com/watch?v=5NbgUMH1OGo).
+
+## Importing
+
+The Runfiles API is available from two sources, a direct Bazel target, and a [pypi](https://pypi.org/) package.
+
+## Pure Bazel imports
+
+1. Depend on this runfiles library from your build rule, like you would other third-party libraries:
+
+    ```python
+    py_binary(
+        name = "my_binary",
+        # ...
+        deps = ["@rules_python//python/runfiles"],
+    )
+    ```
+
+2. Import the runfiles library:
+
+    ```python
+        from python.runfiles import Runfiles
+    ```
+
+## Pypi imports
+
+1. Add the 'bazel-runfiles' dependency along with other third-party dependencies, for example in your `requirements.txt` file.
+
+2. Depend on this runfiles library from your build rule, like you would other third-party libraries:
+    ```python
+    load("@pip_deps//:requirements.bzl", "requirement")
+    
+    py_binary(
+        name = "my_binary",
+        ...
+        deps = [requirement("bazel-runfiles")],
+    )
+    ```
+
+3. Import the runfiles library:
+    ```python
+    from runfiles import Runfiles
+    ```
+
+## Typical Usage
+
+Create a `Runfiles` object and use `Rlocation` to look up runfile paths:
+
+```python
+r = Runfiles.Create()
+# ...
+with open(r.Rlocation("my_workspace/path/to/my/data.txt"), "r") as f:
+    contents = f.readlines()
+    # ...
+```
+
+The code above creates a manifest- or directory-based implementation based on the environment variables in `os.environ`. See `Runfiles.Create()` for more info.
+
+If you want to explicitly create a manifest- or directory-based
+implementation, you can do so as follows:
+
+```python
+r1 = Runfiles.CreateManifestBased("path/to/foo.runfiles_manifest")
+
+r2 = Runfiles.CreateDirectoryBased("path/to/foo.runfiles/")
+```
+
+If you want to start subprocesses, and the subprocess can't automatically
+find the correct runfiles directory, you can explicitly set the right
+environment variables for them:
+
+```python
+import subprocess
+from python.runfiles import Runfiles
+
+r = Runfiles.Create()
+env = {}
+# ...
+env.update(r.EnvVars())
+p = subprocess.run(
+    [r.Rlocation("path/to/binary")],
+    env=env,
+    # ...
+)
+```
diff --git a/python/runfiles/README.rst b/python/runfiles/README.rst
deleted file mode 100644
index ac61d2d..0000000
--- a/python/runfiles/README.rst
+++ /dev/null
@@ -1,56 +0,0 @@
-bazel-runfiles library
-======================
-
-This is a Bazel Runfiles lookup library for Bazel-built Python binaries and tests.
-
-Learn about runfiles: read `Runfiles guide <https://bazel.build/extending/rules#runfiles>`_
-or watch `Fabian's BazelCon talk <https://www.youtube.com/watch?v=5NbgUMH1OGo>`_.
-
-Typical Usage
--------------
-
-1.  Add the 'bazel-runfiles' dependency along with other third-party dependencies, for example in your
-    ``requirements.txt`` file.
-
-2.  Depend on this runfiles library from your build rule, like you would other third-party libraries::
-
-      py_binary(
-          name = "my_binary",
-          ...
-          deps = [requirement("bazel-runfiles")],
-      )
-
-3.  Import the runfiles library::
-
-      import runfiles  # not "from runfiles import runfiles"
-
-4.  Create a Runfiles object and use rlocation to look up runfile paths::
-
-      r = runfiles.Create()
-      ...
-      with open(r.Rlocation("my_workspace/path/to/my/data.txt"), "r") as f:
-        contents = f.readlines()
-        ...
-
-    The code above creates a manifest- or directory-based implementations based
-    on the environment variables in os.environ. See `Create()` for more info.
-
-    If you want to explicitly create a manifest- or directory-based
-    implementations, you can do so as follows::
-
-      r1 = runfiles.CreateManifestBased("path/to/foo.runfiles_manifest")
-
-      r2 = runfiles.CreateDirectoryBased("path/to/foo.runfiles/")
-
-    If you want to start subprocesses, and the subprocess can't automatically
-    find the correct runfiles directory, you can explicitly set the right
-    environment variables for them::
-
-      import subprocess
-      import runfiles
-
-      r = runfiles.Create()
-      env = {}
-      ...
-      env.update(r.EnvVars())
-      p = subprocess.Popen([r.Rlocation("path/to/binary")], env, ...)
\ No newline at end of file
diff --git a/python/runfiles/runfiles.py b/python/runfiles/runfiles.py
index 2240979..6d47d24 100644
--- a/python/runfiles/runfiles.py
+++ b/python/runfiles/runfiles.py
@@ -14,7 +14,7 @@
 
 """Runfiles lookup library for Bazel-built Python binaries and tests.
 
-See @rules_python//python/runfiles/README.rst for usage instructions.
+See @rules_python//python/runfiles/README.md for usage instructions.
 """
 import inspect
 import os
@@ -247,6 +247,20 @@
             raise ValueError("failed to determine caller's file path") from exc
         caller_runfiles_path = os.path.relpath(caller_path, self._python_runfiles_root)
         if caller_runfiles_path.startswith(".." + os.path.sep):
+            # With Python 3.10 and earlier, sys.path contains the directory
+            # of the script, which can result in a module being loaded from
+            # outside the runfiles tree. In this case, assume that the module is
+            # located in the main repository.
+            # With Python 3.11 and higher, the Python launcher sets
+            # PYTHONSAFEPATH, which prevents this behavior.
+            # TODO: This doesn't cover the case of a script being run from an
+            #       external repository, which could be heuristically detected
+            #       by parsing the script's path.
+            if (
+                sys.version_info.minor <= 10
+                and sys.path[0] != self._python_runfiles_root
+            ):
+                return ""
             raise ValueError(
                 "{} does not lie under the runfiles root {}".format(
                     caller_path, self._python_runfiles_root
@@ -267,6 +281,56 @@
         # canonical name.
         return caller_runfiles_directory
 
+    # TODO: Update return type to Self when 3.11 is the min version
+    # https://peps.python.org/pep-0673/
+    @staticmethod
+    def CreateManifestBased(manifest_path: str) -> "Runfiles":
+        return Runfiles(_ManifestBased(manifest_path))
+
+    # TODO: Update return type to Self when 3.11 is the min version
+    # https://peps.python.org/pep-0673/
+    @staticmethod
+    def CreateDirectoryBased(runfiles_dir_path: str) -> "Runfiles":
+        return Runfiles(_DirectoryBased(runfiles_dir_path))
+
+    # TODO: Update return type to Self when 3.11 is the min version
+    # https://peps.python.org/pep-0673/
+    @staticmethod
+    def Create(env: Optional[Dict[str, str]] = None) -> Optional["Runfiles"]:
+        """Returns a new `Runfiles` instance.
+
+        The returned object is either:
+        - manifest-based, meaning it looks up runfile paths from a manifest file, or
+        - directory-based, meaning it looks up runfile paths under a given directory
+        path
+
+        If `env` contains "RUNFILES_MANIFEST_FILE" with non-empty value, this method
+        returns a manifest-based implementation. The object eagerly reads and caches
+        the whole manifest file upon instantiation; this may be relevant for
+        performance consideration.
+
+        Otherwise, if `env` contains "RUNFILES_DIR" with non-empty value (checked in
+        this priority order), this method returns a directory-based implementation.
+
+        If neither cases apply, this method returns null.
+
+        Args:
+        env: {string: string}; optional; the map of environment variables. If None,
+            this function uses the environment variable map of this process.
+        Raises:
+        IOError: if some IO error occurs.
+        """
+        env_map = os.environ if env is None else env
+        manifest = env_map.get("RUNFILES_MANIFEST_FILE")
+        if manifest:
+            return CreateManifestBased(manifest)
+
+        directory = env_map.get("RUNFILES_DIR")
+        if directory:
+            return CreateDirectoryBased(directory)
+
+        return None
+
 
 # Support legacy imports by defining a private symbol.
 _Runfiles = Runfiles
@@ -309,44 +373,12 @@
 
 
 def CreateManifestBased(manifest_path: str) -> Runfiles:
-    return Runfiles(_ManifestBased(manifest_path))
+    return Runfiles.CreateManifestBased(manifest_path)
 
 
 def CreateDirectoryBased(runfiles_dir_path: str) -> Runfiles:
-    return Runfiles(_DirectoryBased(runfiles_dir_path))
+    return Runfiles.CreateDirectoryBased(runfiles_dir_path)
 
 
 def Create(env: Optional[Dict[str, str]] = None) -> Optional[Runfiles]:
-    """Returns a new `Runfiles` instance.
-
-    The returned object is either:
-    - manifest-based, meaning it looks up runfile paths from a manifest file, or
-    - directory-based, meaning it looks up runfile paths under a given directory
-      path
-
-    If `env` contains "RUNFILES_MANIFEST_FILE" with non-empty value, this method
-    returns a manifest-based implementation. The object eagerly reads and caches
-    the whole manifest file upon instantiation; this may be relevant for
-    performance consideration.
-
-    Otherwise, if `env` contains "RUNFILES_DIR" with non-empty value (checked in
-    this priority order), this method returns a directory-based implementation.
-
-    If neither cases apply, this method returns null.
-
-    Args:
-      env: {string: string}; optional; the map of environment variables. If None,
-          this function uses the environment variable map of this process.
-    Raises:
-      IOError: if some IO error occurs.
-    """
-    env_map = os.environ if env is None else env
-    manifest = env_map.get("RUNFILES_MANIFEST_FILE")
-    if manifest:
-        return CreateManifestBased(manifest)
-
-    directory = env_map.get("RUNFILES_DIR")
-    if directory:
-        return CreateDirectoryBased(directory)
-
-    return None
+    return Runfiles.Create(env)
diff --git a/tests/toolchains/workspace_template/python_version_test.py b/python/runtime_env_toolchains/BUILD.bazel
similarity index 63%
copy from tests/toolchains/workspace_template/python_version_test.py
copy to python/runtime_env_toolchains/BUILD.bazel
index c82611c..21355ac 100644
--- a/tests/toolchains/workspace_template/python_version_test.py
+++ b/python/runtime_env_toolchains/BUILD.bazel
@@ -1,4 +1,4 @@
-# Copyright 2022 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,15 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-import platform
-import unittest
+load("//python/private:runtime_env_toolchain.bzl", "define_runtime_env_toolchain")
 
+package(default_visibility = ["//:__subpackages__"])
 
-class TestPythonVersion(unittest.TestCase):
-    def test_match_toolchain(self):
-        self.assertEqual(platform.python_version(), os.getenv("PYTHON_VERSION"))
-
-
-if __name__ == "__main__":
-    unittest.main()
+define_runtime_env_toolchain(name = "runtime_env_toolchain")
diff --git a/python/uv/BUILD.bazel b/python/uv/BUILD.bazel
new file mode 100644
index 0000000..383bdfc
--- /dev/null
+++ b/python/uv/BUILD.bazel
@@ -0,0 +1,81 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# EXPERIMENTAL: This is experimental and may be removed without notice
+
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("//python/uv/private:current_toolchain.bzl", "current_toolchain")
+
+package(default_visibility = ["//:__subpackages__"])
+
+filegroup(
+    name = "distribution",
+    srcs = glob(["**"]) + [
+        "//python/uv/private:distribution",
+    ],
+    visibility = ["//:__subpackages__"],
+)
+
+# For stardoc to reference the files
+exports_files(["defs.bzl"])
+
+toolchain_type(
+    name = "uv_toolchain_type",
+    visibility = ["//visibility:public"],
+)
+
+current_toolchain(
+    name = "current_toolchain",
+    # Marked manual so that `bazel test //...` passes
+    # even if no toolchain is registered.
+    tags = ["manual"],
+    # EXPERIMENTAL: Visibility is restricted to allow for changes.
+    visibility = [
+        "//:__subpackages__",
+        "@rules_python//examples:__subpackages__",
+    ],
+)
+
+bzl_library(
+    name = "defs",
+    srcs = ["defs.bzl"],
+    # EXPERIMENTAL: Visibility is restricted to allow for changes.
+    visibility = ["//:__subpackages__"],
+)
+
+bzl_library(
+    name = "extensions",
+    srcs = ["extensions.bzl"],
+    # EXPERIMENTAL: Visibility is restricted to allow for changes.
+    visibility = ["//:__subpackages__"],
+    deps = [":repositories"],
+)
+
+bzl_library(
+    name = "repositories",
+    srcs = ["repositories.bzl"],
+    # EXPERIMENTAL: Visibility is restricted to allow for changes.
+    visibility = ["//:__subpackages__"],
+    deps = [
+        "//python/uv/private:toolchains_repo",
+        "//python/uv/private:versions",
+    ],
+)
+
+bzl_library(
+    name = "toolchain",
+    srcs = ["toolchain.bzl"],
+    # EXPERIMENTAL: Visibility is restricted to allow for changes.
+    visibility = ["//:__subpackages__"],
+)
diff --git a/tests/toolchains/workspace_template/python_version_test.py b/python/uv/defs.bzl
similarity index 63%
copy from tests/toolchains/workspace_template/python_version_test.py
copy to python/uv/defs.bzl
index c82611c..20b426a 100644
--- a/tests/toolchains/workspace_template/python_version_test.py
+++ b/python/uv/defs.bzl
@@ -1,4 +1,4 @@
-# Copyright 2022 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,15 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-import platform
-import unittest
+"""
+EXPERIMENTAL: This is experimental and may be removed without notice
 
+A toolchain for uv
+"""
 
-class TestPythonVersion(unittest.TestCase):
-    def test_match_toolchain(self):
-        self.assertEqual(platform.python_version(), os.getenv("PYTHON_VERSION"))
+load("//python/uv/private:providers.bzl", _UvToolchainInfo = "UvToolchainInfo")
 
-
-if __name__ == "__main__":
-    unittest.main()
+UvToolchainInfo = _UvToolchainInfo
diff --git a/python/uv/extensions.bzl b/python/uv/extensions.bzl
new file mode 100644
index 0000000..82560eb
--- /dev/null
+++ b/python/uv/extensions.bzl
@@ -0,0 +1,50 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+EXPERIMENTAL: This is experimental and may be removed without notice
+
+A module extension for working with uv.
+"""
+
+load("//python/uv:repositories.bzl", "uv_register_toolchains")
+
+_DOC = """\
+A module extension for working with uv.
+"""
+
+uv_toolchain = tag_class(attrs = {
+    "uv_version": attr.string(doc = "Explicit version of uv.", mandatory = True),
+})
+
+def _uv_toolchain_extension(module_ctx):
+    for mod in module_ctx.modules:
+        for toolchain in mod.tags.toolchain:
+            if not mod.is_root:
+                fail(
+                    "Only the root module may configure the uv toolchain.",
+                    "This prevents conflicting registrations with any other modules.",
+                    "NOTE: We may wish to enforce a policy where toolchain configuration is only allowed in the root module, or in rules_python. See https://github.com/bazelbuild/bazel/discussions/22024",
+                )
+
+            uv_register_toolchains(
+                uv_version = toolchain.uv_version,
+                register_toolchains = False,
+            )
+
+uv = module_extension(
+    doc = _DOC,
+    implementation = _uv_toolchain_extension,
+    tag_classes = {"toolchain": uv_toolchain},
+)
diff --git a/python/uv/private/BUILD.bazel b/python/uv/private/BUILD.bazel
new file mode 100644
index 0000000..80fd239
--- /dev/null
+++ b/python/uv/private/BUILD.bazel
@@ -0,0 +1,48 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+
+filegroup(
+    name = "distribution",
+    srcs = glob(["**"]),
+    visibility = ["//python/uv:__pkg__"],
+)
+
+bzl_library(
+    name = "current_toolchain",
+    srcs = ["current_toolchain.bzl"],
+    visibility = ["//python/uv:__subpackages__"],
+)
+
+bzl_library(
+    name = "toolchain_types",
+    srcs = ["toolchain_types.bzl"],
+    visibility = ["//python/uv:__subpackages__"],
+)
+
+bzl_library(
+    name = "toolchains_repo",
+    srcs = ["toolchains_repo.bzl"],
+    visibility = ["//python/uv:__subpackages__"],
+    deps = [
+        "//python/private:text_util_bzl",
+    ],
+)
+
+bzl_library(
+    name = "versions",
+    srcs = ["versions.bzl"],
+    visibility = ["//python/uv:__subpackages__"],
+)
diff --git a/python/uv/private/current_toolchain.bzl b/python/uv/private/current_toolchain.bzl
new file mode 100644
index 0000000..91a25cb
--- /dev/null
+++ b/python/uv/private/current_toolchain.bzl
@@ -0,0 +1,62 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module implements an alias rule to the resolved toolchain.
+"""
+
+load("//python/uv/private:toolchain_types.bzl", "UV_TOOLCHAIN_TYPE")
+
+_DOC = """\
+Exposes a concrete toolchain which is the result of Bazel resolving the
+toolchain for the execution or target platform.
+Workaround for https://github.com/bazelbuild/bazel/issues/14009
+"""
+
+# Forward all the providers
+def _current_toolchain_impl(ctx):
+    toolchain_info = ctx.toolchains[UV_TOOLCHAIN_TYPE]
+
+    # Bazel requires executable rules to create the executable themselves,
+    # so we create a symlink in this rule so that it appears this rule created its executable.
+    original_uv_executable = toolchain_info.uv_toolchain_info.uv[DefaultInfo].files_to_run.executable
+
+    # Use `uv` as the name of the binary to make the help message well formatted
+    symlink_uv_executable = ctx.actions.declare_file("current_toolchain/uv".format(original_uv_executable.basename))
+    ctx.actions.symlink(output = symlink_uv_executable, target_file = original_uv_executable)
+
+    new_default_info = DefaultInfo(
+        files = depset([symlink_uv_executable]),
+        runfiles = toolchain_info.default_info.default_runfiles,
+        executable = symlink_uv_executable,
+    )
+
+    template_variable_info = platform_common.TemplateVariableInfo({
+        "UV_BIN": symlink_uv_executable.path,
+    })
+
+    return [
+        toolchain_info,
+        new_default_info,
+        template_variable_info,
+        toolchain_info.uv_toolchain_info,
+    ]
+
+# Copied from java_toolchain_alias
+# https://cs.opensource.google/bazel/bazel/+/master:tools/jdk/java_toolchain_alias.bzl
+current_toolchain = rule(
+    implementation = _current_toolchain_impl,
+    toolchains = [UV_TOOLCHAIN_TYPE],
+    doc = _DOC,
+    executable = True,
+)
diff --git a/tests/toolchains/workspace_template/python_version_test.py b/python/uv/private/providers.bzl
similarity index 60%
copy from tests/toolchains/workspace_template/python_version_test.py
copy to python/uv/private/providers.bzl
index c82611c..ac1ef31 100644
--- a/tests/toolchains/workspace_template/python_version_test.py
+++ b/python/uv/private/providers.bzl
@@ -1,4 +1,4 @@
-# Copyright 2022 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,15 +12,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-import platform
-import unittest
+"""This module contains the definitions of all providers."""
 
+UvToolchainInfo = provider(
+    doc = "Information about how to invoke the uv executable.",
+    fields = {
+        "uv": """
+:type: Target
 
-class TestPythonVersion(unittest.TestCase):
-    def test_match_toolchain(self):
-        self.assertEqual(platform.python_version(), os.getenv("PYTHON_VERSION"))
+The uv binary `Target`
+""",
+        "version": """
+:type: str
 
-
-if __name__ == "__main__":
-    unittest.main()
+The uv version
+""",
+    },
+)
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/python/uv/private/toolchain_types.bzl
similarity index 61%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to python/uv/private/toolchain_types.bzl
index bbdfb4c..031e1ab 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/python/uv/private/toolchain_types.bzl
@@ -1,4 +1,4 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+"""Labels to identify toolchain types.
+
+This is a separate file because things needing the toolchain types (in
+particular, toolchain() registrations) shouldn't need to load the entire
+implementation of the toolchain.
+"""
+
+UV_TOOLCHAIN_TYPE = Label("//python/uv:uv_toolchain_type")
diff --git a/python/uv/private/toolchains_repo.bzl b/python/uv/private/toolchains_repo.bzl
new file mode 100644
index 0000000..9a8858f
--- /dev/null
+++ b/python/uv/private/toolchains_repo.bzl
@@ -0,0 +1,52 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"Creates a repository to hold toolchains"
+
+load("//python/private:text_util.bzl", "render")
+
+_TOOLCHAIN_TEMPLATE = """
+toolchain(
+    name = "{name}",
+    target_compatible_with = {compatible_with},
+    toolchain = "{toolchain_label}",
+    toolchain_type = "{toolchain_type}",
+)
+"""
+
+def _toolchains_repo_impl(repository_ctx):
+    build_content = ""
+    for toolchain_name in repository_ctx.attr.toolchain_names:
+        toolchain_label = repository_ctx.attr.toolchain_labels[toolchain_name]
+        toolchain_compatible_with = repository_ctx.attr.toolchain_compatible_with[toolchain_name]
+
+        build_content += _TOOLCHAIN_TEMPLATE.format(
+            name = toolchain_name,
+            toolchain_type = repository_ctx.attr.toolchain_type,
+            toolchain_label = toolchain_label,
+            compatible_with = render.list(toolchain_compatible_with),
+        )
+
+    repository_ctx.file("BUILD.bazel", build_content)
+
+uv_toolchains_repo = repository_rule(
+    _toolchains_repo_impl,
+    doc = "Generates a toolchain hub repository",
+    attrs = {
+        "toolchain_compatible_with": attr.string_list_dict(doc = "A list of platform constraints for this toolchain, keyed by toolchain name.", mandatory = True),
+        "toolchain_labels": attr.string_dict(doc = "The name of the toolchain implementation target, keyed by toolchain name.", mandatory = True),
+        "toolchain_names": attr.string_list(doc = "List of toolchain names", mandatory = True),
+        "toolchain_type": attr.string(doc = "The toolchain type of the toolchains", mandatory = True),
+    },
+)
diff --git a/python/uv/private/versions.bzl b/python/uv/private/versions.bzl
new file mode 100644
index 0000000..6e7091b
--- /dev/null
+++ b/python/uv/private/versions.bzl
@@ -0,0 +1,94 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Version and integrity information for downloaded artifacts"""
+
+UV_PLATFORMS = {
+    "aarch64-apple-darwin": struct(
+        default_repo_name = "uv_darwin_aarch64",
+        compatible_with = [
+            "@platforms//os:macos",
+            "@platforms//cpu:aarch64",
+        ],
+    ),
+    "aarch64-unknown-linux-gnu": struct(
+        default_repo_name = "uv_linux_aarch64",
+        compatible_with = [
+            "@platforms//os:linux",
+            "@platforms//cpu:aarch64",
+        ],
+    ),
+    "powerpc64le-unknown-linux-gnu": struct(
+        default_repo_name = "uv_linux_ppc",
+        compatible_with = [
+            "@platforms//os:linux",
+            "@platforms//cpu:ppc",
+        ],
+    ),
+    "s390x-unknown-linux-gnu": struct(
+        default_repo_name = "uv_linux_s390x",
+        compatible_with = [
+            "@platforms//os:linux",
+            "@platforms//cpu:s390x",
+        ],
+    ),
+    "x86_64-apple-darwin": struct(
+        default_repo_name = "uv_darwin_x86_64",
+        compatible_with = [
+            "@platforms//os:macos",
+            "@platforms//cpu:x86_64",
+        ],
+    ),
+    "x86_64-pc-windows-msvc": struct(
+        default_repo_name = "uv_windows_x86_64",
+        compatible_with = [
+            "@platforms//os:windows",
+            "@platforms//cpu:x86_64",
+        ],
+    ),
+    "x86_64-unknown-linux-gnu": struct(
+        default_repo_name = "uv_linux_x86_64",
+        compatible_with = [
+            "@platforms//os:linux",
+            "@platforms//cpu:x86_64",
+        ],
+    ),
+}
+
+# From: https://github.com/astral-sh/uv/releases
+UV_TOOL_VERSIONS = {
+    "0.2.23": {
+        "aarch64-apple-darwin": struct(
+            sha256 = "1d41beb151ace9621a0e729d661cfb04d6375bffdaaf0e366d1653576ce3a687",
+        ),
+        "aarch64-unknown-linux-gnu": struct(
+            sha256 = "c35042255239b75d29b9fd4b0845894b91284ed3ff90c2595d0518b4c8902329",
+        ),
+        "powerpc64le-unknown-linux-gnu": struct(
+            sha256 = "ca16c9456d297e623164e3089d76259c6d70ac40c037dd2068accc3bb1b09d5e",
+        ),
+        "s390x-unknown-linux-gnu": struct(
+            sha256 = "55f8c2aa089f382645fce9eed3ee002f2cd48de4696568e7fd63105a02da568c",
+        ),
+        "x86_64-apple-darwin": struct(
+            sha256 = "960d2ae6ec31bcf5da3f66083dedc527712115b97ee43eae903d74a43874fa72",
+        ),
+        "x86_64-pc-windows-msvc": struct(
+            sha256 = "66f80537301c686a801b91468a43dbeb0881bd6d51857078c24f29e5dca8ecf1",
+        ),
+        "x86_64-unknown-linux-gnu": struct(
+            sha256 = "4384db514959beb4de1dcdf7f1f2d5faf664f7180820b0e7a521ef2147e33d1d",
+        ),
+    },
+}
diff --git a/python/uv/repositories.bzl b/python/uv/repositories.bzl
new file mode 100644
index 0000000..0125b20
--- /dev/null
+++ b/python/uv/repositories.bzl
@@ -0,0 +1,120 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+EXPERIMENTAL: This is experimental and may be removed without notice
+
+Create repositories for uv toolchain dependencies
+"""
+
+load("//python/uv/private:toolchain_types.bzl", "UV_TOOLCHAIN_TYPE")
+load("//python/uv/private:toolchains_repo.bzl", "uv_toolchains_repo")
+load("//python/uv/private:versions.bzl", "UV_PLATFORMS", "UV_TOOL_VERSIONS")
+
+UV_BUILD_TMPL = """\
+# Generated by repositories.bzl
+load("@rules_python//python/uv:toolchain.bzl", "uv_toolchain")
+
+uv_toolchain(
+    name = "uv_toolchain",
+    uv = "{binary}",
+    version = "{version}",
+)
+"""
+
+def _uv_repo_impl(repository_ctx):
+    platform = repository_ctx.attr.platform
+    uv_version = repository_ctx.attr.uv_version
+
+    is_windows = "windows" in platform
+
+    suffix = ".zip" if is_windows else ".tar.gz"
+    filename = "uv-{platform}{suffix}".format(
+        platform = platform,
+        suffix = suffix,
+    )
+    url = "https://github.com/astral-sh/uv/releases/download/{version}/{filename}".format(
+        version = uv_version,
+        filename = filename,
+    )
+    if filename.endswith(".tar.gz"):
+        strip_prefix = filename[:-len(".tar.gz")]
+    else:
+        strip_prefix = ""
+
+    repository_ctx.download_and_extract(
+        url = url,
+        sha256 = UV_TOOL_VERSIONS[repository_ctx.attr.uv_version][repository_ctx.attr.platform].sha256,
+        stripPrefix = strip_prefix,
+    )
+
+    binary = "uv.exe" if is_windows else "uv"
+    repository_ctx.file(
+        "BUILD.bazel",
+        UV_BUILD_TMPL.format(
+            binary = binary,
+            version = uv_version,
+        ),
+    )
+
+uv_repository = repository_rule(
+    _uv_repo_impl,
+    doc = "Fetch external tools needed for uv toolchain",
+    attrs = {
+        "platform": attr.string(mandatory = True, values = UV_PLATFORMS.keys()),
+        "uv_version": attr.string(mandatory = True, values = UV_TOOL_VERSIONS.keys()),
+    },
+)
+
+# buildifier: disable=unnamed-macro
+def uv_register_toolchains(uv_version = None, register_toolchains = True):
+    """Convenience macro which does typical toolchain setup
+
+    Skip this macro if you need more control over the toolchain setup.
+
+    Args:
+        uv_version: The uv toolchain version to download.
+        register_toolchains: If true, repositories will be generated to produce and register `uv_toolchain` targets.
+    """
+    if not uv_version:
+        fail("uv_version is required")
+
+    toolchain_names = []
+    toolchain_labels_by_toolchain = {}
+    toolchain_compatible_with_by_toolchain = {}
+
+    for platform in UV_PLATFORMS.keys():
+        uv_repository_name = UV_PLATFORMS[platform].default_repo_name
+
+        uv_repository(
+            name = uv_repository_name,
+            uv_version = uv_version,
+            platform = platform,
+        )
+
+        toolchain_name = uv_repository_name + "_toolchain"
+        toolchain_names.append(toolchain_name)
+        toolchain_labels_by_toolchain[toolchain_name] = "@{}//:uv_toolchain".format(uv_repository_name)
+        toolchain_compatible_with_by_toolchain[toolchain_name] = UV_PLATFORMS[platform].compatible_with
+
+    uv_toolchains_repo(
+        name = "uv_toolchains",
+        toolchain_type = str(UV_TOOLCHAIN_TYPE),
+        toolchain_names = toolchain_names,
+        toolchain_labels = toolchain_labels_by_toolchain,
+        toolchain_compatible_with = toolchain_compatible_with_by_toolchain,
+    )
+
+    if register_toolchains:
+        native.register_toolchains("@uv_toolchains//:all")
diff --git a/python/uv/toolchain.bzl b/python/uv/toolchain.bzl
new file mode 100644
index 0000000..3cd5850
--- /dev/null
+++ b/python/uv/toolchain.bzl
@@ -0,0 +1,59 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+EXPERIMENTAL: This is experimental and may be removed without notice
+
+This module implements the uv toolchain rule
+"""
+
+load("//python/uv/private:providers.bzl", "UvToolchainInfo")
+
+def _uv_toolchain_impl(ctx):
+    uv = ctx.attr.uv
+
+    default_info = DefaultInfo(
+        files = uv.files,
+        runfiles = uv[DefaultInfo].default_runfiles,
+    )
+    uv_toolchain_info = UvToolchainInfo(
+        uv = uv,
+        version = ctx.attr.version,
+    )
+
+    # Export all the providers inside our ToolchainInfo
+    # so the current_toolchain rule can grab and re-export them.
+    toolchain_info = platform_common.ToolchainInfo(
+        default_info = default_info,
+        uv_toolchain_info = uv_toolchain_info,
+    )
+    return [
+        default_info,
+        toolchain_info,
+    ]
+
+uv_toolchain = rule(
+    implementation = _uv_toolchain_impl,
+    attrs = {
+        "uv": attr.label(
+            doc = "A static uv binary.",
+            mandatory = True,
+            allow_single_file = True,
+            executable = True,
+            cfg = "target",
+        ),
+        "version": attr.string(mandatory = True, doc = "Version of the uv binary."),
+    },
+    doc = "Defines a uv toolchain.",
+)
diff --git a/python/versions.bzl b/python/versions.bzl
index 6c9bf25..2cf9b39 100644
--- a/python/versions.bzl
+++ b/python/versions.bzl
@@ -109,13 +109,24 @@
         "strip_prefix": "python",
     },
     "3.8.18": {
-        "url": "20231002/cpython-{python_version}+20231002-{platform}-{build}.tar.gz",
+        "url": "20240224/cpython-{python_version}+20240224-{platform}-{build}.tar.gz",
         "sha256": {
-            "aarch64-apple-darwin": "1825b1f7220bc93ff143f2e70b5c6a79c6469e0eeb40824e07a7277f59aabfda",
-            "aarch64-unknown-linux-gnu": "236a300f386ead02ca98dbddbc026ff4ef4de6701a394106e291ff8b75445ee1",
-            "x86_64-apple-darwin": "fcf04532e644644213977242cd724fe5e84c0a5ac92ae038e07f1b01b474fca3",
-            "x86_64-pc-windows-msvc": "a9d203e78caed94de368d154e841610cef6f6b484738573f4ae9059d37e898a5",
-            "x86_64-unknown-linux-gnu": "1e8a3babd1500111359b0f5675d770984bcbcb2cc8890b117394f0ed342fb9ec",
+            "aarch64-apple-darwin": "4d493a1792bf211f37f98404cc1468f09bd781adc2602dea0df82ad264c11abc",
+            "aarch64-unknown-linux-gnu": "6588c9eed93833d9483d01fe40ac8935f691a1af8e583d404ec7666631b52487",
+            "x86_64-apple-darwin": "7d2cd8d289d5e3cdd0a8c06c028c7c621d3d00ce44b7e2f08c1724ae0471c626",
+            "x86_64-pc-windows-msvc": "dba923ee5df8f99db04f599e826be92880746c02247c8d8e4d955d4bc711af11",
+            "x86_64-unknown-linux-gnu": "5ae36825492372554c02708bdd26b8dcd57e3dbf34b3d6d599ad91d93540b2b7",
+        },
+        "strip_prefix": "python",
+    },
+    "3.8.19": {
+        "url": "20240726/cpython-{python_version}+20240726-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "fe4af1b6bc59478d027ede43f6249cf7b9143558e171bdf8711247337623af57",
+            "aarch64-unknown-linux-gnu": "8dc598aca7ad43ea20119324af98862d198d8990151c734a69f0fc9d16384b46",
+            "x86_64-apple-darwin": "4bc990b35384c83b5b0b3071e91455ec203517e569f29f691b159f1a6b2a19b2",
+            "x86_64-pc-windows-msvc": "4e8e9ddda82062d6e111108ab72f439acac4ba41b77d694548ef5dbf6b2b3319",
+            "x86_64-unknown-linux-gnu": "e81ea4dd16e6057c8121bdbcb7b64e2956068ca019f244c814bc3ad907cb2765",
         },
         "strip_prefix": "python",
     },
@@ -189,15 +200,28 @@
         "strip_prefix": "python",
     },
     "3.9.18": {
-        "url": "20231002/cpython-{python_version}+20231002-{platform}-{build}.tar.gz",
+        "url": "20240224/cpython-{python_version}+20240224-{platform}-{build}.tar.gz",
         "sha256": {
-            "aarch64-apple-darwin": "fdc4054837e37b69798c2ef796222a480bc1f80e8ad3a01a95d0168d8282a007",
-            "aarch64-unknown-linux-gnu": "1e0a3e8ce8e58901a259748c0ab640d2b8294713782d14229e882c6898b2fb36",
-            "ppc64le-unknown-linux-gnu": "101c38b22fb2f5a0945156da4259c8e9efa0c08de9d7f59afa51e7ce6e22a1cc",
-            "s390x-unknown-linux-gnu": "eee31e55ffbc1f460d7b17f05dd89e45a2636f374a6f8dc29ea13d0497f7f586",
-            "x86_64-apple-darwin": "82231cb77d4a5c8081a1a1d5b8ae440abe6993514eb77a926c826e9a69a94fb1",
-            "x86_64-pc-windows-msvc": "02ea7bb64524886bd2b05d6b6be4401035e4ba4319146f274f0bcd992822cd75",
-            "x86_64-unknown-linux-gnu": "f3ff38b1ccae7dcebd8bbf2e533c9a984fac881de0ffd1636fbb61842bd924de",
+            "aarch64-apple-darwin": "2548f911a6e316575c303ba42bb51540dc9b47a9f76a06a2a37460d93b177aa2",
+            "aarch64-unknown-linux-gnu": "e5bc5196baa603d635ee6b0cd141e359752ad3e8ea76127eb9141a3155c51200",
+            "ppc64le-unknown-linux-gnu": "d6b18df7a25fe034fd5ce4e64216df2cc78b2d4d908d2a1c94058ae700d73d22",
+            "s390x-unknown-linux-gnu": "15d059507c7e900e9665f31e8d903e5a24a68ceed24f9a1c5ac06ab42a354f3f",
+            "x86_64-apple-darwin": "171d8b472fce0295be0e28bb702c43d5a2a39feccb3e72efe620ac3843c3e402",
+            "x86_64-pc-windows-msvc": "a9bdbd728ed4c353a4157ecf74386117fb2a2769a9353f491c528371cfe7f6cd",
+            "x86_64-unknown-linux-gnu": "0e5663025121186bd17d331538a44f48b41baff247891d014f3f962cbe2716b4",
+        },
+        "strip_prefix": "python",
+    },
+    "3.9.19": {
+        "url": "20240726/cpython-{python_version}+20240726-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "0e5a7aae57c53d7a849bc7f67764a947b626e3fe8d4d41a8eed11d9e4be0b1c6",
+            "aarch64-unknown-linux-gnu": "05ec896db9a9d4fe8004b4e4b6a6fdc588a015fedbddb475490885b0d9c7d9b3",
+            "ppc64le-unknown-linux-gnu": "bfff0e3d536b2f0c315e85926cc317b7b756701b6de781a8972cefbdbc991ca2",
+            "s390x-unknown-linux-gnu": "059ec97080b205ea5f1ddf71c18e22b691e8d68192bd37d13ad8f4359915299d",
+            "x86_64-apple-darwin": "f2ae9fcac044a329739b8c1676245e8cb6b3094416220e71823d2673bdea0bdb",
+            "x86_64-pc-windows-msvc": "a8df6a00140055c9accb0be632e7add951d587bbe3d63c40827bbd5145d8f557",
+            "x86_64-unknown-linux-gnu": "cbf94cb1c9d4b5501d9b3652f6e8400c2cab7c41dfea48d344d9e7f29692b91b",
         },
         "strip_prefix": "python",
     },
@@ -282,15 +306,28 @@
         "strip_prefix": "python",
     },
     "3.10.13": {
-        "url": "20231002/cpython-{python_version}+20231002-{platform}-{build}.tar.gz",
+        "url": "20240224/cpython-{python_version}+20240224-{platform}-{build}.tar.gz",
         "sha256": {
-            "aarch64-apple-darwin": "fd027b1dedf1ea034cdaa272e91771bdf75ddef4c8653b05d224a0645aa2ca3c",
-            "aarch64-unknown-linux-gnu": "8675915ff454ed2f1597e27794bc7df44f5933c26b94aa06af510fe91b58bb97",
-            "ppc64le-unknown-linux-gnu": "f3f9c43eec1a0c3f72845d0b705da17a336d3906b7df212d2640b8f47e8ff375",
-            "s390x-unknown-linux-gnu": "859f6cfe9aedb6e8858892fdc124037e83ab05f28d42a7acd314c6a16d6bd66c",
-            "x86_64-apple-darwin": "be0b19b6af1f7d8c667e5abef5505ad06cf72e5a11bb5844970c395a7e5b1275",
-            "x86_64-pc-windows-msvc": "b8d930ce0d04bda83037ad3653d7450f8907c88e24bb8255a29b8dab8930d6f1",
-            "x86_64-unknown-linux-gnu": "5d0429c67c992da19ba3eb58b3acd0b35ec5e915b8cae9a4aa8ca565c423847a",
+            "aarch64-apple-darwin": "5fdc0f6a5b5a90fd3c528e8b1da8e3aac931ea8690126c2fdb4254c84a3ff04a",
+            "aarch64-unknown-linux-gnu": "a898a88705611b372297bb8fe4d23cc16b8603ce5f24494c3a8cfa65d83787f9",
+            "ppc64le-unknown-linux-gnu": "c23706e138a0351fc1e9def2974af7b8206bac7ecbbb98a78f5aa9e7535fee42",
+            "s390x-unknown-linux-gnu": "09be8fb2cdfbb4a93d555f268f244dbe4d8ff1854b2658e8043aa4ec08aede3e",
+            "x86_64-apple-darwin": "6378dfd22f58bb553ddb02be28304d739cd730c1f95c15c74955c923a1bc3d6a",
+            "x86_64-pc-windows-msvc": "086f7fe9156b897bb401273db8359017104168ac36f60f3af4e31ac7acd6634e",
+            "x86_64-unknown-linux-gnu": "d995d032ca702afd2fc3a689c1f84a6c64972ecd82bba76a61d525f08eb0e195",
+        },
+        "strip_prefix": "python",
+    },
+    "3.10.14": {
+        "url": "20240726/cpython-{python_version}+20240726-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "164d89f0df2feb689981864ecc1dffb19e6aa3696c8880166de555494fe92607",
+            "aarch64-unknown-linux-gnu": "39bcd46b4d70e40da177c55259be16d5c2be7a3f7f93f1e3bde47e71b4833f29",
+            "ppc64le-unknown-linux-gnu": "549d38b9ef59cba9ab2990025255231bfa1cb32b4bc5eac321667640fdee19d1",
+            "s390x-unknown-linux-gnu": "de4bc878a8666c734f983db971610980870148f333bda8b0c34abfaeae88d7ec",
+            "x86_64-apple-darwin": "1a1455838cd1e8ed0da14a152a2d559a2fd3a6047ba7013e841db4a35a228c1d",
+            "x86_64-pc-windows-msvc": "7f68821a8b5445267eca480660364ebd06ec84632b336770c6e39de07ac0f6c3",
+            "x86_64-unknown-linux-gnu": "32b34cd13d9d745b3db3f3b8398ab2c07de74544829915dbebd8dce39bdc405e",
         },
         "strip_prefix": "python",
     },
@@ -356,6 +393,45 @@
         },
         "strip_prefix": "python",
     },
+    "3.11.7": {
+        "url": "20240107/cpython-{python_version}+20240107-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "b042c966920cf8465385ca3522986b12d745151a72c060991088977ca36d3883",
+            "aarch64-unknown-linux-gnu": "b102eaf865eb715aa98a8a2ef19037b6cc3ae7dfd4a632802650f29de635aa13",
+            "ppc64le-unknown-linux-gnu": "b44e1b74afe75c7b19143413632c4386708ae229117f8f950c2094e9681d34c7",
+            "s390x-unknown-linux-gnu": "49520e3ff494708020f306e30b0964f079170be83e956be4504f850557378a22",
+            "x86_64-apple-darwin": "a0e615eef1fafdc742da0008425a9030b7ea68a4ae4e73ac557ef27b112836d4",
+            "x86_64-pc-windows-msvc": "67077e6fa918e4f4fd60ba169820b00be7c390c497bf9bc9cab2c255ea8e6f3e",
+            "x86_64-unknown-linux-gnu": "4a51ce60007a6facf64e5495f4cf322e311ba9f39a8cd3f3e4c026eae488e140",
+        },
+        "strip_prefix": "python",
+    },
+    "3.11.8": {
+        "url": "20240224/cpython-{python_version}+20240224-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "389a51139f5abe071a0d70091ca5df3e7a3dfcfcbe3e0ba6ad85fb4c5638421e",
+            "aarch64-unknown-linux-gnu": "389b9005fb78dd5a6f68df5ea45ab7b30d9a4b3222af96999e94fd20d4ad0c6a",
+            "ppc64le-unknown-linux-gnu": "eb2b31f8e50309aae493c6a359c32b723a676f07c641f5e8fe4b6aa4dbb50946",
+            "s390x-unknown-linux-gnu": "844f64f4c16e24965778281da61d1e0e6cd1358a581df1662da814b1eed096b9",
+            "x86_64-apple-darwin": "097f467b0c36706bfec13f199a2eaf924e668f70c6e2bd1f1366806962f7e86e",
+            "x86_64-pc-windows-msvc": "b618f1f047349770ee1ef11d1b05899840abd53884b820fd25c7dfe2ec1664d4",
+            "x86_64-unknown-linux-gnu": "94e13d0e5ad417035b80580f3e893a72e094b0900d5d64e7e34ab08e95439987",
+        },
+        "strip_prefix": "python",
+    },
+    "3.11.9": {
+        "url": "20240726/cpython-{python_version}+20240726-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "cbdac9462bab9671c8e84650e425d3f43b775752a930a2ef954a0d457d5c00c3",
+            "aarch64-unknown-linux-gnu": "4d17cf988abe24449d649aad3ef974091ab76807904d41839907061925b4c9e3",
+            "ppc64le-unknown-linux-gnu": "fc4f3c9ef9bfac2ed0282126ff376e544697ad04a5408d6429d46899d7d3bf21",
+            "s390x-unknown-linux-gnu": "e69b66e53e926460df044f44846eef3fea642f630e829719e1a4112fc370dc56",
+            "x86_64-apple-darwin": "dc3174666a30f4c38d04e79a80c3159b4b3aa69597c4676701c8386696811611",
+            "x86_64-pc-windows-msvc": "f694be48bdfec1dace6d69a19906b6083f4dd7c7c61f1138ba520e433e5598f8",
+            "x86_64-unknown-linux-gnu": "f6e955dc9ddfcad74e77abe6f439dac48ebca14b101ed7c85a5bf3206ed2c53d",
+        },
+        "strip_prefix": "python",
+    },
     "3.12.0": {
         "url": "20231002/cpython-{python_version}+20231002-{platform}-{build}.tar.gz",
         "sha256": {
@@ -369,15 +445,67 @@
         },
         "strip_prefix": "python",
     },
+    "3.12.1": {
+        "url": "20240107/cpython-{python_version}+20240107-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "f93f8375ca6ac0a35d58ff007043cbd3a88d9609113f1cb59cf7c8d215f064af",
+            "aarch64-unknown-linux-gnu": "236533ef20e665007a111c2f36efb59c87ae195ad7dca223b6dc03fb07064f0b",
+            "ppc64le-unknown-linux-gnu": "78051f0d1411ee62bc2af5edfccf6e8400ac4ef82887a2affc19a7ace6a05267",
+            "s390x-unknown-linux-gnu": "60631211c701f8d2c56e5dd7b154e68868128a019b9db1d53a264f56c0d4aee2",
+            "x86_64-apple-darwin": "eca96158c1568dedd9a0b3425375637a83764d1fa74446438293089a8bfac1f8",
+            "x86_64-pc-windows-msvc": "fd5a9e0f41959d0341246d3643f2b8794f638adc0cec8dd5e1b6465198eae08a",
+            "x86_64-unknown-linux-gnu": "74e330b8212ca22fd4d9a2003b9eec14892155566738febc8e5e572f267b9472",
+        },
+        "strip_prefix": "python",
+    },
+    "3.12.2": {
+        "url": "20240224/cpython-{python_version}+20240224-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "01c064c00013b0175c7858b159989819ead53f4746d40580b5b0b35b6e80fba6",
+            "aarch64-unknown-linux-gnu": "e52550379e7c4ac27a87de832d172658bc04150e4e27d4e858e6d8cbb96fd709",
+            "ppc64le-unknown-linux-gnu": "74bc02c4bbbd26245c37b29b9e12d0a9c1b7ab93477fed8b651c988b6a9a6251",
+            "s390x-unknown-linux-gnu": "ecd6b0285e5eef94deb784b588b4b425a15a43ae671bf206556659dc141a9825",
+            "x86_64-apple-darwin": "a53a6670a202c96fec0b8c55ccc780ea3af5307eb89268d5b41a9775b109c094",
+            "x86_64-pc-windows-msvc": "1e5655a6ccb1a64a78460e4e3ee21036c70246800f176a6c91043a3fe3654a3b",
+            "x86_64-unknown-linux-gnu": "57a37b57f8243caa4cdac016176189573ad7620f0b6da5941c5e40660f9468ab",
+        },
+        "strip_prefix": "python",
+    },
+    "3.12.3": {
+        "url": "20240415/cpython-{python_version}+20240415-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "ccc40e5af329ef2af81350db2a88bbd6c17b56676e82d62048c15d548401519e",
+            "aarch64-unknown-linux-gnu": "ec8126de97945e629cca9aedc80a29c4ae2992c9d69f2655e27ae73906ba187d",
+            "ppc64le-unknown-linux-gnu": "c5dcf08b8077e617d949bda23027c49712f583120b3ed744f9b143da1d580572",
+            "s390x-unknown-linux-gnu": "872fc321363b8cdd826fd2cb1adfd1ceb813bc1281f9d410c1c2c4e177e8df86",
+            "x86_64-apple-darwin": "c37a22fca8f57d4471e3708de6d13097668c5f160067f264bb2b18f524c890c8",
+            "x86_64-pc-windows-msvc": "f7cfa4ad072feb4578c8afca5ba9a54ad591d665a441dd0d63aa366edbe19279",
+            "x86_64-unknown-linux-gnu": "a73ba777b5d55ca89edef709e6b8521e3f3d4289581f174c8699adfb608d09d6",
+        },
+        "strip_prefix": "python",
+    },
+    "3.12.4": {
+        "url": "20240726/cpython-{python_version}+20240726-{platform}-{build}.tar.gz",
+        "sha256": {
+            "aarch64-apple-darwin": "1801025e825c04b3907e4ef6220a13607bc0397628c9485897073110ef7fde15",
+            "aarch64-unknown-linux-gnu": "a098b18b7e9fea0c66867b76c0124fce9465765017572b2e7b522154c87c78d7",
+            "ppc64le-unknown-linux-gnu": "04011c4c5b7fe34b0b895edf4ad8748e410686c1d69aaee11d6688d481023bcb",
+            "s390x-unknown-linux-gnu": "8f8f3e29cf0c2facdbcfee70660939fda7667ac24fee8656d3388fc72f3acc7c",
+            "x86_64-apple-darwin": "4c325838c1b0ed13698506fcd515be25c73dcbe195f8522cf98f9148a97601ed",
+            "x86_64-pc-windows-msvc": "74309b0f322716409883d38c621743ea7fa0376eb00927b8ee1e1671d3aff450",
+            "x86_64-unknown-linux-gnu": "e133dd6fc6a2d0033e2658637cc22e9c95f9d7073b80115037ee1f16417a54ac",
+        },
+        "strip_prefix": "python",
+    },
 }
 
 # buildifier: disable=unsorted-dict-items
 MINOR_MAPPING = {
-    "3.8": "3.8.18",
-    "3.9": "3.9.18",
-    "3.10": "3.10.13",
-    "3.11": "3.11.6",
-    "3.12": "3.12.0",
+    "3.8": "3.8.19",
+    "3.9": "3.9.19",
+    "3.10": "3.10.14",
+    "3.11": "3.11.9",
+    "3.12": "3.12.4",
 }
 
 PLATFORMS = {
@@ -386,6 +514,7 @@
             "@platforms//os:macos",
             "@platforms//cpu:aarch64",
         ],
+        flag_values = {},
         os_name = MACOS_NAME,
         # Matches the value returned from:
         # repository_ctx.execute(["uname", "-m"]).stdout.strip()
@@ -396,28 +525,70 @@
             "@platforms//os:linux",
             "@platforms//cpu:aarch64",
         ],
+        flag_values = {
+            Label("//python/config_settings:py_linux_libc"): "glibc",
+        },
         os_name = LINUX_NAME,
         # Note: this string differs between OSX and Linux
         # Matches the value returned from:
         # repository_ctx.execute(["uname", "-m"]).stdout.strip()
         arch = "aarch64",
     ),
+    "armv7-unknown-linux-gnu": struct(
+        compatible_with = [
+            "@platforms//os:linux",
+            "@platforms//cpu:armv7",
+        ],
+        flag_values = {
+            Label("//python/config_settings:py_linux_libc"): "glibc",
+        },
+        os_name = LINUX_NAME,
+        arch = "armv7",
+    ),
+    "i386-unknown-linux-gnu": struct(
+        compatible_with = [
+            "@platforms//os:linux",
+            "@platforms//cpu:i386",
+        ],
+        flag_values = {
+            Label("//python/config_settings:py_linux_libc"): "glibc",
+        },
+        os_name = LINUX_NAME,
+        arch = "i386",
+    ),
     "ppc64le-unknown-linux-gnu": struct(
         compatible_with = [
             "@platforms//os:linux",
             "@platforms//cpu:ppc",
         ],
+        flag_values = {
+            Label("//python/config_settings:py_linux_libc"): "glibc",
+        },
         os_name = LINUX_NAME,
         # Note: this string differs between OSX and Linux
         # Matches the value returned from:
         # repository_ctx.execute(["uname", "-m"]).stdout.strip()
         arch = "ppc64le",
     ),
+    "riscv64-unknown-linux-gnu": struct(
+        compatible_with = [
+            "@platforms//os:linux",
+            "@platforms//cpu:riscv64",
+        ],
+        flag_values = {
+            Label("//python/config_settings:py_linux_libc"): "glibc",
+        },
+        os_name = LINUX_NAME,
+        arch = "riscv64",
+    ),
     "s390x-unknown-linux-gnu": struct(
         compatible_with = [
             "@platforms//os:linux",
             "@platforms//cpu:s390x",
         ],
+        flag_values = {
+            Label("//python/config_settings:py_linux_libc"): "glibc",
+        },
         os_name = LINUX_NAME,
         # Note: this string differs between OSX and Linux
         # Matches the value returned from:
@@ -429,6 +600,7 @@
             "@platforms//os:macos",
             "@platforms//cpu:x86_64",
         ],
+        flag_values = {},
         os_name = MACOS_NAME,
         arch = "x86_64",
     ),
@@ -437,6 +609,7 @@
             "@platforms//os:windows",
             "@platforms//cpu:x86_64",
         ],
+        flag_values = {},
         os_name = WINDOWS_NAME,
         arch = "x86_64",
     ),
@@ -445,6 +618,9 @@
             "@platforms//os:linux",
             "@platforms//cpu:x86_64",
         ],
+        flag_values = {
+            Label("//python/config_settings:py_linux_libc"): "glibc",
+        },
         os_name = LINUX_NAME,
         arch = "x86_64",
     ),
@@ -455,7 +631,7 @@
 
     Args:
         platform: The platform string for the interpreter
-        python_version: The version of the intterpreter to get
+        python_version: The version of the interpreter to get
         base_url: The URL to prepend to the 'url' attr in the tool_versions dict
         tool_versions: A dict listing the interpreter versions, their SHAs and URL
 
diff --git a/renovate.json b/renovate.json
deleted file mode 100644
index ee8c906..0000000
--- a/renovate.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
-    "extends": [
-        "config:base"
-    ]
-}
diff --git a/sphinxdocs/BUILD.bazel b/sphinxdocs/BUILD.bazel
index cd1a1fb..6cb69ba 100644
--- a/sphinxdocs/BUILD.bazel
+++ b/sphinxdocs/BUILD.bazel
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("@bazel_skylib//rules:common_settings.bzl", "bool_flag")
 load("//sphinxdocs/private:sphinx.bzl", "repeated_string_list_flag")
 
 package(
@@ -31,6 +32,15 @@
     build_setting_default = [],
 )
 
+# Whether to add the `-q` arg to Sphinx invocations, which determines if
+# stdout has any output or not (logging INFO messages and progress messages).
+# If true, add `-q`. If false, don't add `-q`. This is mostly useful for
+# debugging invocations or developing extensions.
+bool_flag(
+    name = "quiet",
+    build_setting_default = True,
+)
+
 bzl_library(
     name = "sphinx_bzl",
     srcs = ["sphinx.bzl"],
diff --git a/tests/private/whl_target_platforms/BUILD.bazel b/sphinxdocs/inventories/BUILD.bazel
similarity index 66%
copy from tests/private/whl_target_platforms/BUILD.bazel
copy to sphinxdocs/inventories/BUILD.bazel
index fec25af..9ed7698 100644
--- a/tests/private/whl_target_platforms/BUILD.bazel
+++ b/sphinxdocs/inventories/BUILD.bazel
@@ -1,4 +1,4 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,6 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-load(":whl_target_platforms_tests.bzl", "whl_target_platforms_test_suite")
+load("//sphinxdocs:sphinx.bzl", "sphinx_inventory")
 
-whl_target_platforms_test_suite(name = "whl_target_platforms_tests")
+# Inventory for the current Bazel version
+sphinx_inventory(
+    name = "bazel_inventory",
+    src = "bazel_inventory.txt",
+    visibility = ["//visibility:public"],
+)
diff --git a/sphinxdocs/inventories/bazel_inventory.txt b/sphinxdocs/inventories/bazel_inventory.txt
new file mode 100644
index 0000000..a7f0222
--- /dev/null
+++ b/sphinxdocs/inventories/bazel_inventory.txt
@@ -0,0 +1,27 @@
+# Sphinx inventory version 2
+# Project: Bazel
+# Version: 7.0.0
+# The remainder of this file is compressed using zlib
+Action bzl:type 1 rules/lib/Action -
+File bzl:type 1 rules/lib/File -
+Label bzl:type 1 rules/lib/Label -
+Target bzl:type 1 rules/lib/builtins/Target -
+bool bzl:type 1 rules/lib/bool -
+int bzl:type 1 rules/lib/int -
+depset bzl:type 1 rules/lib/depset -
+dict bzl:type 1 rules/lib/dict -
+label bzl:doc 1 concepts/labels -
+attr.bool bzl:type 1 rules/lib/toplevel/attr#bool -
+attr.int bzl:type 1 rules/lib/toplevel/attr#int -
+attr.label bzl:type 1 rules/lib/toplevel/attr#label -
+attr.label_list bzl:type 1 rules/lib/toplevel/attr#label_list -
+attr.string bzl:type 1 rules/lib/toplevel/attr#string -
+attr.string_list bzl:type 1 rules/lib/toplevel/attr#string_list -
+list bzl:type 1 rules/lib/list -
+python bzl:doc 1 reference/be/python -
+str bzl:type 1 rules/lib/string -
+struct bzl:type 1 rules/lib/builtins/struct -
+Name bzl:type 1 concepts/labels#target-names -
+CcInfo bzl:provider 1 rules/lib/providers/CcInfo -
+CcInfo.linking_context bzl:provider-field 1 rules/lib/providers/CcInfo#linking_context -
+ToolchainInfo bzl:type 1 rules/lib/providers/ToolchainInfo.html -
diff --git a/sphinxdocs/private/BUILD.bazel b/sphinxdocs/private/BUILD.bazel
index 01758b3..ec6a945 100644
--- a/sphinxdocs/private/BUILD.bazel
+++ b/sphinxdocs/private/BUILD.bazel
@@ -34,7 +34,7 @@
         "sphinx_build.py",
         "sphinx_server.py",
     ],
-    visibility = ["//:__subpackages__"],
+    visibility = ["//visibility:public"],
 )
 
 bzl_library(
@@ -70,14 +70,14 @@
     name = "inventory_builder",
     srcs = ["inventory_builder.py"],
     # Only public because it's an implicit attribute
-    visibility = ["//:__subpackages__"],
+    visibility = ["//visibility:public"],
 )
 
 py_binary(
     name = "proto_to_markdown",
     srcs = ["proto_to_markdown.py"],
     # Only public because it's an implicit attribute
-    visibility = ["//:__subpackages__"],
+    visibility = ["//visibility:public"],
     deps = [":proto_to_markdown_lib"],
 )
 
@@ -85,7 +85,7 @@
     name = "proto_to_markdown_lib",
     srcs = ["proto_to_markdown.py"],
     # Only public because it's an implicit attribute
-    visibility = ["//:__subpackages__"],
+    visibility = ["//visibility:public"],
     deps = [
         ":stardoc_output_proto_py_pb2",
     ],
diff --git a/sphinxdocs/private/proto_to_markdown.py b/sphinxdocs/private/proto_to_markdown.py
index 18d4e1e..d667eec 100644
--- a/sphinxdocs/private/proto_to_markdown.py
+++ b/sphinxdocs/private/proto_to_markdown.py
@@ -80,6 +80,12 @@
         yield i == 0, i == len(values) - 1, value
 
 
+def _sort_attributes_inplace(attributes):
+    # Sort attributes so the iteration order results in a Python-syntax
+    # valid signature. Keep name first because that's convention.
+    attributes.sort(key=lambda a: (a.name != "name", bool(a.default_value), a.name))
+
+
 class _MySTRenderer:
     def __init__(
         self,
@@ -99,6 +105,9 @@
             bzl_path = self._public_load_path
         else:
             bzl_path = "//" + self._module.file.split("//")[1]
+
+        self._write(":::{default-domain} bzl\n:::\n")
+        self._write(":::{bzl:currentfile} ", bzl_path, "\n:::\n\n")
         self._write(
             f"# {bzl_path}\n",
             "\n",
@@ -129,320 +138,344 @@
             self._write("\n")
 
     def _render_aspect(self, aspect: stardoc_output_pb2.AspectInfo):
-        aspect_anchor = _anchor_id(aspect.aspect_name)
-        self._write(
-            _block_attrs(".starlark-object"),
-            f"## {aspect.aspect_name}\n\n",
-            "_Propagates on attributes:_ ",  # todo add link here
-            ", ".join(sorted(f"`{attr}`" for attr in aspect.aspect_attribute)),
-            "\n\n",
-            aspect.doc_string.strip(),
-            "\n\n",
-        )
+        _sort_attributes_inplace(aspect.attribute)
+        self._write("::::::{bzl:aspect} ", aspect.aspect_name, "\n\n")
+        edges = ", ".join(sorted(f"`{attr}`" for attr in aspect.aspect_attribute))
+        self._write(":aspect-attributes: ", edges, "\n\n")
+        self._write(aspect.doc_string.strip(), "\n\n")
 
         if aspect.attribute:
-            self._render_attributes(aspect_anchor, aspect.attribute)
-        self._write("\n")
+            self._render_attributes(aspect.attribute)
+            self._write("\n")
+        self._write("::::::\n")
 
     def _render_module_extension(self, mod_ext: stardoc_output_pb2.ModuleExtensionInfo):
-        self._write(
-            _block_attrs(".starlark-object"),
-            f"## {mod_ext.extension_name}\n\n",
-        )
-
+        self._write("::::::{bzl:module-extension} ", mod_ext.extension_name, "\n\n")
         self._write(mod_ext.doc_string.strip(), "\n\n")
 
-        mod_ext_anchor = _anchor_id(mod_ext.extension_name)
         for tag in mod_ext.tag_class:
             tag_name = f"{mod_ext.extension_name}.{tag.tag_name}"
-            tag_anchor = f"{mod_ext_anchor}_{tag.tag_name}"
-            self._write(
-                _block_attrs(".starlark-module-extension-tag-class"),
-                f"### {tag_name}\n\n",
-            )
+            tag_name = f"{tag.tag_name}"
+            self._write(":::::{bzl:tag-class} ", tag_name, "\n\n")
+
+            _sort_attributes_inplace(tag.attribute)
             self._render_signature(
                 tag_name,
-                tag_anchor,
                 tag.attribute,
                 get_name=lambda a: a.name,
                 get_default=lambda a: a.default_value,
             )
 
             self._write(tag.doc_string.strip(), "\n\n")
-            self._render_attributes(tag_anchor, tag.attribute)
-            self._write("\n")
+            self._render_attributes(tag.attribute)
+            self._write(":::::\n")
+        self._write("::::::\n")
 
     def _render_repository_rule(self, repo_rule: stardoc_output_pb2.RepositoryRuleInfo):
-        self._write(
-            _block_attrs(".starlark-object"),
-            f"## {repo_rule.rule_name}\n\n",
-        )
-        repo_anchor = _anchor_id(repo_rule.rule_name)
+        self._write("::::::{bzl:repo-rule} ")
+        _sort_attributes_inplace(repo_rule.attribute)
         self._render_signature(
             repo_rule.rule_name,
-            repo_anchor,
             repo_rule.attribute,
             get_name=lambda a: a.name,
             get_default=lambda a: a.default_value,
         )
         self._write(repo_rule.doc_string.strip(), "\n\n")
         if repo_rule.attribute:
-            self._render_attributes(repo_anchor, repo_rule.attribute)
+            self._render_attributes(repo_rule.attribute)
         if repo_rule.environ:
-            self._write(
-                "**ENVIRONMENT VARIABLES** ",
-                _link_here_icon(repo_anchor + "_env"),
-                "\n",
-            )
-            for name in sorted(repo_rule.environ):
-                self._write(f"* `{name}`\n")
+            self._write(":envvars: ", ", ".join(sorted(repo_rule.environ)))
         self._write("\n")
 
     def _render_rule(self, rule: stardoc_output_pb2.RuleInfo):
         rule_name = rule.rule_name
-        rule_anchor = _anchor_id(rule_name)
-        self._write(
-            _block_attrs(".starlark-object"),
-            f"## {rule_name}\n\n",
-        )
-
+        _sort_attributes_inplace(rule.attribute)
+        self._write("::::{bzl:rule} ")
         self._render_signature(
             rule_name,
-            rule_anchor,
             rule.attribute,
             get_name=lambda r: r.name,
             get_default=lambda r: r.default_value,
         )
-
         self._write(rule.doc_string.strip(), "\n\n")
 
-        if len(rule.advertised_providers.provider_name) == 0:
-            self._write("_Provides_: no providers advertised.")
-        else:
-            self._write(
-                "_Provides_: ",
-                ", ".join(rule.advertised_providers.provider_name),
-            )
-        self._write("\n\n")
+        if rule.advertised_providers.provider_name:
+            self._write(":provides: ")
+            self._write(" | ".join(rule.advertised_providers.provider_name))
+            self._write("\n")
+        self._write("\n")
 
         if rule.attribute:
-            self._render_attributes(rule_anchor, rule.attribute)
+            self._render_attributes(rule.attribute)
+            self._write("\n")
+        self._write("::::\n")
 
     def _rule_attr_type_string(self, attr: stardoc_output_pb2.AttributeInfo) -> str:
         if attr.type == _AttributeType.NAME:
-            return _link("Name", ref="target-name")
+            return "Name"
         elif attr.type == _AttributeType.INT:
-            return _link("int", ref="int")
+            return "int"
         elif attr.type == _AttributeType.LABEL:
-            return _link("label", ref="attr-label")
+            return "label"
         elif attr.type == _AttributeType.STRING:
-            return _link("string", ref="str")
+            return "str"
         elif attr.type == _AttributeType.STRING_LIST:
-            return "list of " + _link("string", ref="str")
+            return "list[str]"
         elif attr.type == _AttributeType.INT_LIST:
-            return "list of " + _link("int", ref="int")
+            return "list[int]"
         elif attr.type == _AttributeType.LABEL_LIST:
-            return "list of " + _link("label", ref="attr-label") + "s"
+            return "list[label]"
         elif attr.type == _AttributeType.BOOLEAN:
-            return _link("bool", ref="bool")
+            return "bool"
         elif attr.type == _AttributeType.LABEL_STRING_DICT:
-            return "dict of {key} to {value}".format(
-                key=_link("label", ref="attr-label"), value=_link("string", ref="str")
-            )
+            return "dict[label, str]"
         elif attr.type == _AttributeType.STRING_DICT:
-            return "dict of {key} to {value}".format(
-                key=_link("string", ref="str"), value=_link("string", ref="str")
-            )
+            return "dict[str, str]"
         elif attr.type == _AttributeType.STRING_LIST_DICT:
-            return "dict of {key} to list of {value}".format(
-                key=_link("string", ref="str"), value=_link("string", ref="str")
-            )
+            return "dict[str, list[str]]"
         elif attr.type == _AttributeType.OUTPUT:
-            return _link("label", ref="attr-label")
+            return "label"
         elif attr.type == _AttributeType.OUTPUT_LIST:
-            return "list of " + _link("label", ref="attr-label")
+            return "list[label]"
         else:
             # If we get here, it means the value was unknown for some reason.
             # Rather than error, give some somewhat understandable value.
             return _AttributeType.Name(attr.type)
 
     def _render_func(self, func: stardoc_output_pb2.StarlarkFunctionInfo):
-        func_name = func.function_name
-        func_anchor = _anchor_id(func_name)
-        self._write(
-            _block_attrs(".starlark-object"),
-            f"## {func_name}\n\n",
-        )
+        self._write("::::::{bzl:function} ")
 
-        parameters = [param for param in func.parameter if param.name != "self"]
-
-        self._render_signature(
-            func_name,
-            func_anchor,
-            parameters,
-            get_name=lambda p: p.name,
-            get_default=lambda p: p.default_value,
-        )
+        parameters = self._render_func_signature(func)
 
         self._write(func.doc_string.strip(), "\n\n")
 
         if parameters:
-            self._write(
-                _block_attrs(f"{func_anchor}_parameters"),
-                "**PARAMETERS** ",
-                _link_here_icon(f"{func_anchor}_parameters"),
-                "\n\n",
-            )
-            entries = []
             for param in parameters:
-                entries.append(
-                    [
-                        f"{func_anchor}_{param.name}",
-                        param.name,
-                        f"(_default `{param.default_value}`_) "
-                        if param.default_value
-                        else "",
-                        param.doc_string if param.doc_string else "_undocumented_",
-                    ]
-                )
-            self._render_field_list(entries)
+                self._write(f":arg {param.name}:\n")
+                if param.default_value:
+                    default_value = self._format_default_value(param.default_value)
+                    self._write("  {default-value}`", default_value, "`\n")
+                if param.doc_string:
+                    self._write("  ", _indent_block_text(param.doc_string), "\n")
+                else:
+                    self._write("  _undocumented_\n")
+                self._write("\n")
 
-        if getattr(func, "return").doc_string:
-            return_doc = _indent_block_text(getattr(func, "return").doc_string)
-            self._write(
-                _block_attrs(f"{func_anchor}_returns"),
-                "RETURNS",
-                _link_here_icon(func_anchor + "_returns"),
-                "\n",
-                ": ",
-                return_doc,
-                "\n",
-            )
+        if return_doc := getattr(func, "return").doc_string:
+            self._write(":returns:\n")
+            self._write("  ", _indent_block_text(return_doc), "\n")
         if func.deprecated.doc_string:
-            self._write(
-                "\n\n**DEPRECATED**\n\n", func.deprecated.doc_string.strip(), "\n"
-            )
+            self._write(":::::{deprecated}: unknown\n")
+            self._write("  ", _indent_block_text(func.deprecated.doc_string), "\n")
+            self._write(":::::\n")
+        self._write("::::::\n")
+
+    def _render_func_signature(self, func):
+        self._write(f"{func.function_name}(")
+        # TODO: Have an "is method" directive in the docstring to decide if
+        # the self parameter should be removed.
+        parameters = [param for param in func.parameter if param.name != "self"]
+
+        # Unfortunately, the stardoc info is incomplete and inaccurate:
+        # * The position of the `*args` param is wrong; it'll always
+        #   be last (or second to last, if kwargs is present).
+        # * Stardoc doesn't explicitly tell us if an arg is `*args` or
+        #   `**kwargs`. Hence f(*args) or f(**kwargs) is ambiguous.
+        # See these issues:
+        # https://github.com/bazelbuild/stardoc/issues/226
+        # https://github.com/bazelbuild/stardoc/issues/225
+        #
+        # Below, we try to take what info we have and infer what the original
+        # signature was. In short:
+        # * A default=empty, mandatory=false arg is either *args or **kwargs
+        # * If two of those are seen, the first is *args and the second is
+        #   **kwargs. Recall, however, the position of *args is mis-represented.
+        # * If a single default=empty, mandatory=false arg is found, then
+        #   it's ambiguous as to whether it's *args or **kwargs. To figure
+        #   that out, we:
+        #   * If it's not the last arg, then it must be *args. In practice,
+        #     this never occurs due to #226 above.
+        #   * If we saw a mandatory arg after an optional arg, then *args
+        #     was supposed to be between them (otherwise it wouldn't be
+        #     valid syntax).
+        #   * Otherwise, it's ambiguous. We just guess by looking at the
+        #     parameter name.
+        var_args = None
+        var_kwargs = None
+        saw_mandatory_after_optional = False
+        first_mandatory_after_optional_index = None
+        optionals_started = False
+        for i, p in enumerate(parameters):
+            optionals_started = optionals_started or not p.mandatory
+            if p.mandatory and optionals_started:
+                saw_mandatory_after_optional = True
+                if first_mandatory_after_optional_index is None:
+                    first_mandatory_after_optional_index = i
+
+            if not p.default_value and not p.mandatory:
+                if var_args is None:
+                    var_args = (i, p)
+                else:
+                    var_kwargs = p
+
+        if var_args and not var_kwargs:
+            if var_args[0] != len(parameters) - 1:
+                pass
+            elif saw_mandatory_after_optional:
+                var_kwargs = var_args[1]
+                var_args = None
+            elif var_args[1].name in ("kwargs", "attrs"):
+                var_kwargs = var_args[1]
+                var_args = None
+
+        # Partial workaround for
+        # https://github.com/bazelbuild/stardoc/issues/226: `*args` renders last
+        if var_args and var_kwargs and first_mandatory_after_optional_index is not None:
+            parameters.pop(var_args[0])
+            parameters.insert(first_mandatory_after_optional_index, var_args[1])
+
+        # The only way a mandatory-after-optional can occur is
+        # if there was `*args` before it. But if we didn't see it,
+        # it must have been the unbound `*` symbol, which stardoc doesn't
+        # tell us exists.
+        if saw_mandatory_after_optional and not var_args:
+            self._write("*, ")
+        for _, is_last, p in _position_iter(parameters):
+            if var_args and p.name == var_args[1].name:
+                self._write("*")
+            elif var_kwargs and p.name == var_kwargs.name:
+                self._write("**")
+            self._write(p.name)
+            if p.default_value:
+                self._write("=", self._format_default_value(p.default_value))
+            if not is_last:
+                self._write(", ")
+        self._write(")\n")
+        return parameters
 
     def _render_provider(self, provider: stardoc_output_pb2.ProviderInfo):
-        self._write(
-            _block_attrs(".starlark-object"),
-            f"## {provider.provider_name}\n\n",
-        )
-
-        provider_anchor = _anchor_id(provider.provider_name)
-        self._render_signature(
-            provider.provider_name,
-            provider_anchor,
-            provider.field_info,
-            get_name=lambda f: f.name,
-        )
+        self._write("::::::{bzl:provider} ", provider.provider_name, "\n")
+        if provider.origin_key:
+            self._render_origin_key_option(provider.origin_key)
+        self._write("\n")
 
         self._write(provider.doc_string.strip(), "\n\n")
 
-        if provider.field_info:
-            self._write(
-                _block_attrs(provider_anchor),
-                "**FIELDS** ",
-                _link_here_icon(provider_anchor + "_fields"),
-                "\n",
-                "\n",
-            )
-            entries = []
-            for field in provider.field_info:
-                entries.append(
-                    [
-                        f"{provider_anchor}_{field.name}",
-                        field.name,
-                        field.doc_string,
-                    ]
-                )
-            self._render_field_list(entries)
-
-    def _render_attributes(
-        self, base_anchor: str, attributes: list[stardoc_output_pb2.AttributeInfo]
-    ):
-        self._write(
-            _block_attrs(f"{base_anchor}_attributes"),
-            "**ATTRIBUTES** ",
-            _link_here_icon(f"{base_anchor}_attributes"),
-            "\n",
+        self._write(":::::{bzl:function} ")
+        provider.field_info.sort(key=lambda f: f.name)
+        self._render_signature(
+            "<init>",
+            provider.field_info,
+            get_name=lambda f: f.name,
         )
-        entries = []
-        for attr in attributes:
-            anchor = f"{base_anchor}_{attr.name}"
-            required = "required" if attr.mandatory else "optional"
-            attr_type = self._rule_attr_type_string(attr)
-            default = f", default `{attr.default_value}`" if attr.default_value else ""
-            providers_parts = []
-            if attr.provider_name_group:
-                providers_parts.append("\n\n_Required providers_: ")
-            if len(attr.provider_name_group) == 1:
-                provider_group = attr.provider_name_group[0]
-                if len(provider_group.provider_name) == 1:
-                    providers_parts.append(provider_group.provider_name[0])
-                else:
-                    providers_parts.extend(
-                        ["all of ", _join_csv_and(provider_group.provider_name)]
-                    )
-            elif len(attr.provider_name_group) > 1:
-                providers_parts.append("any of \n")
-                for group in attr.provider_name_group:
-                    providers_parts.extend(["* ", _join_csv_and(group.provider_name)])
-            if providers_parts:
-                providers_parts.append("\n")
+        # TODO: Add support for provider.init once our Bazel version supports
+        # that field
+        self._write(":::::\n")
 
-            entries.append(
-                [
-                    anchor,
-                    attr.name,
-                    f"_({required} {attr_type}{default})_\n",
-                    attr.doc_string,
-                    *providers_parts,
-                ]
-            )
-        self._render_field_list(entries)
+        for field in provider.field_info:
+            self._write(":::::{bzl:provider-field} ", field.name, "\n")
+            self._write(field.doc_string.strip())
+            self._write("\n")
+            self._write(":::::\n")
+        self._write("::::::\n")
+
+    def _render_attributes(self, attributes: list[stardoc_output_pb2.AttributeInfo]):
+        for attr in attributes:
+            attr_type = self._rule_attr_type_string(attr)
+            self._write(f":attr {attr.name}:\n")
+            if attr.default_value:
+                self._write("  {bzl:default-value}`%s`\n" % attr.default_value)
+            self._write("  {type}`%s`\n" % attr_type)
+            self._write("  ", _indent_block_text(attr.doc_string), "\n")
+            self._write("  :::{bzl:attr-info} Info\n")
+            if attr.mandatory:
+                self._write("  :mandatory:\n")
+            self._write("  :::\n")
+            self._write("\n")
+
+            if attr.provider_name_group:
+                self._write("  {required-providers}`")
+                for _, outer_is_last, provider_group in _position_iter(
+                    attr.provider_name_group
+                ):
+                    pairs = list(
+                        zip(
+                            provider_group.origin_key,
+                            provider_group.provider_name,
+                            strict=True,
+                        )
+                    )
+                    if len(pairs) > 1:
+                        self._write("[")
+                    for _, inner_is_last, (origin_key, name) in _position_iter(pairs):
+                        if origin_key.file == "<native>":
+                            origin = origin_key.name
+                        else:
+                            origin = f"{origin_key.file}%{origin_key.name}"
+                        # We have to use "title <ref>" syntax because the same
+                        # name might map to different origins. Stardoc gives us
+                        # the provider's actual name, not the name of the symbol
+                        # used in the source.
+                        self._write(f"'{name} <{origin}>'")
+                        if not inner_is_last:
+                            self._write(", ")
+
+                    if len(pairs) > 1:
+                        self._write("]")
+
+                    if not outer_is_last:
+                        self._write(" | ")
+                self._write("`\n")
+
+            self._write("\n")
 
     def _render_signature(
         self,
         name: str,
-        base_anchor: str,
         parameters: list[_T],
         *,
         get_name: Callable[_T, str],
         get_default: Callable[_T, str] = lambda v: None,
     ):
-        self._write(_block_attrs(".starlark-signature"), name, "(")
+        self._write(name, "(")
         for _, is_last, param in _position_iter(parameters):
             param_name = get_name(param)
-            self._write(_link(param_name, f"{base_anchor}_{param_name}"))
+            self._write(f"{param_name}")
             default_value = get_default(param)
             if default_value:
+                default_value = self._format_default_value(default_value)
                 self._write(f"={default_value}")
             if not is_last:
-                self._write(",\n")
+                self._write(", ")
         self._write(")\n\n")
 
-    def _render_field_list(self, entries: list[list[str]]):
-        """Render a list of field lists.
+    def _render_origin_key_option(self, origin_key, indent=""):
+        self._write(
+            indent,
+            ":origin-key: ",
+            self._format_option_value(f"{origin_key.file}%{origin_key.name}"),
+            "\n",
+        )
 
-        Args:
-            entries: list of field list entries. Each element is 3
-                pieces: an anchor, field description, and one or more
-                text strings for the body of the field list entry.
-        """
-        for anchor, description, *body_pieces in entries:
-            body_pieces = [_block_attrs(anchor), *body_pieces]
-            self._write(
-                ":",
-                _span(description + _link_here_icon(anchor)),
-                ":\n  ",
-                # The text has to be indented to be associated with the block correctly.
-                "".join(body_pieces).strip().replace("\n", "\n  "),
-                "\n",
-            )
-        # Ensure there is an empty line after the field list, otherwise
-        # the next line of content will fold into the field list
-        self._write("\n")
+    def _format_default_value(self, default_value):
+        # Handle <function foo from //baz:bar.bzl>
+        # For now, just use quotes for lack of a better option
+        if default_value.startswith("<"):
+            return f"'{default_value}'"
+        elif default_value.startswith("Label("):
+            # Handle Label(*, "@some//label:target")
+            start_quote = default_value.find('"')
+            end_quote = default_value.rfind('"')
+            return default_value[start_quote : end_quote + 1]
+        else:
+            return default_value
+
+    def _format_option_value(self, value):
+        # Leading @ symbols are special markup; escape them.
+        if value.startswith("@"):
+            return "\\" + value
+        else:
+            return value
 
     def _write(self, *lines: str):
         self._out_stream.writelines(lines)
@@ -452,21 +485,15 @@
     *,
     proto: pathlib.Path,
     output: pathlib.Path,
-    footer: pathlib.Path,
     public_load_path: str,
 ):
-    if footer:
-        footer_content = footer.read_text()
-
     module = stardoc_output_pb2.ModuleInfo.FromString(proto.read_bytes())
     with output.open("wt", encoding="utf8") as out_stream:
         _MySTRenderer(module, out_stream, public_load_path).render()
-        out_stream.write(footer_content)
 
 
 def _create_parser():
     parser = argparse.ArgumentParser(fromfile_prefix_chars="@")
-    parser.add_argument("--footer", dest="footer", type=pathlib.Path)
     parser.add_argument("--proto", dest="proto", type=pathlib.Path)
     parser.add_argument("--output", dest="output", type=pathlib.Path)
     parser.add_argument("--public-load-path", dest="public_load_path")
@@ -478,7 +505,6 @@
     _convert(
         proto=options.proto,
         output=options.output,
-        footer=options.footer,
         public_load_path=options.public_load_path,
     )
     return 0
diff --git a/sphinxdocs/private/readthedocs.bzl b/sphinxdocs/private/readthedocs.bzl
index 3cab75b..ee8e7aa 100644
--- a/sphinxdocs/private/readthedocs.bzl
+++ b/sphinxdocs/private/readthedocs.bzl
@@ -43,6 +43,6 @@
             "$(rlocationpaths {})".format(d)
             for d in docs
         ],
-        deps = ["//python/runfiles"],
+        deps = [Label("//python/runfiles")],
         **kwargs
     )
diff --git a/sphinxdocs/private/sphinx.bzl b/sphinxdocs/private/sphinx.bzl
index daff02d..a5ac831 100644
--- a/sphinxdocs/private/sphinx.bzl
+++ b/sphinxdocs/private/sphinx.bzl
@@ -15,6 +15,7 @@
 """Implementation of sphinx rules."""
 
 load("@bazel_skylib//lib:paths.bzl", "paths")
+load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
 load("//python:py_binary.bzl", "py_binary")
 load("//python/private:util.bzl", "add_tag", "copy_propagating_kwargs")  # buildifier: disable=bzl-visibility
 
@@ -55,6 +56,7 @@
         formats,
         strip_prefix = "",
         extra_opts = [],
+        tools = [],
         **kwargs):
     """Generate docs using Sphinx.
 
@@ -88,6 +90,11 @@
             source files. e.g., given `//docs:foo.md`, stripping `docs/`
             makes Sphinx see `foo.md` in its generated source directory.
         extra_opts: (list[str]) Additional options to pass onto Sphinx building.
+            On each provided option, a location expansion is performed.
+            See `ctx.expand_location()`.
+        tools: (list[label]) Additional tools that are used by Sphinx and its plugins.
+            This just makes the tools available during Sphinx execution. To locate
+            them, use `extra_opts` and `$(location)`.
         **kwargs: (dict) Common attributes to pass onto rules.
     """
     add_tag(kwargs, "@rules_python//sphinxdocs:sphinx_docs")
@@ -102,6 +109,7 @@
         formats = formats,
         strip_prefix = strip_prefix,
         extra_opts = extra_opts,
+        tools = tools,
         **kwargs
     )
 
@@ -112,6 +120,7 @@
         output_group = "html",
         **common_kwargs
     )
+
     py_binary(
         name = name + ".serve",
         srcs = [_SPHINX_SERVE_MAIN_SRC],
@@ -174,8 +183,13 @@
             doc = "Doc source files for Sphinx.",
         ),
         "strip_prefix": attr.string(doc = "Prefix to remove from input file paths."),
+        "tools": attr.label_list(
+            cfg = "exec",
+            doc = "Additional tools that are used by Sphinx and its plugins.",
+        ),
         "_extra_defines_flag": attr.label(default = "//sphinxdocs:extra_defines"),
         "_extra_env_flag": attr.label(default = "//sphinxdocs:extra_env"),
+        "_quiet_flag": attr.label(default = "//sphinxdocs:quiet"),
     },
 )
 
@@ -234,11 +248,14 @@
     args = ctx.actions.args()
     args.add("-T")  # Full tracebacks on error
     args.add("-b", format)
-    args.add("-q")  # Suppress stdout informational text
+
+    if ctx.attr._quiet_flag[BuildSettingInfo].value:
+        args.add("-q")  # Suppress stdout informational text
     args.add("-j", "auto")  # Build in parallel, if possible
     args.add("-E")  # Don't try to use cache files. Bazel can't make use of them.
     args.add("-a")  # Write all files; don't try to detect "changed" files
-    args.add_all(ctx.attr.extra_opts)
+    for opt in ctx.attr.extra_opts:
+        args.add(ctx.expand_location(opt))
     args.add_all(ctx.attr._extra_defines_flag[_FlagInfo].value, before_each = "-D")
     args.add(source_path)
     args.add(output_dir.path)
@@ -248,11 +265,16 @@
         for v in ctx.attr._extra_env_flag[_FlagInfo].value
     ])
 
+    tools = []
+    for tool in ctx.attr.tools:
+        tools.append(tool[DefaultInfo].files_to_run)
+
     ctx.actions.run(
         executable = ctx.executable.sphinx,
         arguments = [args],
         inputs = inputs,
         outputs = [output_dir],
+        tools = tools,
         mnemonic = "SphinxBuildDocs",
         progress_message = "Sphinx building {} for %{{label}}".format(format),
         env = env,
diff --git a/sphinxdocs/private/sphinx_server.py b/sphinxdocs/private/sphinx_server.py
index e71889a..1f4fae8 100644
--- a/sphinxdocs/private/sphinx_server.py
+++ b/sphinxdocs/private/sphinx_server.py
@@ -2,6 +2,7 @@
 import errno
 import os
 import sys
+import time
 from http import server
 
 
@@ -17,17 +18,33 @@
     address = ("0.0.0.0", 8000)
     # with server.ThreadingHTTPServer(address, DirectoryHandler) as (ip, port, httpd):
     with _start_server(DirectoryHandler, "0.0.0.0", 8000) as (ip, port, httpd):
-        print(f"Serving...")
-        print(f"  Address: http://{ip}:{port}")
-        print(f"  Serving directory: {serve_directory}")
-        print(f"  CWD: {os.getcwd()}")
-        print()
-        print("*** You do not need to restart this server to see changes ***")
-        print()
-        try:
-            httpd.serve_forever()
-        except KeyboardInterrupt:
-            pass
+
+        def _print_server_info():
+            print("Serving...")
+            print(f"  Address: http://{ip}:{port}")
+            print(f"  Serving directory: {serve_directory}")
+            print(f"      url: file://{serve_directory}")
+            print(f"  Server CWD: {os.getcwd()}")
+            print()
+            print("*** You do not need to restart this server to see changes ***")
+            print("*** CTRL+C once to reprint this info ***")
+            print("*** CTRL+C twice to exit ***")
+            print()
+
+        while True:
+            _print_server_info()
+            try:
+                httpd.serve_forever()
+            except KeyboardInterrupt:
+                _print_server_info()
+                print(
+                    "*** KeyboardInterrupt received: CTRL+C again to terminate server ***"
+                )
+                try:
+                    time.sleep(1)
+                    print("Restarting serving ...")
+                except KeyboardInterrupt:
+                    break
     return 0
 
 
@@ -37,6 +54,7 @@
         try:
             with server.ThreadingHTTPServer((ip, port), handler) as httpd:
                 yield ip, port, httpd
+                return
         except OSError as e:
             if e.errno == errno.EADDRINUSE:
                 pass
diff --git a/sphinxdocs/private/sphinx_stardoc.bzl b/sphinxdocs/private/sphinx_stardoc.bzl
index 810dca3..e2b1756 100644
--- a/sphinxdocs/private/sphinx_stardoc.bzl
+++ b/sphinxdocs/private/sphinx_stardoc.bzl
@@ -19,7 +19,7 @@
 load("@io_bazel_stardoc//stardoc:stardoc.bzl", "stardoc")
 load("//python/private:util.bzl", "add_tag", "copy_propagating_kwargs")  # buildifier: disable=bzl-visibility
 
-def sphinx_stardocs(name, docs, footer = None, **kwargs):
+def sphinx_stardocs(name, docs, **kwargs):
     """Generate Sphinx-friendly Markdown docs using Stardoc for bzl libraries.
 
     A `build_test` for the docs is also generated to ensure Stardoc is able
@@ -39,7 +39,6 @@
             * A `dict` with keys `input` and `dep`. The `input` key is a string
               label to the bzl file to generate docs for. The `dep` key is a
               string label to a `bzl_library` providing the necessary dependencies.
-        footer: optional [`label`] File to append to generated docs.
         **kwargs: Additional kwargs to pass onto each `sphinx_stardoc` target
     """
     add_tag(kwargs, "@rules_python//sphinxdocs:sphinx_stardocs")
@@ -60,7 +59,6 @@
         doc_name = "_{}_{}".format(name.lstrip("_"), out_name.replace("/", "_"))
         _sphinx_stardoc(
             name = doc_name,
-            footer = footer,
             out = out_name,
             **stardoc_kwargs
         )
@@ -77,7 +75,7 @@
         **common_kwargs
     )
 
-def _sphinx_stardoc(*, name, out, footer = None, public_load_path = None, **kwargs):
+def _sphinx_stardoc(*, name, out, public_load_path = None, **kwargs):
     stardoc_name = "_{}_stardoc".format(name.lstrip("_"))
     stardoc_pb = stardoc_name + ".binaryproto"
 
@@ -95,7 +93,6 @@
         name = name,
         src = stardoc_pb,
         output = out,
-        footer = footer,
         public_load_path = public_load_path,
     )
 
@@ -108,9 +105,6 @@
     args.add("--proto", ctx.file.src)
     args.add("--output", ctx.outputs.output)
 
-    if ctx.file.footer:
-        args.add("--footer", ctx.file.footer)
-        inputs.append(ctx.file.footer)
     if ctx.attr.public_load_path:
         args.add("--public-load-path={}".format(ctx.attr.public_load_path))
 
@@ -126,7 +120,6 @@
 _stardoc_proto_to_markdown = rule(
     implementation = _stardoc_proto_to_markdown_impl,
     attrs = {
-        "footer": attr.label(allow_single_file = True),
         "output": attr.output(mandatory = True),
         "public_load_path": attr.string(),
         "src": attr.label(allow_single_file = True, mandatory = True),
diff --git a/sphinxdocs/sphinx.bzl b/sphinxdocs/sphinx.bzl
index a0b1a05..d9385bd 100644
--- a/sphinxdocs/sphinx.bzl
+++ b/sphinxdocs/sphinx.bzl
@@ -23,6 +23,8 @@
 
 Defining your own `sphinx-build` binary is necessary because Sphinx uses
 a plugin model to support extensibility.
+
+The Sphinx integration is still experimental.
 """
 
 load(
diff --git a/sphinxdocs/src/sphinx_bzl/BUILD.bazel b/sphinxdocs/src/sphinx_bzl/BUILD.bazel
new file mode 100644
index 0000000..8830315
--- /dev/null
+++ b/sphinxdocs/src/sphinx_bzl/BUILD.bazel
@@ -0,0 +1,14 @@
+load("//python:py_library.bzl", "py_library")
+
+package(
+    default_visibility = ["//:__subpackages__"],
+)
+
+# NOTE: This provides the library on its own, not its dependencies.
+py_library(
+    name = "sphinx_bzl",
+    srcs = glob(["*.py"]),
+    imports = [".."],
+    # Allow depending on it in sphinx_binary targets
+    visibility = ["//visibility:public"],
+)
diff --git a/tests/pip_install/BUILD.bazel b/sphinxdocs/src/sphinx_bzl/__init__.py
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to sphinxdocs/src/sphinx_bzl/__init__.py
diff --git a/sphinxdocs/src/sphinx_bzl/bzl.py b/sphinxdocs/src/sphinx_bzl/bzl.py
new file mode 100644
index 0000000..be38d8a
--- /dev/null
+++ b/sphinxdocs/src/sphinx_bzl/bzl.py
@@ -0,0 +1,1641 @@
+"""Sphinx extension for documenting Bazel/Starlark objects."""
+
+import ast
+import collections
+import enum
+import os
+import typing
+from collections.abc import Collection
+from typing import Callable, Iterable, TypeVar
+
+from docutils import nodes as docutils_nodes
+from docutils.parsers.rst import directives as docutils_directives
+from docutils.parsers.rst import states
+from sphinx import addnodes, builders
+from sphinx import directives as sphinx_directives
+from sphinx import domains, environment, roles
+from sphinx.highlighting import lexer_classes
+from sphinx.locale import _
+from sphinx.util import docfields
+from sphinx.util import docutils as sphinx_docutils
+from sphinx.util import inspect, logging
+from sphinx.util import nodes as sphinx_nodes
+from sphinx.util import typing as sphinx_typing
+from typing_extensions import override
+
+_logger = logging.getLogger(__name__)
+_LOG_PREFIX = f"[{_logger.name}] "
+
+_INDEX_SUBTYPE_NORMAL = 0
+_INDEX_SUBTYPE_ENTRY_WITH_SUB_ENTRIES = 1
+_INDEX_SUBTYPE_SUB_ENTRY = 2
+
+_T = TypeVar("_T")
+
+# See https://www.sphinx-doc.org/en/master/extdev/domainapi.html#sphinx.domains.Domain.get_objects
+_GetObjectsTuple: typing.TypeAlias = tuple[str, str, str, str, str, int]
+
+# See SphinxRole.run definition; the docs for role classes are pretty sparse.
+_RoleRunResult: typing.TypeAlias = tuple[
+    list[docutils_nodes.Node], list[docutils_nodes.system_message]
+]
+
+
+def _log_debug(message, *args):
+    # NOTE: Non-warning log messages go to stdout and are only
+    # visible when -q isn't passed to Sphinx. Note that the sphinx_docs build
+    # rule passes -q by default; use --//sphinxdocs:quiet=false to disable it.
+    _logger.debug("%s" + message, _LOG_PREFIX, *args)
+
+
+def _position_iter(values: Collection[_T]) -> Iterable[tuple[bool, bool, _T]]:
+    last_i = len(values) - 1
+    for i, value in enumerate(values):
+        yield i == 0, i == last_i, value
+
+
+# TODO: Remove this. Use @repo//pkg:file.bzl%symbol to identify things instead
+# of dots. This more directly reflects the bzl concept and avoids issues with
+# e.g. repos, directories, or files containing dots themselves.
+def _label_to_dotted_name(label: str) -> str:
+    """Convert an absolute label to a dotted name.
+
+    Args:
+        label: Absolute label with optional repo prefix, e.g. `@a//b:c.bzl`
+            or `//b:c.bzl`
+
+    Returns:
+        Label converted to a dotted notation for easier writing of object
+        references.
+    """
+    if label.endswith(".bzl"):
+        label = label[: -len(".bzl")]
+    elif ":BUILD" in label:
+        label = label[: label.find(":BUILD")]
+    else:
+        raise InvalidValueError(
+            f"Malformed label: Label must end with .bzl or :BUILD*, got {label}"
+        )
+
+    # Make a //foo:bar.bzl convert to foo.bar, not .foo.bar
+    if label.startswith("//"):
+        label = label.lstrip("/")
+    return label.replace("@", "").replace("//", "/").replace(":", "/").replace("/", ".")
+
+
+class InvalidValueError(Exception):
+    """Generic error for an invalid value instead of ValueError.
+
+    Sphinx treats regular ValueError to mean abort parsing the current
+    chunk and continue on as best it can. Their error means a more
+    fundamental problem that should cause a failure.
+    """
+
+
+class _ObjectEntry:
+    """Metadata about a known object."""
+
+    def __init__(
+        self,
+        full_id: str,
+        display_name: str,
+        object_type: str,
+        search_priority: int,
+        index_entry: domains.IndexEntry,
+    ):
+        """Creates an instance.
+
+        Args:
+            full_id: The fully qualified id of the object. Should be
+                globally unique, even between projects.
+            display_name: What to display the object as in casual context.
+            object_type: The type of object, typically one of the values
+                known to the domain.
+            search_priority: The search priority, see
+                https://www.sphinx-doc.org/en/master/extdev/domainapi.html#sphinx.domains.Domain.get_objects
+                for valid values.
+            index_entry: Metadata about the object for the domain index.
+        """
+        self.full_id = full_id
+        self.display_name = display_name
+        self.object_type = object_type
+        self.search_priority = search_priority
+        self.index_entry = index_entry
+
+    def to_get_objects_tuple(self) -> _GetObjectsTuple:
+        # For the tuple definition
+        return (
+            self.full_id,
+            self.display_name,
+            self.object_type,
+            self.index_entry.docname,
+            self.index_entry.anchor,
+            self.search_priority,
+        )
+
+    def __repr__(self):
+        return f"ObjectEntry({self.full_id=}, {self.object_type=}, {self.display_name=}, {self.index_entry.docname=})"
+
+
+# A simple helper just to document what the index tuple nodes are.
+def _index_node_tuple(
+    entry_type: str,
+    entry_name: str,
+    target: str,
+    main: str | None = None,
+    category_key: str | None = None,
+) -> tuple[str, str, str, str | None, str | None]:
+    # For this tuple definition, see:
+    # https://www.sphinx-doc.org/en/master/extdev/nodes.html#sphinx.addnodes.index
+    # For the definition of entry_type, see:
+    # And https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-index
+    return (entry_type, entry_name, target, main, category_key)
+
+
+class _BzlObjectId:
+    def __init__(
+        self,
+        *,
+        repo: str,
+        bzl_file: str | None = None,
+        namespace: str | None = None,
+        symbol: str | None = None,
+        target: str | None = None,
+    ):
+        """Creates an instance.
+
+        Args:
+            repo: repository name, including leading "@".
+            bzl_file: label of file containing the object, e.g. //foo:bar.bzl
+            namespace: dotted name of the namespace the symbol is within.
+            symbol: dotted name, relative to `namespace` of the symbol.
+        """
+        if not repo:
+            raise InvalidValueError("repo cannot be empty")
+        if not bzl_file:
+            raise InvalidValueError("bzl_file cannot be empty")
+        if not symbol:
+            raise InvalidValueError("symbol cannot be empty")
+
+        self.repo = repo
+        self.bzl_file = bzl_file
+        self.namespace = namespace
+        self.symbol = symbol  # Relative to namespace
+
+        clean_repo = repo.replace("@", "")
+        package = _label_to_dotted_name(bzl_file)
+        self.full_id = ".".join(filter(None, [clean_repo, package, namespace, symbol]))
+
+    @classmethod
+    def from_env(
+        cls, env: environment.BuildEnvironment, symbol: str | None = None, target: str | None = None
+    ) -> "_BzlObjectId":
+        if target:
+            symbol = target.lstrip("/:").replace(":", ".")
+        return cls(
+            repo=env.ref_context["bzl:repo"],
+            bzl_file=env.ref_context["bzl:file"],
+            namespace=".".join(env.ref_context["bzl:doc_id_stack"]),
+            symbol=symbol,
+        )
+
+
+class _TypeExprParser(ast.NodeVisitor):
+    """Parses a string description of types to doc nodes."""
+
+    def __init__(self, make_xref: Callable[[str], docutils_nodes.Node]):
+        self.root_node = addnodes.desc_inline("bzl", classes=["type-expr"])
+        self.make_xref = make_xref
+        self._doc_node_stack = [self.root_node]
+
+    @classmethod
+    def xrefs_from_type_expr(
+        cls,
+        type_expr_str: str,
+        make_xref: Callable[[str], docutils_nodes.Node],
+    ) -> docutils_nodes.Node:
+        module = ast.parse(type_expr_str)
+        visitor = cls(make_xref)
+        visitor.visit(module.body[0])
+        return visitor.root_node
+
+    def _append(self, node: docutils_nodes.Node):
+        self._doc_node_stack[-1] += node
+
+    def _append_and_push(self, node: docutils_nodes.Node):
+        self._append(node)
+        self._doc_node_stack.append(node)
+
+    def visit_Attribute(self, node: ast.Attribute):
+        current = node
+        parts = []
+        while current:
+            if isinstance(current, ast.Attribute):
+                parts.append(current.attr)
+                current = current.value
+            elif isinstance(current, ast.Name):
+                parts.append(current.id)
+                break
+            else:
+                raise InvalidValueError(f"Unexpected Attribute.value node: {current}")
+        dotted_name = ".".join(reversed(parts))
+        self._append(self.make_xref(dotted_name))
+
+    def visit_Constant(self, node: ast.Constant):
+        if node.value is None:
+            self._append(self.make_xref("None"))
+        elif isinstance(node.value, str):
+            self._append(self.make_xref(node.value))
+        else:
+            raise InvalidValueError(
+                f"Unexpected Constant node value: ({type(node.value)}) {node.value=}"
+            )
+
+    def visit_Name(self, node: ast.Name):
+        xref_node = self.make_xref(node.id)
+        self._append(xref_node)
+
+    def visit_BinOp(self, node: ast.BinOp):
+        self.visit(node.left)
+        self._append(addnodes.desc_sig_space())
+        if isinstance(node.op, ast.BitOr):
+            self._append(addnodes.desc_sig_punctuation("", "|"))
+        else:
+            raise InvalidValueError(f"Unexpected BinOp: {node}")
+        self._append(addnodes.desc_sig_space())
+        self.visit(node.right)
+
+    def visit_Expr(self, node: ast.Expr):
+        self.visit(node.value)
+
+    def visit_Subscript(self, node: ast.Subscript):
+        self.visit(node.value)
+        self._append_and_push(addnodes.desc_type_parameter_list())
+        self.visit(node.slice)
+        self._doc_node_stack.pop()
+
+    def visit_Tuple(self, node: ast.Tuple):
+        for element in node.elts:
+            self._append_and_push(addnodes.desc_type_parameter())
+            self.visit(element)
+            self._doc_node_stack.pop()
+
+    def visit_List(self, node: ast.List):
+        self._append_and_push(addnodes.desc_type_parameter_list())
+        for element in node.elts:
+            self._append_and_push(addnodes.desc_type_parameter())
+            self.visit(element)
+            self._doc_node_stack.pop()
+
+    @override
+    def generic_visit(self, node):
+        raise InvalidValueError(f"Unexpected ast node: {type(node)} {node}")
+
+
+class _BzlXrefField(docfields.Field):
+    """Abstract base class to create cross references for fields."""
+
+    @override
+    def make_xrefs(
+        self,
+        rolename: str,
+        domain: str,
+        target: str,
+        innernode: type[sphinx_typing.TextlikeNode] = addnodes.literal_emphasis,
+        contnode: docutils_nodes.Node | None = None,
+        env: environment.BuildEnvironment | None = None,
+        inliner: states.Inliner | None = None,
+        location: docutils_nodes.Element | None = None,
+    ) -> list[docutils_nodes.Node]:
+        if rolename in ("arg", "attr"):
+            return self._make_xrefs_for_arg_attr(
+                rolename, domain, target, innernode, contnode, env, inliner, location
+            )
+        else:
+            return super().make_xrefs(
+                rolename, domain, target, innernode, contnode, env, inliner, location
+            )
+
+    def _make_xrefs_for_arg_attr(
+        self,
+        rolename: str,
+        domain: str,
+        arg_name: str,
+        innernode: type[sphinx_typing.TextlikeNode] = addnodes.literal_emphasis,
+        contnode: docutils_nodes.Node | None = None,
+        env: environment.BuildEnvironment | None = None,
+        inliner: states.Inliner | None = None,
+        location: docutils_nodes.Element | None = None,
+    ) -> list[docutils_nodes.Node]:
+        bzl_file = env.ref_context["bzl:file"]
+        anchor_prefix = ".".join(env.ref_context["bzl:doc_id_stack"])
+        if not anchor_prefix:
+            raise InvalidValueError(
+                f"doc_id_stack empty when processing arg {arg_name}"
+            )
+        index_description = f"{arg_name} ({self.name} in {bzl_file}%{anchor_prefix})"
+        anchor_id = f"{anchor_prefix}.{arg_name}"
+        full_id = ".".join(env.ref_context["bzl:object_id_stack"] + [arg_name])
+
+        env.get_domain(domain).add_object(
+            _ObjectEntry(
+                full_id=full_id,
+                display_name=arg_name,
+                object_type=self.name,
+                search_priority=1,
+                index_entry=domains.IndexEntry(
+                    name=arg_name,
+                    subtype=_INDEX_SUBTYPE_NORMAL,
+                    docname=env.docname,
+                    anchor=anchor_id,
+                    extra="",
+                    qualifier="",
+                    descr=index_description,
+                ),
+            ),
+            # This allows referencing an arg as e.g `funcname.argname`
+            alt_names=[anchor_id],
+        )
+
+        # Change how arg xrefs are created:
+        # 1. Use the full id instead of base name. This makes it unambiguous
+        #    as to what it's referencing.
+        pending_xref = super().make_xref(
+            # The full_id is used as the target so it's unambiguous.
+            rolename,
+            domain,
+            f"{arg_name} <{full_id}>",
+            innernode,
+            contnode,
+            env,
+            inliner,
+            location,
+        )
+
+        wrapper = docutils_nodes.inline(ids=[anchor_id])
+
+        index_node = addnodes.index(
+            entries=[
+                _index_node_tuple(
+                    "single", f"{self.name}; {index_description}", anchor_id
+                ),
+                _index_node_tuple("single", index_description, anchor_id),
+            ]
+        )
+        wrapper += index_node
+        wrapper += pending_xref
+        return [wrapper]
+
+
+class _BzlField(_BzlXrefField, docfields.Field):
+    """A non-repeated field with xref support."""
+
+
+class _BzlGroupedField(_BzlXrefField, docfields.GroupedField):
+    """A repeated field grouped as a list with xref support."""
+
+
+class _BzlCsvField(_BzlXrefField):
+    """Field with a CSV list of values."""
+
+    def __init__(self, *args, body_domain: str = "", **kwargs):
+        super().__init__(*args, **kwargs)
+        self._body_domain = body_domain
+
+    def make_field(
+        self,
+        types: dict[str, list[docutils_nodes.Node]],
+        domain: str,
+        item: tuple,
+        env: environment.BuildEnvironment = None,
+        inliner: states.Inliner | None = None,
+        location: docutils_nodes.Element | None = None,
+    ) -> docutils_nodes.field:
+        field_text = item[1][0].astext()
+        parts = [p.strip() for p in field_text.split(",")]
+        field_body = docutils_nodes.field_body()
+        for _, is_last, part in _position_iter(parts):
+            node = self.make_xref(
+                self.bodyrolename,
+                self._body_domain or domain,
+                part,
+                env=env,
+                inliner=inliner,
+                location=location,
+            )
+            field_body += node
+            if not is_last:
+                field_body += docutils_nodes.Text(", ")
+
+        field_name = docutils_nodes.field_name("", self.label)
+        return docutils_nodes.field("", field_name, field_body)
+
+
+class _BzlCurrentFile(sphinx_docutils.SphinxDirective):
+    """Sets what bzl file following directives are defined in.
+
+    The directive's argument is an absolute Bazel label, e.g. `//foo:bar.bzl`
+    or `@repo//foo:bar.bzl`. The repository portion is optional; if specified,
+    it will override the `bzl_default_repository_name` configuration setting.
+
+    Example MyST usage
+
+    ```
+    :::{bzl:currentfile} //my:file.bzl
+    :::
+    ```
+    """
+
+    has_content = False
+    required_arguments = 1
+    final_argument_whitespace = False
+
+    @override
+    def run(self) -> list[docutils_nodes.Node]:
+        label = self.arguments[0].strip()
+        repo, slashes, file_label = label.partition("//")
+        file_label = slashes + file_label
+        if not repo:
+            repo = self.env.config.bzl_default_repository_name
+        self.env.ref_context["bzl:repo"] = repo
+        self.env.ref_context["bzl:file"] = file_label
+        self.env.ref_context["bzl:object_id_stack"] = [
+            _label_to_dotted_name(repo + file_label)
+        ]
+        self.env.ref_context["bzl:doc_id_stack"] = []
+        return []
+
+
+class _BzlAttrInfo(sphinx_docutils.SphinxDirective):
+    has_content = False
+    required_arguments = 1
+    optional_arguments = 0
+    option_spec = {
+        "executable": docutils_directives.flag,
+        "mandatory": docutils_directives.flag,
+    }
+
+    def run(self):
+        content_node = docutils_nodes.paragraph("", "")
+        content_node += docutils_nodes.paragraph(
+            "", "mandatory" if "mandatory" in self.options else "optional"
+        )
+        if "executable" in self.options:
+            content_node += docutils_nodes.paragraph("", "Must be an executable")
+
+        return [content_node]
+
+
+class _BzlObject(sphinx_directives.ObjectDescription[_BzlObjectId]):
+    """Base class for describing a Bazel/Starlark object.
+
+    This directive takes a single argument: a string name with optional
+    function signature.
+
+    * The name can be a dotted name, e.g. `a.b.foo`
+    * The signature is in Python signature syntax, e.g. `foo(a=x) -> R`
+    * The signature supports default values.
+    * Arg type annotations are not supported; use `{bzl:type}` instead as
+      part of arg/attr documentation.
+
+    Example signatures:
+      * `foo`
+      * `foo(arg1, arg2)`
+      * `foo(arg1, arg2=default) -> returntype`
+    """
+
+    option_spec = sphinx_directives.ObjectDescription.option_spec | {
+        "origin-key": docutils_directives.unchanged,
+    }
+
+    @override
+    def before_content(self) -> None:
+        symbol_name = self.names[-1].symbol
+        self.env.ref_context["bzl:object_id_stack"].append(symbol_name)
+        self.env.ref_context["bzl:doc_id_stack"].append(symbol_name)
+
+    @override
+    def transform_content(self, content_node: addnodes.desc_content) -> None:
+        def first_child_with_class_name(root, class_name) -> "None | Element":
+            matches = root.findall(
+                lambda node: isinstance(node, docutils_nodes.Element)
+                and class_name in node["classes"]
+            )
+            found = next(matches, None)
+            return found
+
+        def match_arg_field_name(node):
+            # fmt: off
+            return (
+                isinstance(node, docutils_nodes.field_name)
+                and node.astext().startswith(("arg ", "attr "))
+            )
+            # fmt: on
+
+        # Move the spans for the arg type and default value to be first.
+        arg_name_fields = list(content_node.findall(match_arg_field_name))
+        for arg_name_field in arg_name_fields:
+            arg_body_field = arg_name_field.next_node(descend=False, siblings=True)
+            # arg_type_node = first_child_with_class_name(arg_body_field, "arg-type-span")
+            arg_type_node = first_child_with_class_name(arg_body_field, "type-expr")
+            arg_default_node = first_child_with_class_name(
+                arg_body_field, "default-value-span"
+            )
+
+            # Inserting into the body field itself causes the elements
+            # to be grouped into the paragraph node containing the arg
+            # name (as opposed to the paragraph node containing the
+            # doc text)
+
+            if arg_default_node:
+                arg_default_node.parent.remove(arg_default_node)
+                arg_body_field.insert(0, arg_default_node)
+
+            if arg_type_node:
+                arg_type_node.parent.remove(arg_type_node)
+                decorated_arg_type_node = docutils_nodes.inline(
+                    "",
+                    "",
+                    docutils_nodes.Text("("),
+                    arg_type_node,
+                    docutils_nodes.Text(") "),
+                    classes=["arg-type-span"],
+                )
+                # arg_body_field.insert(0, arg_type_node)
+                arg_body_field.insert(0, decorated_arg_type_node)
+
+    @override
+    def after_content(self) -> None:
+        self.env.ref_context["bzl:object_id_stack"].pop()
+        self.env.ref_context["bzl:doc_id_stack"].pop()
+
+    # docs on how to build signatures:
+    # https://www.sphinx-doc.org/en/master/extdev/nodes.html#sphinx.addnodes.desc_signature
    @override
    def handle_signature(
        self, sig_text: str, sig_node: addnodes.desc_signature
    ) -> _BzlObjectId:
        """Parse the directive's signature text and populate the signature node.

        Args:
            sig_text: signature as written in the doc, e.g. "foo.bar(x, y=1)".
            sig_node: node the rendered signature parts are appended to.

        Returns:
            The computed object id; also stored as `sig_node["bzl:object_id"]`.
        """
        self._signature_add_object_type(sig_node)

        # Split "name(params)" into the name and the parenthesized params
        # (params_text keeps the surrounding parentheses when present).
        relative_name, lparen, params_text = sig_text.partition("(")
        if lparen:
            params_text = lparen + params_text

        relative_name = relative_name.strip()

        name_prefix, _, base_symbol_name = relative_name.rpartition(".")
        if name_prefix:
            # Respect whatever the signature wanted
            display_prefix = name_prefix
        else:
            # Otherwise, show the outermost name. This makes ctrl+f finding
            # for a symbol a bit easier.
            display_prefix = ".".join(self.env.ref_context["bzl:doc_id_stack"])
            _, _, display_prefix = display_prefix.rpartition(".")

        if display_prefix:
            display_prefix = display_prefix + "."
            sig_node += addnodes.desc_addname(display_prefix, display_prefix)
        sig_node += addnodes.desc_name(base_symbol_name, base_symbol_name)

        # Render a ": <type>" annotation when the :type: option was given,
        # cross referencing each symbol within the type expression.
        if type_expr := self.options.get("type"):

            def make_xref(name, title=None):
                content_node = addnodes.desc_type(name, name)
                return addnodes.pending_xref(
                    "",
                    content_node,
                    refdomain="bzl",
                    reftype="type",
                    reftarget=name,
                )

            attr_annotation_node = addnodes.desc_annotation(
                type_expr,
                "",
                addnodes.desc_sig_punctuation("", ":"),
                addnodes.desc_sig_space(),
                _TypeExprParser.xrefs_from_type_expr(type_expr, make_xref),
            )
            sig_node += attr_annotation_node

        if params_text:
            try:
                signature = inspect.signature_from_str(params_text)
            except SyntaxError:
                # Stardoc doesn't provide accurate info, so the reconstructed
                # signature might not be valid syntax. Rather than fail, just
                # provide a plain-text description of the approximate signature.
                # See https://github.com/bazelbuild/stardoc/issues/225
                sig_node += addnodes.desc_parameterlist(
                    # Offset by 1 to remove the surrounding parentheses
                    params_text[1:-1],
                    params_text[1:-1],
                )
            else:
                last_kind = None
                paramlist_node = addnodes.desc_parameterlist()
                for param in signature.parameters.values():
                    if param.kind == param.KEYWORD_ONLY and last_kind in (
                        param.POSITIONAL_OR_KEYWORD,
                        param.POSITIONAL_ONLY,
                        None,
                    ):
                        # Add separator for keyword only parameter: *
                        paramlist_node += addnodes.desc_parameter(
                            "", "", addnodes.desc_sig_operator("", "*")
                        )

                    last_kind = param.kind
                    node = addnodes.desc_parameter()
                    if param.kind == param.VAR_POSITIONAL:
                        node += addnodes.desc_sig_operator("", "*")
                    elif param.kind == param.VAR_KEYWORD:
                        node += addnodes.desc_sig_operator("", "**")

                    node += addnodes.desc_sig_name(rawsource="", text=param.name)
                    if param.default is not param.empty:
                        node += addnodes.desc_sig_operator("", "=")
                        node += docutils_nodes.inline(
                            "",
                            param.default,
                            classes=["default_value"],
                            support_smartquotes=False,
                        )
                    paramlist_node += node
                sig_node += paramlist_node

                if signature.return_annotation is not signature.empty:
                    sig_node += addnodes.desc_returns("", signature.return_annotation)

        obj_id = _BzlObjectId.from_env(self.env, relative_name)

        sig_node["bzl:object_id"] = obj_id.full_id
        return obj_id
+
+    def _signature_add_object_type(self, sig_node: addnodes.desc_signature):
+        if sig_object_type := self._get_signature_object_type():
+            sig_node += addnodes.desc_annotation("", self._get_signature_object_type())
+            sig_node += addnodes.desc_sig_space()
+
    @override
    def add_target_and_index(
        self, obj_desc: _BzlObjectId, sig: str, sig_node: addnodes.desc_signature
    ) -> None:
        """Register anchors, general-index entries, and the domain object.

        Args:
            obj_desc: id object returned by `handle_signature`.
            sig: raw signature text (only forwarded to the super call).
            sig_node: the signature node to attach the anchor id to.
        """
        super().add_target_and_index(obj_desc, sig, sig_node)
        symbol_name = obj_desc.symbol
        display_name = sig_node.get("bzl:index_display_name", symbol_name)

        # Nested objects get anchors prefixed with the enclosing doc-id
        # stack, e.g. "Provider.field".
        anchor_prefix = ".".join(self.env.ref_context["bzl:doc_id_stack"])
        if anchor_prefix:
            anchor_id = f"{anchor_prefix}.{symbol_name}"
            file_location = "%" + anchor_prefix
        else:
            anchor_id = symbol_name
            file_location = ""

        sig_node["ids"].append(anchor_id)

        object_type_display = self._get_object_type_display_name()
        index_description = (
            f"{display_name} ({object_type_display} in "
            f"{obj_desc.bzl_file}{file_location})"
        )
        # General-index entries: one per index type, plus a plain entry
        # keyed on the description itself.
        self.indexnode["entries"].extend(
            _index_node_tuple("single", f"{index_type}; {index_description}", anchor_id)
            for index_type in [object_type_display] + self._get_additional_index_types()
        )
        self.indexnode["entries"].append(
            _index_node_tuple("single", index_description, anchor_id),
        )

        object_entry = _ObjectEntry(
            full_id=obj_desc.full_id,
            display_name=display_name,
            object_type=self.objtype,
            search_priority=1,
            index_entry=domains.IndexEntry(
                name=symbol_name,
                subtype=_INDEX_SUBTYPE_NORMAL,
                docname=self.env.docname,
                anchor=anchor_id,
                extra="",
                qualifier="",
                descr=index_description,
            ),
        )

        alt_names = []
        if origin_key := self.options.get("origin-key"):
            # Normalize the origin label to the domain's dotted-id format.
            alt_names.append(
                origin_key
                # Options require \@ for leading @, but don't
                # remove the escaping slash, so we have to do it manually
                .lstrip("\\")
                .lstrip("@")
                .replace("//", "/")
                .replace(".bzl%", ".")
                .replace("/", ".")
                .replace(":", ".")
            )
        alt_names.extend(self._get_alt_names(object_entry))

        self.env.get_domain(self.domain).add_object(object_entry, alt_names=alt_names)
+
+    def _get_additional_index_types(self):
+        return []
+
+    @override
+    def _object_hierarchy_parts(
+        self, sig_node: addnodes.desc_signature
+    ) -> tuple[str, ...]:
+        return tuple(sig_node["bzl:object_id"].split("."))
+
+    @override
+    def _toc_entry_name(self, sig_node: addnodes.desc_signature) -> str:
+        return sig_node["_toc_parts"][-1]
+
+    def _get_object_type_display_name(self) -> str:
+        return self.env.get_domain(self.domain).object_types[self.objtype].lname
+
+    def _get_signature_object_type(self) -> str:
+        return self._get_object_type_display_name()
+
+    def _get_alt_names(self, object_entry):
+        return [object_entry.full_id.split(".")[-1]]
+
+
class _BzlCallable(_BzlObject):
    """Abstract base class for objects that are callable."""

    @override
    def _get_alt_names(self, object_entry):
        """Callables are also findable by their bare symbol name."""
        symbol = object_entry.full_id.rpartition(".")[2]
        return [symbol]
+
+
class _BzlProvider(_BzlObject):
    """Documents a provider type.

    Example MyST usage

    ```
    ::::{bzl:provider} MyInfo

    Docs about MyInfo

    :::{bzl:provider-field} some_field
    :type: depset[str]
    :::
    ::::
    ```
    """

    @override
    def _get_alt_names(self, object_entry):
        """Providers are also findable by their bare symbol name."""
        symbol = object_entry.full_id.rpartition(".")[2]
        return [symbol]
+
+
class _BzlProviderField(_BzlObject):
    """Documents a single field of a provider.

    A field may declare its type with the `:type:` option; the value is any
    type expression understood by the `{bzl:type}` role.

    ```
    :::{bzl:provider-field} foo
    :type: str
    :::
    ```
    """

    # Extend the base options with the optional `:type:` option.
    option_spec = dict(_BzlObject.option_spec)
    option_spec["type"] = docutils_directives.unchanged

    @override
    def _get_signature_object_type(self) -> str:
        # Empty: no object-type prefix in front of every field signature.
        return ""

    @override
    def _get_alt_names(self, object_entry):
        # Allow lookup by the last two id components, i.e. "Provider.field".
        provider_and_field = object_entry.full_id.split(".")[-2:]
        return [".".join(provider_and_field)]
+
+
class _BzlRepositoryRule(_BzlCallable):
    """Documents a repository rule.

    Doc fields:
    * attr: Documents attributes of the rule. Takes a single arg, the
      attribute name. Can be repeated. The special roles `{default-value}`
      and `{arg-type}` can be used to indicate the default value and
      type of attribute, respectively.
    * environment-variables: a CSV list of environment variable names.
      They will be cross referenced with matching environment variables.

    Example MyST usage

    ```
    :::{bzl:repo-rule} myrule(foo)

    :attr foo: {default-value}`"foo"` {arg-type}`attr.string` foo doc string

    :environment-variables: FOO, BAR
    :::
    ```
    """

    # Maps the doc fields above to rendered field lists; see
    # _BzlGroupedField/_BzlCsvField for the rendering details.
    doc_field_types = [
        _BzlGroupedField(
            "attr",
            label=_("Attributes"),
            names=["attr"],
            rolename="attr",
            can_collapse=False,
        ),
        _BzlCsvField(
            "environment-variables",
            label=_("Environment Variables"),
            names=["environment-variables"],
            # Each name is cross referenced against std:envvar entries.
            body_domain="std",
            bodyrolename="envvar",
            has_arg=False,
        ),
    ]

    @override
    def _get_signature_object_type(self) -> str:
        # Shown before the signature, e.g. "repo rule myrule(...)".
        return "repo rule"
+
+
class _BzlRule(_BzlCallable):
    """Documents a rule.

    Doc fields:
    * attr: Documents attributes of the rule. Takes a single arg, the
      attribute name. Can be repeated. The special roles `{default-value}`
      and `{arg-type}` can be used to indicate the default value and
      type of attribute, respectively.
    * provides: A type expression of the provider types the rule provides.
      To indicate different groupings, use `|` and `[]`. For example,
      `FooInfo | [BarInfo, BazInfo]` means it provides either `FooInfo`
      or both of `BarInfo` and `BazInfo`.

    Example MyST usage

    ```
    :::{bzl:rule} myrule(foo)

    :attr foo: {default-value}`"foo"` {arg-type}`attr.string` foo doc string

    :provides: FooInfo | BarInfo
    :::
    ```
    """

    doc_field_types = [
        _BzlGroupedField(
            "attr",
            label=_("Attributes"),
            names=["attr"],
            rolename="attr",
            can_collapse=False,
        ),
        _BzlField(
            "provides",
            label="Provides",
            has_arg=False,
            names=["provides"],
            # The body is parsed as a type expression and cross referenced.
            bodyrolename="type",
        ),
    ]
+
+
class _BzlAspect(_BzlObject):
    """Documents an aspect.

    Doc fields:
    * attr: Documents attributes of the aspect. Takes a single arg, the
      attribute name. Can be repeated. The special roles `{default-value}`
      and `{arg-type}` can be used to indicate the default value and
      type of attribute, respectively.
    * aspect-attributes: A CSV list of attribute names the aspect
      propagates along.

    Example MyST usage

    ```
    :::{bzl:aspect} myaspect

    :attr foo: {default-value}`"foo"` {arg-type}`attr.string` foo doc string

    :aspect-attributes: srcs, deps
    :::
    ```
    """

    doc_field_types = [
        _BzlGroupedField(
            "attr",
            label=_("Attributes"),
            names=["attr"],
            rolename="attr",
            can_collapse=False,
        ),
        _BzlCsvField(
            "aspect-attributes",
            label=_("Aspect Attributes"),
            names=["aspect-attributes"],
            has_arg=False,
        ),
    ]
+
+
class _BzlFunction(_BzlCallable):
    """Documents a general purpose function.

    Doc fields:
    * arg: Documents the arguments of the function. Takes a single arg, the
      arg name. Can be repeated. The special roles `{default-value}`
      and `{arg-type}` can be used to indicate the default value and
      type of attribute, respectively.
    * returns: Documents what the function returns. The special role
      `{return-type}` can be used to indicate the return type of the function.

    Example MyST usage

    ```
    :::{bzl:function} myfunc(a, b=None) -> bool

    :arg a: {arg-type}`str` some arg doc
    :arg b: {arg-type}`int | None` {default-value}`42` more arg doc
    :returns: {return-type}`bool` doc about return value.
    :::
    ```
    """

    doc_field_types = [
        _BzlGroupedField(
            "arg",
            label=_("Args"),
            names=["arg"],
            rolename="arg",
            can_collapse=False,
        ),
        docfields.Field(
            "returns",
            label=_("Returns"),
            has_arg=False,
            names=["returns"],
        ),
    ]

    @override
    def _get_signature_object_type(self) -> str:
        # Empty: don't prefix every function signature with "function".
        return ""
+
+
class _BzlModuleExtension(_BzlObject):
    """Documents a module_extension.

    Doc fields:
    * os-dependent: Documents if the module extension depends on the host
      operating system.
    * arch-dependent: Documents if the module extension depends on the host
      architecture.
    * environment-variables: a CSV list of environment variable names.
      They will be cross referenced with matching environment variables.

    Tag classes are documented using the bzl:tag-class directives within
    this directive.

    Example MyST usage:

    ```
    ::::{bzl:module-extension} myext

    :os-dependent: True
    :arch-dependent: False

    :::{bzl:tag-class} mytag(myattr)

    :attr myattr:
      {arg-type}`attr.string_list`
      doc for attribute
    :::
    ::::
    ```
    """

    doc_field_types = [
        _BzlField(
            "os-dependent",
            label="OS Dependent",
            has_arg=False,
            names=["os-dependent"],
        ),
        _BzlField(
            "arch-dependent",
            label="Arch Dependent",
            has_arg=False,
            names=["arch-dependent"],
        ),
        _BzlCsvField(
            "environment-variables",
            label=_("Environment Variables"),
            names=["environment-variables"],
            # Each name is cross referenced against std:envvar entries.
            body_domain="std",
            bodyrolename="envvar",
            has_arg=False,
        ),
    ]

    @override
    def _get_signature_object_type(self) -> str:
        # Shown before the signature, e.g. "module ext myext".
        return "module ext"
+
+
class _BzlTagClass(_BzlCallable):
    """Documents a tag class for a module extension.

    Doc fields:
    * attr: Documents attributes of the tag class. Takes a single arg, the
      attribute name. Can be repeated. The special roles `{default-value}`
      and `{arg-type}` can be used to indicate the default value and
      type of attribute, respectively.

    Example MyST usage, note that this directive should be nested with
    a `bzl:module-extension` directive.

    ```
    :::{bzl:tag-class} mytag(myattr)

    :attr myattr:
      {arg-type}`attr.string_list`
      doc for attribute
    :::
    ```
    """

    doc_field_types = [
        _BzlGroupedField(
            "arg",
            # NOTE(review): label says "Attributes" while the field key and
            # rolename are "arg" and the doc-field name is "attr" — looks
            # intentional (tag attrs are written as :attr: but xref'd as
            # args), but worth confirming.
            label=_("Attributes"),
            names=["attr"],
            rolename="arg",
            can_collapse=False,
        ),
    ]

    @override
    def _get_signature_object_type(self) -> str:
        # Empty: don't prefix every tag-class signature with its type.
        return ""
+
+
class _TargetType(enum.Enum):
    # Distinguishes how `_BzlTarget` subclasses render a signature:
    # FLAG targets get a leading `--`, plain TARGETs do not.
    TARGET = "target"
    FLAG = "flag"
+
+
class _BzlTarget(_BzlObject):
    """Documents an arbitrary target."""

    _TARGET_TYPE = _TargetType.TARGET

    # Consistency fix: every other handle_signature override in this file
    # carries @override and the base signature's annotations.
    @override
    def handle_signature(
        self, sig_text: str, sig_node: addnodes.desc_signature
    ) -> _BzlObjectId:
        """Parse `[package:]name`, render it, and compute the object id.

        When the package part is omitted, it is derived from the current
        `bzl:file` ref context by stripping everything from `:BUILD` on.
        """
        self._signature_add_object_type(sig_node)
        if ":" in sig_text:
            package, target_name = sig_text.split(":", 1)
        else:
            target_name = sig_text
            package = self.env.ref_context["bzl:file"]
            # NOTE(review): assumes the current file label contains ":BUILD";
            # str.find returns -1 otherwise, which would chop the last char.
            package = package[: package.find(":BUILD")]

        package = package + ":"
        if self._TARGET_TYPE == _TargetType.FLAG:
            # Flags are rendered with the conventional `--` prefix.
            sig_node += addnodes.desc_addname("--", "--")
        sig_node += addnodes.desc_addname(package, package)
        sig_node += addnodes.desc_name(target_name, target_name)

        obj_id = _BzlObjectId.from_env(self.env, target=sig_text)
        sig_node["bzl:object_id"] = obj_id.full_id
        sig_node["bzl:index_display_name"] = f"{package}{target_name}"
        return obj_id

    @override
    def _get_signature_object_type(self) -> str:
        # We purposely return empty here because having "target" in front
        # of every label isn't very helpful
        return ""
+
+
# TODO: Integrate with the option directive, since flags are options, after all.
# https://www.sphinx-doc.org/en/master/usage/domains/standard.html#directive-option
class _BzlFlag(_BzlTarget):
    """Documents a flag"""

    _TARGET_TYPE = _TargetType.FLAG

    @override
    def _get_signature_object_type(self) -> str:
        # Shown before the signature, e.g. "flag --//:my_flag".
        return "flag"

    # Consistency fix: this overrides _BzlObject._get_additional_index_types
    # and should carry @override like every other override in this file.
    @override
    def _get_additional_index_types(self):
        # Flags are targets too, so index them under "target" as well.
        return ["target"]
+
+
class _DefaultValueRole(sphinx_docutils.SphinxRole):
    """Documents the default value for an arg or attribute.

    This is a special role used within `:arg:` and `:attr:` doc fields to
    indicate the default value. The rendering process looks for this role
    and reformats and moves its content for better display.

    Styling can be customized by matching the `.default_value` class.
    """

    def run(self) -> _RoleRunResult:
        """Wrap the role text as `(default <value>) ` with marker classes."""
        value_node = docutils_nodes.inline(
            "", self.text, classes=["sig", "default_value"]
        )
        wrapper_node = docutils_nodes.emphasis(
            "",
            "(default ",
            value_node,
            docutils_nodes.Text(") "),
            classes=["default-value-span"],
        )
        return ([wrapper_node], [])
+
+
class _TypeRole(sphinx_docutils.SphinxRole):
    """Documents a type (or type expression) with crossreferencing.

    This is an inline role used to create cross references to other types.

    The content is interpreted as a reference to a type or an expression
    of types. The syntax uses Python-style syntax with `|` and `[]`, e.g.
    `foo.MyType | str | list[str] | dict[str, int]`. Each symbolic name
    will be turned into a cross reference; see the domain's documentation
    for how to reference objects.

    Example MyST usage:

    ```
    This function accepts {bzl:type}`str | list[str]` for usernames
    ```
    """

    def __init__(self):
        super().__init__()
        # Reused for every symbol in the expression to generate a pending
        # cross reference.
        self._xref = roles.XRefRole()

    def run(self) -> _RoleRunResult:
        """Parse the type expression, xref-ing each symbolic name."""
        outer_messages = []

        def make_xref(name):
            # Delegate to XRefRole (as if `:bzl:type:` were used directly)
            # and collect any system messages it produces.
            nodes, msgs = self._xref(
                "bzl:type",
                name,
                name,
                self.lineno,
                self.inliner,
                self.options,
                self.content,
            )
            outer_messages.extend(msgs)
            if len(nodes) == 1:
                return nodes[0]
            else:
                return docutils_nodes.inline("", "", nodes)

        root = _TypeExprParser.xrefs_from_type_expr(self.text, make_xref)
        return ([root], outer_messages)
+
+
class _ReturnTypeRole(_TypeRole):
    """Documents the return type for function.

    This is a special role used within `:returns:` doc fields to
    indicate the return type of the function. The rendering process looks for
    this role and reformats and moves its content for better display.

    Example MyST Usage

    ```
    :::{bzl:function} foo()

    :returns: {return-type}`list[str]`
    :::
    ```
    """

    def run(self) -> _RoleRunResult:
        """Render the type expression followed by a ` -- ` separator."""
        type_nodes, messages = super().run()
        separator = docutils_nodes.Text(" -- ")
        return type_nodes + [separator], messages
+
+
class _RequiredProvidersRole(_TypeRole):
    """Documents the providers an attribute requires.

    This is a special role used within `:arg:` or `:attr:` doc fields to
    indicate the types of providers that are required. The rendering process
    looks for this role and reformats its content for better display, but its
    position is left as-is; typically it would be its own paragraph near the
    end of the doc.

    The syntax is a pipe (`|`) delimited list of types or groups of types,
    where groups are indicated using `[...]`. e.g, to express that FooInfo OR
    (both of BarInfo and BazInfo) are supported, write `FooInfo | [BarInfo,
    BazInfo]`

    Example MyST Usage

    ```
    :::{bzl:rule} foo(bar)

    :attr bar: My attribute doc

      {required-providers}`CcInfo | [PyInfo, JavaInfo]`
    :::
    ```
    """

    def run(self) -> _RoleRunResult:
        """Prefix the xref'd provider expression with a label."""
        xref_nodes, messages = super().run()
        label_node = docutils_nodes.emphasis("", "Required providers: ")
        return [label_node, *xref_nodes], messages
+
+
class _BzlIndex(domains.Index):
    """An index of a bzl file's objects.

    NOTE: This generates the entries for the *domain specific* index
    (bzl-index.html), not the general index (genindex.html). To affect
    the general index, index nodes and directives must be used (grep
    for `self.indexnode`).
    """

    name = "index"
    localname = "Bazel/Starlark Object Index"
    shortname = "Bzl"

    def generate(
        self, docnames: Iterable[str] | None = None
    ) -> tuple[list[tuple[str, list[domains.IndexEntry]]], bool]:
        # `docnames` is part of the Sphinx Index API but is not used here:
        # all registered objects are always included.
        content = collections.defaultdict(list)

        # sort the list of objects in alphabetical order
        objects = self.domain.data["objects"].values()
        objects = sorted(objects, key=lambda obj: obj.index_entry.name)

        # Group by first letter
        for entry in objects:
            index_entry = entry.index_entry
            content[index_entry.name[0].lower()].append(index_entry)

        # convert the dict to the sorted list of tuples expected
        content = sorted(content.items())

        # Second element is Sphinx's "collapse" flag for sub-entries.
        return content, True
+
+
+class _BzlDomain(domains.Domain):
+    """Domain for Bazel/Starlark objects.
+
+    Directives
+
+    There are directives for defining Bazel objects and their functionality.
+    See the respective directive classes for details.
+
+    Public Crossreferencing Roles
+
+    These are roles that can be used in docs to create cross references.
+
+    Objects are fully identified using dotted notation converted from the Bazel
+    label and symbol name within a `.bzl` file. The `@`, `/` and `:` characters
+    are converted to dots (with runs removed), and `.bzl` is removed from file
+    names. The dotted path of a symbol in the bzl file is appended. For example,
+    the `paths.join` function in `@bazel_skylib//lib:paths.bzl` would be
+    identified as `bazel_skylib.lib.paths.paths.join`.
+
+    Shorter identifiers can be used. Within a project, the repo name portion
+    can be omitted. Within a file, file-relative names can be used.
+
+    * obj: Used to reference a single object without concern for its type.
+      This roles searches all object types for a name that matches the given
+      value. Example usage in MyST:
+      ```
+      {bzl:obj}`repo.pkg.file.my_function`
+      ```
+
+    * type: Transforms a type expression into cross references for objects
+      with object type "type". For example, it parses `int | list[str]` into
+      three links for each component part.
+
+    Public Typography Roles
+
+    These are roles used for special purposes to aid documentation.
+
+    * default-value: The default value for an argument or attribute. Only valid
+      to use within arg or attribute documentation. See `_DefaultValueRole` for
+      details.
+    * required-providers: The providers an attribute requires. Only
+      valud to use within an attribute documentation. See
+      `_RequiredProvidersRole` for details.
+    * return-type: The type of value a function returns. Only valid
+      within a function's return doc field. See `_ReturnTypeRole` for details.
+
+    Object Types
+
+    These are the types of objects that this domain keeps in its index.
+
+    * arg: An argument to a function or macro.
+    * aspect: A Bazel `aspect`.
+    * attribute: An input to a rule (regular, repository, aspect, or module
+      extension).
+    * method: A function bound to an instance of a struct acting as a type.
+    * module-extension: A Bazel `module_extension`.
+    * provider: A Bazel `provider`.
+    * provider-field: A field of a provider.
+    * repo-rule: A Bazel `repository_rule`.
+    * rule: A regular Bazel `rule`.
+    * tag-class: A Bazel `tag_class` of a `module_extension`.
+    * target: A Bazel target.
+    * type: A builtin Bazel type or user-defined structural type. User defined
+      structual types are typically instances `struct` created using a function
+      that acts as a constructor with implicit state bound using closures.
+    """
+
+    name = "bzl"
+    label = "Bzl"
+
+    # NOTE: Most every object type has "obj" as one of the roles because
+    # an object type's role determine what reftypes (cross referencing) can
+    # refer to it. By having "obj" for all of them, it allows writing
+    # :bzl:obj`foo` to restrict object searching to the bzl domain. Under the
+    # hood, this domain translates requests for the :any: role as lookups for
+    # :obj:.
+    # NOTE: We also use these object types for categorizing things in the
+    # generated index page.
+    object_types = {
+        "arg": domains.ObjType("arg", "arg", "obj"),  # macro/function arg
+        "aspect": domains.ObjType("aspect", "aspect", "obj"),
+        "attribute": domains.ObjType("attribute", "attribute", "obj"),  # rule attribute
+        "function": domains.ObjType("function", "func", "obj"),
+        "method": domains.ObjType("method", "method", "obj"),
+        "module-extension": domains.ObjType(
+            "module extension", "module_extension", "obj"
+        ),
+        # Providers are close enough to types that we include "type". This
+        # also makes :type: Foo work in directive options.
+        "provider": domains.ObjType("provider", "provider", "type", "obj"),
+        "provider-field": domains.ObjType("provider field", "field", "obj"),
+        "repo-rule": domains.ObjType("repository rule", "repo_rule", "obj"),
+        "rule": domains.ObjType("rule", "rule", "obj"),
+        "tag-class": domains.ObjType("tag class", "tag_class", "obj"),
+        "target": domains.ObjType("target", "target", "obj"),  # target in a build file
+        # Flags are also targets, so include "target" for xref'ing
+        "flag": domains.ObjType("flag", "flag", "target", "obj"),
+        # types are objects that have a constructor and methods/attrs
+        "type": domains.ObjType("type", "type", "obj"),
+    }
+    # This controls:
+    # * What is recognized when parsing, e.g. ":bzl:ref:`foo`" requires
+    # "ref" to be in the role dict below.
+    roles = {
+        "arg": roles.XRefRole(),
+        "attr": roles.XRefRole(),
+        "default-value": _DefaultValueRole(),
+        "obj": roles.XRefRole(),
+        "required-providers": _RequiredProvidersRole(),
+        "return-type": _ReturnTypeRole(),
+        "target": roles.XRefRole(),
+        "type": _TypeRole(),
+    }
+    # NOTE: Directives that have a corresponding object type should use
+    # the same key for both directive and object type. Some directives
+    # look up their corresponding object type.
+    directives = {
+        "aspect": _BzlAspect,
+        "currentfile": _BzlCurrentFile,
+        "function": _BzlFunction,
+        "module-extension": _BzlModuleExtension,
+        "provider": _BzlProvider,
+        "provider-field": _BzlProviderField,
+        "repo-rule": _BzlRepositoryRule,
+        "rule": _BzlRule,
+        "tag-class": _BzlTagClass,
+        "target": _BzlTarget,
+        "flag": _BzlFlag,
+        "attr-info": _BzlAttrInfo,
+    }
+    indices = {
+        _BzlIndex,
+    }
+
+    # NOTE: When adding additional data keys, make sure to update
+    # merge_domaindata
+    initial_data = {
+        # All objects; keyed by full id
+        # dict[str, _ObjectEntry]
+        "objects": {},
+        #  dict[str, dict[str, _ObjectEntry]]
+        "objects_by_type": {},
+        # Objects within each doc
+        # dict[str, dict[str, _ObjectEntry]]
+        "doc_names": {},
+        # Objects by a shorter or alternative name
+        # dict[str, _ObjectEntry]
+        "alt_names": {},
+    }
+
+    @override
+    def get_full_qualified_name(self, node: docutils_nodes.Element) -> str | None:
+        bzl_file = node.get("bzl:file")
+        symbol_name = node.get("bzl:symbol")
+        ref_target = node.get("reftarget")
+        return ".".join(filter(None, [bzl_file, symbol_name, ref_target]))
+
+    @override
+    def get_objects(self) -> Iterable[_GetObjectsTuple]:
+        for entry in self.data["objects"].values():
+            yield entry.to_get_objects_tuple()
+
    @override
    def resolve_any_xref(
        self,
        env: environment.BuildEnvironment,
        fromdocname: str,
        builder: builders.Builder,
        target: str,
        node: addnodes.pending_xref,
        contnode: docutils_nodes.Element,
    ) -> list[tuple[str, docutils_nodes.Element]]:
        """Resolve an `:any:` cross reference against this domain.

        Searches all object types (the "obj" pseudo-type). Returns a list
        with at most one `("bzl:<objtype>", refnode)` match; an empty list
        means the target is unknown to this domain.
        """
        del env, node  # Unused
        entry = self._find_entry_for_xref(fromdocname, "obj", target)
        if not entry:
            return []
        to_docname = entry.index_entry.docname
        to_anchor = entry.index_entry.anchor
        ref_node = sphinx_nodes.make_refnode(
            builder, fromdocname, to_docname, to_anchor, contnode, title=to_anchor
        )

        matches = [(f"bzl:{entry.object_type}", ref_node)]
        return matches
+
    @override
    def resolve_xref(
        self,
        env: environment.BuildEnvironment,
        fromdocname: str,
        builder: builders.Builder,
        typ: str,
        target: str,
        node: addnodes.pending_xref,
        contnode: docutils_nodes.Element,
    ) -> docutils_nodes.Element | None:
        """Resolve a typed cross reference (e.g. `:bzl:rule:`) to a refnode.

        Returns None when `target` cannot be found for role type `typ`,
        letting Sphinx report the reference as unresolved.
        """
        _log_debug(
            "resolve_xref: fromdocname=%s, typ=%s, target=%s", fromdocname, typ, target
        )
        del env, node  # Unused
        entry = self._find_entry_for_xref(fromdocname, typ, target)
        if not entry:
            return None

        to_docname = entry.index_entry.docname
        to_anchor = entry.index_entry.anchor
        return sphinx_nodes.make_refnode(
            builder, fromdocname, to_docname, to_anchor, contnode, title=to_anchor
        )
+
    def _find_entry_for_xref(
        self, fromdocname: str, object_type: str, target: str
    ) -> _ObjectEntry | None:
        """Look up the object entry a cross reference points at.

        Search order: names local to `fromdocname`, then the global objects
        (filtered by `object_type` unless it is the catch-all "obj"), then
        alternative/shortened names. Returns None when nothing matches.
        """
        # Normalize a variety of formats to the dotted format used internally.
        # --@foo//:bar flags
        # --@foo//:bar=value labels
        # //foo:bar.bzl labels
        target = (
            target.lstrip("@/:-")
            .replace("//", "/")
            .replace(".bzl%", ".")
            .replace("/", ".")
            .replace(":", ".")
        )
        # Elide the value part of --foo=bar flags
        # Note that the flag value could contain `=`
        if "=" in target:
            target = target[: target.find("=")]
        if target in self.data["doc_names"].get(fromdocname, {}):
            return self.data["doc_names"][fromdocname][target]

        if object_type == "obj":
            search_space = self.data["objects"]
        else:
            search_space = self.data["objects_by_type"].get(object_type, {})
        if target in search_space:
            return search_space[target]

        _log_debug("find_entry: alt_names=%s", sorted(self.data["alt_names"].keys()))
        if target in self.data["alt_names"]:
            return self.data["alt_names"][target]

        return None
+
+    def add_object(self, entry: _ObjectEntry, alt_names=None) -> None:
+        _log_debug(
+            "add_object: full_id=%s, object_type=%s, alt_names=%s",
+            entry.full_id,
+            entry.object_type,
+            alt_names,
+        )
+        if entry.full_id in self.data["objects"]:
+            existing = self.data["objects"][entry.full_id]
+            raise Exception(
+                f"Object {entry.full_id} already registered: "
+                + f"existing={existing}, incoming={entry}"
+            )
+        self.data["objects"][entry.full_id] = entry
+        self.data["objects_by_type"].setdefault(entry.object_type, {})
+        self.data["objects_by_type"][entry.object_type][entry.full_id] = entry
+
+        base_name = entry.full_id.split(".")[-1]
+
+        without_repo = entry.full_id.split(".", 1)[1]
+
+        # Default to an empty list when omitted; copy to avoid mutating the caller's list.
+        alt_names = [] if alt_names is None else list(alt_names)
+        alt_names.append(without_repo)
+
+        for alt_name in alt_names:
+            if alt_name in self.data["alt_names"]:
+                existing = self.data["alt_names"][alt_name]
+                # This situation usually occurs for the constructor function
+                # of a provider, but could occur for e.g. an exported struct
+                # with an attribute the same name as the struct. For lack
+                # of a better option, take the shorter entry, on the assumption
+                # it refers to some container of the longer entry.
+                if len(entry.full_id) < len(existing.full_id):
+                    self.data["alt_names"][alt_name] = entry
+            else:
+                self.data["alt_names"][alt_name] = entry
+
+        docname = entry.index_entry.docname
+        self.data["doc_names"].setdefault(docname, {})
+        self.data["doc_names"][docname][base_name] = entry
+
+    def merge_domaindata(
+        self, docnames: list[str], otherdata: dict[str, typing.Any]
+    ) -> None:
+        # Merge in simple dict[key, value] data
+        for top_key in ("objects", "alt_names"):
+            self.data[top_key].update(otherdata.get(top_key, {}))
+
+        # Merge in two-level dict[top_key, dict[sub_key, value]] data
+        for top_key in ("objects_by_type", "doc_names"):
+            existing_top_map = self.data[top_key]
+            for sub_key, sub_values in otherdata.get(top_key, {}).items():
+                if sub_key not in existing_top_map:
+                    existing_top_map[sub_key] = sub_values
+                else:
+                    existing_top_map[sub_key].update(sub_values)
+
+
+def _on_missing_reference(app, env: environment.BuildEnvironment, node, contnode):
+    if node["refdomain"] != "bzl":
+        return None
+    if node["reftype"] != "type":
+        return None
+
+    # There's no Bazel docs for None, so prevent missing xrefs warning
+    if node["reftarget"] == "None":
+        return contnode
+    return None
+
+
+def setup(app):
+    app.add_domain(_BzlDomain)
+
+    app.add_config_value(
+        "bzl_default_repository_name",
+        default=os.environ.get("SPHINX_BZL_DEFAULT_REPOSITORY_NAME", "@_main"),
+        rebuild="env",
+        types=[str],
+    )
+    app.connect("missing-reference", _on_missing_reference)
+
+    # Pygments says it supports starlark, but it doesn't seem to actually
+    # recognize `starlark` as a name. So just manually map it to python.
+    app.add_lexer("starlark", lexer_classes["python"])
+    app.add_lexer("bzl", lexer_classes["python"])
+
+    return {
+        "version": "1.0.0",
+        "parallel_read_safe": True,
+        "parallel_write_safe": True,
+    }
diff --git a/sphinxdocs/tests/proto_to_markdown/BUILD.bazel b/sphinxdocs/tests/proto_to_markdown/BUILD.bazel
index 2964785..09f5374 100644
--- a/sphinxdocs/tests/proto_to_markdown/BUILD.bazel
+++ b/sphinxdocs/tests/proto_to_markdown/BUILD.bazel
@@ -13,10 +13,12 @@
 # limitations under the License.
 
 load("//python:py_test.bzl", "py_test")
+load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")  # buildifier: disable=bzl-visibility
 
 py_test(
     name = "proto_to_markdown_test",
     srcs = ["proto_to_markdown_test.py"],
+    target_compatible_with = [] if IS_BAZEL_7_OR_HIGHER else ["@platforms//:incompatible"],
     deps = [
         "//sphinxdocs/private:proto_to_markdown_lib",
         "@dev_pip//absl_py",
diff --git a/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py b/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py
index 2f5b22e..3b664a5 100644
--- a/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py
+++ b/sphinxdocs/tests/proto_to_markdown/proto_to_markdown_test.py
@@ -114,21 +114,22 @@
     def test_basic_rendering_everything(self):
         actual = self._render(_EVERYTHING_MODULE)
 
+        self.assertIn("{bzl:currentfile} //pkg:foo.bzl", actual)
         self.assertRegex(actual, "# //pkg:foo.bzl")
         self.assertRegex(actual, "MODULE_DOC_STRING")
 
-        self.assertRegex(actual, "## rule_1.*")
+        self.assertRegex(actual, "{bzl:rule} rule_1.*")
         self.assertRegex(actual, "RULE_1_DOC_STRING")
         self.assertRegex(actual, "rule_1_attr_1")
         self.assertRegex(actual, "RULE_1_ATTR_1_DOC_STRING")
         self.assertRegex(actual, "RULE_1_ATTR_1_DEFAULT_VALUE")
 
-        self.assertRegex(actual, "## ProviderAlpha")
+        self.assertRegex(actual, "{bzl:provider} ProviderAlpha")
         self.assertRegex(actual, "PROVIDER_ALPHA_DOC_STRING")
         self.assertRegex(actual, "ProviderAlpha_field_a")
         self.assertRegex(actual, "PROVIDER_ALPHA_FIELD_A_DOC_STRING")
 
-        self.assertRegex(actual, "## function_1")
+        self.assertRegex(actual, "{bzl:function} function_1")
         self.assertRegex(actual, "FUNCTION_1_DOC_STRING")
         self.assertRegex(actual, "function_1_param_a")
         self.assertRegex(actual, "FUNCTION_1_PARAM_A_DOC_STRING")
@@ -136,22 +137,22 @@
         self.assertRegex(actual, "FUNCTION_1_RETURN_DOC_STRING")
         self.assertRegex(actual, "FUNCTION_1_DEPRECATED_DOC_STRING")
 
-        self.assertRegex(actual, "## aspect_1")
+        self.assertRegex(actual, "{bzl:aspect} aspect_1")
         self.assertRegex(actual, "ASPECT_1_DOC_STRING")
         self.assertRegex(actual, "aspect_1_aspect_attribute_a")
         self.assertRegex(actual, "aspect_1_attribute_a")
         self.assertRegex(actual, "ASPECT_1_ATTRIBUTE_A_DOC_STRING")
         self.assertRegex(actual, "694638")
 
-        self.assertRegex(actual, "## bzlmod_ext")
+        self.assertRegex(actual, "{bzl:module-extension} bzlmod_ext")
         self.assertRegex(actual, "BZLMOD_EXT_DOC_STRING")
-        self.assertRegex(actual, "### bzlmod_ext.bzlmod_ext_tag_a")
+        self.assertRegex(actual, "{bzl:tag-class} bzlmod_ext_tag_a")
         self.assertRegex(actual, "BZLMOD_EXT_TAG_A_DOC_STRING")
         self.assertRegex(actual, "bzlmod_ext_tag_a_attribute_1")
         self.assertRegex(actual, "BZLMOD_EXT_TAG_A_ATTRIBUTE_1_DOC_STRING")
         self.assertRegex(actual, "BZLMOD_EXT_TAG_A_ATTRIBUTE_1_DEFAULT_VALUE")
 
-        self.assertRegex(actual, "## repository_rule")
+        self.assertRegex(actual, "{bzl:repo-rule} repository_rule")
         self.assertRegex(actual, "REPOSITORY_RULE_DOC_STRING")
         self.assertRegex(actual, "repository_rule_attribute_a")
         self.assertRegex(actual, "REPOSITORY_RULE_ATTRIBUTE_A_DOC_STRING")
@@ -172,31 +173,25 @@
     name: "param_without_default"
   }
   parameter: {
+    name: "param_with_function_default",
+    default_value: "<function foo from //bar:baz.bzl>"
+  }
+  parameter: {
+    name: "param_with_label_default",
+    default_value: 'Label(*, "@repo//pkg:file.bzl")'
+  }
+  parameter: {
     name: "last_param"
   }
 }
         """
         )
-        self.assertIn("[param_with_default](#func_param_with_default)=DEFAULT,", actual)
-        self.assertIn("[param_without_default](#func_param_without_default),", actual)
-
-    def test_render_field_list(self):
-        actual = self._render(
-            """\
-file: "@repo//pkg:foo.bzl"
-func_info: {
-  function_name: "func"
-  parameter: {
-    name: "param"
-    default_value: "DEFAULT"
-  }
-}
-"""
-        )
-        self.assertRegex(
-            actual, re.compile("^:.*param.*¶.*headerlink.*:\n", re.MULTILINE)
-        )
-        self.assertRegex(actual, re.compile("^  .*#func_param", re.MULTILINE))
+        self.assertIn("param_with_default=DEFAULT,", actual)
+        self.assertIn("{default-value}`DEFAULT`", actual)
+        self.assertIn(":arg param_with_default:", actual)
+        self.assertIn("param_without_default,", actual)
+        self.assertIn('{default-value}`"@repo//pkg:file.bzl"`', actual)
+        self.assertIn("{default-value}`'<function foo from //bar:baz.bzl>'", actual)
 
 
 if __name__ == "__main__":
diff --git a/sphinxdocs/tests/sphinx_stardoc/BUILD.bazel b/sphinxdocs/tests/sphinx_stardoc/BUILD.bazel
new file mode 100644
index 0000000..b141e5f
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/BUILD.bazel
@@ -0,0 +1,64 @@
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")  # buildifier: disable=bzl-visibility
+load("//sphinxdocs:sphinx.bzl", "sphinx_build_binary", "sphinx_docs")
+load("//sphinxdocs:sphinx_stardoc.bzl", "sphinx_stardocs")
+
+sphinx_docs(
+    name = "docs",
+    srcs = glob(
+        include = [
+            "*.md",
+        ],
+    ) + [":bzl_docs"],
+    config = "conf.py",
+    formats = [
+        "html",
+    ],
+    renamed_srcs = {
+        "//sphinxdocs/inventories:bazel_inventory": "bazel_inventory.inv",
+    },
+    sphinx = ":sphinx-build",
+    strip_prefix = package_name() + "/",
+    # We only develop the docs using Linux/Mac, and there are deps that
+    # don't work for Windows, so just skip Windows.
+    target_compatible_with = select({
+        "@platforms//os:linux": [],
+        "@platforms//os:macos": [],
+        "//conditions:default": ["@platforms//:incompatible"],
+    }) if IS_BAZEL_7_OR_HIGHER else ["@platforms//:incompatible"],
+)
+
+sphinx_stardocs(
+    name = "bzl_docs",
+    docs = {
+        "bzl_function.md": dict(
+            dep = ":all_bzl",
+            input = "//sphinxdocs/tests/sphinx_stardoc:bzl_function.bzl",
+        ),
+        "bzl_providers.md": dict(
+            dep = ":all_bzl",
+            input = "//sphinxdocs/tests/sphinx_stardoc:bzl_providers.bzl",
+        ),
+        "bzl_rule.md": dict(
+            dep = ":all_bzl",
+            input = "//sphinxdocs/tests/sphinx_stardoc:bzl_rule.bzl",
+        ),
+    },
+    target_compatible_with = [] if IS_BAZEL_7_OR_HIGHER else ["@platforms//:incompatible"],
+)
+
+bzl_library(
+    name = "all_bzl",
+    srcs = glob(["*.bzl"]),
+)
+
+sphinx_build_binary(
+    name = "sphinx-build",
+    tags = ["manual"],  # Only needed as part of sphinx doc building
+    deps = [
+        "//sphinxdocs/src/sphinx_bzl",
+        "@dev_pip//myst_parser",
+        "@dev_pip//sphinx",
+        "@dev_pip//typing_extensions",  # Needed by sphinx_stardoc
+    ],
+)
diff --git a/sphinxdocs/tests/sphinx_stardoc/aspect.md b/sphinxdocs/tests/sphinx_stardoc/aspect.md
new file mode 100644
index 0000000..3c49903
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/aspect.md
@@ -0,0 +1,22 @@
+:::{default-domain} bzl
+:::
+
+:::{bzl:currentfile} //lang:aspect.bzl
+:::
+
+
+# Aspect
+
+:::{bzl:aspect} myaspect
+
+:attr aa1:
+  {bzl:default-value}`True`
+  {type}`bool`
+  aa1 doc
+:attr aa2:
+  {type}`str`
+  aa2 doc
+
+:aspect-attributes: edge1, edge2, deps, ra1
+:::
+
diff --git a/sphinxdocs/tests/sphinx_stardoc/bzl_function.bzl b/sphinxdocs/tests/sphinx_stardoc/bzl_function.bzl
new file mode 100644
index 0000000..822ff26
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/bzl_function.bzl
@@ -0,0 +1,34 @@
+"""Tests for plain functions."""
+
+def middle_varargs(a, *args, b):
+    """Expect: `middle_varargs(a, *args, b)`
+
+    NOTE: https://github.com/bazelbuild/stardoc/issues/226: `*args` renders last
+
+    Args:
+        a: {type}`str` doc for a
+        *args: {type}`varargs` doc for *args
+        b: {type}`list[str]` doc for b
+
+    """
+    _ = a, args, b  # @unused
+
+def mixture(a, b = 1, *args, c, d = 2, **kwargs):
+    """Expect: `mixture(a, b=1, *args, c, d=2, **kwargs)`"""
+    _ = a, b, args, c, d, kwargs  # @unused
+
+def only_varargs(*args):
+    """Expect: `only_varargs(*args)`"""
+    _ = args  # @unused
+
+def only_varkwargs(**kwargs):
+    """Expect: `only_varkwargs(**kwargs)`"""
+    _ = kwargs  # @unused
+
+def unnamed_varargs(*, a = 1, b):
+    """Expect: `unnamed_varargs(*, a=1, b)`"""
+    _ = a, b  # @unused
+
+def varargs_and_varkwargs(*args, **kwargs):
+    """Expect: `varargs_and_varkwargs(*args, **kwargs)`"""
+    _ = args, kwargs  # @unused
diff --git a/sphinxdocs/tests/sphinx_stardoc/bzl_providers.bzl b/sphinxdocs/tests/sphinx_stardoc/bzl_providers.bzl
new file mode 100644
index 0000000..189d975
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/bzl_providers.bzl
@@ -0,0 +1,4 @@
+"""Providers"""
+
+# buildifier: disable=provider-params
+GenericInfo = provider()
diff --git a/sphinxdocs/tests/sphinx_stardoc/bzl_rule.bzl b/sphinxdocs/tests/sphinx_stardoc/bzl_rule.bzl
new file mode 100644
index 0000000..d17c8bc
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/bzl_rule.bzl
@@ -0,0 +1,24 @@
+"""Tests for rules."""
+
+load(":bzl_providers.bzl", OtherGenericInfo = "GenericInfo")
+
+# buildifier: disable=provider-params
+GenericInfo = provider()
+
+# buildifier: disable=provider-params
+P1 = provider()
+
+# buildifier: disable=provider-params
+P2 = provider()
+
+def _impl(ctx):
+    _ = ctx  # @unused
+
+my_rule = rule(
+    implementation = _impl,
+    attrs = {
+        "srcs": attr.label(
+            providers = [[GenericInfo], [OtherGenericInfo], [P1, P2], [platform_common.ToolchainInfo]],
+        ),
+    },
+)
diff --git a/sphinxdocs/tests/sphinx_stardoc/conf.py b/sphinxdocs/tests/sphinx_stardoc/conf.py
new file mode 100644
index 0000000..bc288b0
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/conf.py
@@ -0,0 +1,33 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Project info
+
+project = "Sphinx Stardoc Test"
+
+extensions = [
+    "sphinx_bzl.bzl",
+    "myst_parser",
+    "sphinx.ext.intersphinx",
+]
+
+myst_enable_extensions = [
+    "fieldlist",
+    "attrs_block",
+    "attrs_inline",
+    "colon_fence",
+    "deflist",
+    "substitution",
+]
+
+# --- Stardoc configuration
+
+bzl_default_repository_name = "@testrepo"
+
+# --- Intersphinx configuration
+
+intersphinx_mapping = {
+    "bazel": ("https://bazel.build/", "bazel_inventory.inv"),
+}
diff --git a/sphinxdocs/tests/sphinx_stardoc/envvars.md b/sphinxdocs/tests/sphinx_stardoc/envvars.md
new file mode 100644
index 0000000..d6bcc1b
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/envvars.md
@@ -0,0 +1,11 @@
+# Environment Variables
+
+These are just defined so the repo rules have an xref target.
+
+:::{envvar} FOO
+The foo environment variable
+:::
+
+:::{envvar} BAR
+The bar environment variable
+:::
diff --git a/sphinxdocs/tests/sphinx_stardoc/function.md b/sphinxdocs/tests/sphinx_stardoc/function.md
new file mode 100644
index 0000000..de7d16a
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/function.md
@@ -0,0 +1,46 @@
+:::{default-domain} bzl
+:::
+
+:::{bzl:currentfile} //lang:function.bzl
+:::
+
+
+# Function
+
+Module documentation
+
+::::::{bzl:function} myfunc(foo, bar=False, baz=[]) -> FooObj
+
+This is a bazel function.
+
+:arg arg1:
+  {default-value}`99`
+  {type}`bool | int`
+  arg1 doc
+
+:arg arg2:
+  {default-value}`True`
+  {type}`dict[str, str]` my arg2 doc
+
+  and a second paragraph of text here
+:arg arg3:
+  {default-value}`"arg3default"`
+  {type}`list[int]`
+  my arg3 doc
+:arg arg4:
+  my arg4 doc
+
+:returns:
+  {bzl:return-type}`list | int`
+  description
+
+:::{deprecated} unspecified
+
+Some doc about the deprecation
+:::
+
+::::::
+
+:::{bzl:function} mylongfunc(arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8, arg9)
+
+:::
diff --git a/sphinxdocs/tests/sphinx_stardoc/glossary.md b/sphinxdocs/tests/sphinx_stardoc/glossary.md
new file mode 100644
index 0000000..b3c0721
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/glossary.md
@@ -0,0 +1,8 @@
+# Glossary
+
+:::{glossary}
+
+customterm
+: A custom term definition
+
+:::
diff --git a/sphinxdocs/tests/sphinx_stardoc/index.md b/sphinxdocs/tests/sphinx_stardoc/index.md
new file mode 100644
index 0000000..4f70482
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/index.md
@@ -0,0 +1,26 @@
+# Sphinx Stardoc Test
+
+This is a set of documents to test the sphinx_stardoc extension.
+
+To build and view these docs, run:
+
+```
+bazel run //sphinxdocs/tests/sphinx_stardoc:docs.serve
+```
+
+This will build the docs and start an HTTP server where they can be viewed.
+
+To aid the edit/debug cycle, `ibazel` can be used to automatically rebuild
+the HTML:
+
+```
+ibazel build //sphinxdocs/tests/sphinx_stardoc:docs
+```
+
+:::{toctree}
+:hidden:
+:glob:
+
+*
+genindex
+:::
diff --git a/sphinxdocs/tests/sphinx_stardoc/module_extension.md b/sphinxdocs/tests/sphinx_stardoc/module_extension.md
new file mode 100644
index 0000000..0335386
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/module_extension.md
@@ -0,0 +1,20 @@
+:::{default-domain} bzl
+:::
+
+:::{bzl:currentfile} //lang:extension.bzl
+:::
+
+
+# Module extension
+::::{bzl:module-extension} myext
+
+:::{bzl:tag-class} mytag(ta1, ta2)
+
+:attr ta1:
+  {type}`attr.string_list`
+  ta1 doc
+:attr ta2:
+  {type}`attr.label_list`
+  ta2 doc
+:::
+::::
diff --git a/sphinxdocs/tests/sphinx_stardoc/provider.md b/sphinxdocs/tests/sphinx_stardoc/provider.md
new file mode 100644
index 0000000..dac16f0
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/provider.md
@@ -0,0 +1,34 @@
+:::{default-domain} bzl
+:::
+
+:::{bzl:currentfile} //lang:provider.bzl
+:::
+
+
+# Provider
+
+below is a provider
+
+::::{bzl:provider} LangInfo
+
+my provider doc
+
+:::{bzl:function} LangInfo(mi1, mi2=None)
+
+:arg mi1:
+  {type}`depset[str]`
+  mi1 doc
+:arg mi2: mi2 doc
+  {type}`None | depset[File]`
+:::
+
+:::{bzl:provider-field} mi1
+:type: depset[str]
+
+The doc for mi1
+:::
+
+:::{bzl:provider-field} mi2
+:type: str
+:::
+::::
diff --git a/sphinxdocs/tests/sphinx_stardoc/repo_rule.md b/sphinxdocs/tests/sphinx_stardoc/repo_rule.md
new file mode 100644
index 0000000..0a909d6
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/repo_rule.md
@@ -0,0 +1,19 @@
+:::{default-domain} bzl
+:::
+
+:::{bzl:currentfile} //lang:repo_rule.bzl
+:::
+
+
+# Repo rule
+
+below is a repository rule
+
+:::{bzl:repo-rule} myreporule(rra1, rra2)
+
+:attr rra1: rra1 doc
+:attr rra2: rra2 doc
+
+:envvars: FOO, BAR
+
+:::
diff --git a/sphinxdocs/tests/sphinx_stardoc/rule.md b/sphinxdocs/tests/sphinx_stardoc/rule.md
new file mode 100644
index 0000000..0f90ed3
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/rule.md
@@ -0,0 +1,34 @@
+:::{default-domain} bzl
+:::
+
+:::{bzl:currentfile} //lang:rule.bzl
+:::
+
+
+# Rule
+
+Here is some module documentation
+
+Next, we're going to document some rules.
+
+::::{bzl:rule} my_rule(ra1, ra2=3)
+
+:attr ra1:
+  {bzl:default-value}`//foo:bar`
+  {type}`attr.label`
+  Docs for attribute ra1.
+
+  :::{bzl:attr-info} Info
+  :executable: true
+  :mandatory: true
+  :::
+
+  {required-providers}`"Display <//lang:provider.bzl%LangInfo>"`
+
+:attr ra2:
+  {type}`attr.label`
+  Docs for attribute ra2
+
+:provides: LangInfo
+
+::::
diff --git a/sphinxdocs/tests/sphinx_stardoc/target.md b/sphinxdocs/tests/sphinx_stardoc/target.md
new file mode 100644
index 0000000..447a5ac
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/target.md
@@ -0,0 +1,23 @@
+:::{default-domain} bzl
+:::
+
+:::{bzl:currentfile} //lang:BUILD.bazel
+:::
+
+# Target
+
+Here is some package documentation
+
+:::{bzl:target} relativetarget
+
+Some doc about relativetarget
+
+:::
+
+:::{bzl:target} //absolute:abstarget
+
+:::
+
+:::{bzl:flag} myflag
+
+:::
diff --git a/sphinxdocs/tests/sphinx_stardoc/xrefs.md b/sphinxdocs/tests/sphinx_stardoc/xrefs.md
new file mode 100644
index 0000000..9eb7b81
--- /dev/null
+++ b/sphinxdocs/tests/sphinx_stardoc/xrefs.md
@@ -0,0 +1,58 @@
+:::{default-domain} bzl
+:::
+
+# Xrefs
+
+Various tests of cross referencing support
+
+## Short name
+
+* function: {obj}`myfunc`
+* function arg: {obj}`myfunc.arg1`
+* rule: {obj}`my_rule`
+* rule attr: {obj}`my_rule.ra1`
+* provider: {obj}`LangInfo`
+
+## Fully qualified label without repo
+
+* function: {obj}`//lang:function.bzl%myfunc`
+* function arg: {obj}`//lang:function.bzl%myfunc.arg1`
+* rule: {obj}`//lang:rule.bzl%my_rule`
+* function: {obj}`//lang:rule.bzl%my_rule.ra1`
+* provider: {obj}`//lang:provider.bzl%LangInfo`
+* aspect: {obj}`//lang:aspect.bzl%myaspect`
+* target: {obj}`//lang:relativetarget`
+
+## Fully qualified label with repo
+
+* function: {obj}`@testrepo//lang:function.bzl%myfunc`
+* function arg: {obj}`@testrepo//lang:function.bzl%myfunc.arg1`
+* rule: {obj}`@testrepo//lang:rule.bzl%my_rule`
+* function: {obj}`@testrepo//lang:rule.bzl%my_rule.ra1`
+* provider: {obj}`@testrepo//lang:provider.bzl%LangInfo`
+* aspect: {obj}`@testrepo//lang:aspect.bzl%myaspect`
+* target: {obj}`@testrepo//lang:relativetarget`
+
+## Fully qualified dotted name with repo
+
+* function: {obj}`testrepo.lang.function.myfunc`
+* function arg: {obj}`testrepo.lang.function.myfunc.arg1`
+* rule: {obj}`testrepo.lang.rule.my_rule`
+* function: {obj}`testrepo.lang.rule.my_rule.ra1`
+* provider: {obj}`testrepo.lang.provider.LangInfo`
+
+## Fully qualified dotted name without repo
+
+* function: {obj}`lang.function.myfunc`
+* function arg: {obj}`lang.function.myfunc.arg1`
+* rule: {obj}`lang.rule.my_rule`
+* rule attr: {obj}`lang.rule.my_rule.ra1`
+* provider: {obj}`lang.provider.LangInfo`
+
+## Using origin keys
+
+* provider using `{type}`: {type}`"@rules_python//sphinxdocs/tests/sphinx_stardoc:bzl_rule.bzl%GenericInfo"`
+
+## Any xref
+
+* {any}`LangInfo`
diff --git a/tests/base_rules/BUILD.bazel b/tests/base_rules/BUILD.bazel
index aa21042..cd57715 100644
--- a/tests/base_rules/BUILD.bazel
+++ b/tests/base_rules/BUILD.bazel
@@ -11,3 +11,70 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
+load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")  # buildifier: disable=bzl-visibility
+load("//tests/support:sh_py_run_test.bzl", "py_reconfig_test", "sh_py_run_test")
+
+_SUPPORTS_BOOTSTRAP_SCRIPT = select({
+    "@platforms//os:windows": ["@platforms//:incompatible"],
+    "//conditions:default": [],
+}) if IS_BAZEL_7_OR_HIGHER else ["@platforms//:incompatible"]
+
+sh_py_run_test(
+    name = "run_binary_zip_no_test",
+    build_python_zip = "no",
+    py_src = "bin.py",
+    sh_src = "run_binary_zip_no_test.sh",
+)
+
+sh_py_run_test(
+    name = "run_binary_zip_yes_test",
+    build_python_zip = "yes",
+    py_src = "bin.py",
+    sh_src = "run_binary_zip_yes_test.sh",
+)
+
+sh_py_run_test(
+    name = "run_binary_bootstrap_script_zip_yes_test",
+    bootstrap_impl = "script",
+    build_python_zip = "yes",
+    py_src = "bin.py",
+    sh_src = "run_binary_zip_yes_test.sh",
+    target_compatible_with = _SUPPORTS_BOOTSTRAP_SCRIPT,
+)
+
+sh_py_run_test(
+    name = "run_binary_bootstrap_script_zip_no_test",
+    bootstrap_impl = "script",
+    build_python_zip = "no",
+    py_src = "bin.py",
+    sh_src = "run_binary_zip_no_test.sh",
+    target_compatible_with = _SUPPORTS_BOOTSTRAP_SCRIPT,
+)
+
+py_reconfig_test(
+    name = "sys_path_order_bootstrap_script_test",
+    srcs = ["sys_path_order_test.py"],
+    bootstrap_impl = "script",
+    env = {"BOOTSTRAP": "script"},
+    imports = ["./site-packages"],
+    main = "sys_path_order_test.py",
+    target_compatible_with = _SUPPORTS_BOOTSTRAP_SCRIPT,
+)
+
+py_reconfig_test(
+    name = "sys_path_order_bootstrap_system_python_test",
+    srcs = ["sys_path_order_test.py"],
+    bootstrap_impl = "system_python",
+    env = {"BOOTSTRAP": "system_python"},
+    imports = ["./site-packages"],
+    main = "sys_path_order_test.py",
+)
+
+sh_py_run_test(
+    name = "inherit_pythonsafepath_env_test",
+    bootstrap_impl = "script",
+    py_src = "bin.py",
+    sh_src = "inherit_pythonsafepath_env_test.sh",
+    target_compatible_with = _SUPPORTS_BOOTSTRAP_SCRIPT,
+)
diff --git a/tests/toolchains/workspace_template/python_version_test.py b/tests/base_rules/bin.py
similarity index 61%
copy from tests/toolchains/workspace_template/python_version_test.py
copy to tests/base_rules/bin.py
index c82611c..c46e43a 100644
--- a/tests/toolchains/workspace_template/python_version_test.py
+++ b/tests/base_rules/bin.py
@@ -1,4 +1,4 @@
-# Copyright 2022 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,14 +13,12 @@
 # limitations under the License.
 
 import os
-import platform
-import unittest
+import sys
 
-
-class TestPythonVersion(unittest.TestCase):
-    def test_match_toolchain(self):
-        self.assertEqual(platform.python_version(), os.getenv("PYTHON_VERSION"))
-
-
-if __name__ == "__main__":
-    unittest.main()
+print("Hello")
+print(
+    "RULES_PYTHON_ZIP_DIR:{}".format(sys._xoptions.get("RULES_PYTHON_ZIP_DIR", "UNSET"))
+)
+print("PYTHONSAFEPATH:", os.environ.get("PYTHONSAFEPATH", "UNSET") or "EMPTY")
+print("sys.flags.safe_path:", sys.flags.safe_path)
+print("file:", __file__)
diff --git a/tests/base_rules/inherit_pythonsafepath_env_test.sh b/tests/base_rules/inherit_pythonsafepath_env_test.sh
new file mode 100755
index 0000000..bc6e2d5
--- /dev/null
+++ b/tests/base_rules/inherit_pythonsafepath_env_test.sh
@@ -0,0 +1,69 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# --- begin runfiles.bash initialization v3 ---
+# Copy-pasted from the Bazel Bash runfiles library v3.
+set -uo pipefail; set +e; f=bazel_tools/tools/bash/runfiles/runfiles.bash
+source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \
+  source "$0.runfiles/$f" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
+  { echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e
+# --- end runfiles.bash initialization v3 ---
+set +e
+
+bin=$(rlocation "$BIN_RLOCATION")
+if [[ -z "$bin" ]]; then
+  echo "Unable to locate test binary: $BIN_RLOCATION"
+  exit 1
+fi
+
+
+function expect_match() {
+  local expected_pattern=$1
+  local actual=$2
+  if ! (echo "$actual" | grep "$expected_pattern" ) >/dev/null; then
+    echo "expected to match: $expected_pattern"
+    echo "===== actual START ====="
+    echo "$actual"
+    echo "===== actual END ====="
+    echo
+    touch EXPECTATION_FAILED
+    return 1
+  fi
+}
+
+
+echo "Check inherited and disabled"
+# Verify setting it to empty string disables safe path
+actual=$(PYTHONSAFEPATH= $bin 2>&1)
+expect_match "sys.flags.safe_path: False" "$actual"
+expect_match "PYTHONSAFEPATH: EMPTY" "$actual"
+
+echo "Check inherited and propagated"
+# Verify setting it to any string enables safe path and that
+# value is propagated
+actual=$(PYTHONSAFEPATH=OUTER $bin 2>&1)
+expect_match "sys.flags.safe_path: True" "$actual"
+expect_match "PYTHONSAFEPATH: OUTER" "$actual"
+
+echo "Check enabled by default"
+# Verifying doing nothing leaves safepath enabled by default
+actual=$($bin 2>&1)
+expect_match "sys.flags.safe_path: True" "$actual"
+expect_match "PYTHONSAFEPATH: 1" "$actual"
+
+# Exit if any of the expects failed
+[[ ! -e EXPECTATION_FAILED ]]
diff --git a/tests/base_rules/precompile/BUILD.bazel b/tests/base_rules/precompile/BUILD.bazel
new file mode 100644
index 0000000..201adba
--- /dev/null
+++ b/tests/base_rules/precompile/BUILD.bazel
@@ -0,0 +1,3 @@
+load(":precompile_tests.bzl", "precompile_test_suite")
+
+precompile_test_suite(name = "precompile_tests")
diff --git a/tests/base_rules/precompile/precompile_tests.bzl b/tests/base_rules/precompile/precompile_tests.bzl
new file mode 100644
index 0000000..5599f61
--- /dev/null
+++ b/tests/base_rules/precompile/precompile_tests.bzl
@@ -0,0 +1,323 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for precompiling behavior."""
+
+load("@rules_python_internal//:rules_python_config.bzl", rp_config = "config")
+load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("@rules_testing//lib:truth.bzl", "matching")
+load("@rules_testing//lib:util.bzl", rt_util = "util")
+load("//python:py_binary.bzl", "py_binary")
+load("//python:py_info.bzl", "PyInfo")
+load("//python:py_library.bzl", "py_library")
+load("//python:py_test.bzl", "py_test")
+load("//tests/base_rules:py_info_subject.bzl", "py_info_subject")
+load(
+    "//tests/support:support.bzl",
+    "CC_TOOLCHAIN",
+    "EXEC_TOOLS_TOOLCHAIN",
+    "PLATFORM_TOOLCHAIN",
+    "PRECOMPILE",
+    "PRECOMPILE_ADD_TO_RUNFILES",
+    "PRECOMPILE_SOURCE_RETENTION",
+)
+
+_TEST_TOOLCHAINS = [PLATFORM_TOOLCHAIN, CC_TOOLCHAIN]
+
+_tests = []
+
+def _test_precompile_enabled_setup(name, py_rule, **kwargs):
+    if not rp_config.enable_pystar:
+        rt_util.skip_test(name = name)
+        return
+    rt_util.helper_target(
+        py_rule,
+        name = name + "_subject",
+        precompile = "enabled",
+        srcs = ["main.py"],
+        deps = [name + "_lib"],
+        **kwargs
+    )
+    rt_util.helper_target(
+        py_library,
+        name = name + "_lib",
+        srcs = ["lib.py"],
+        precompile = "enabled",
+    )
+    analysis_test(
+        name = name,
+        impl = _test_precompile_enabled_impl,
+        target = name + "_subject",
+        config_settings = {
+            "//command_line_option:extra_toolchains": _TEST_TOOLCHAINS,
+            EXEC_TOOLS_TOOLCHAIN: "enabled",
+        },
+    )
+
+def _test_precompile_enabled_impl(env, target):
+    target = env.expect.that_target(target)
+    runfiles = target.runfiles()
+    runfiles.contains_predicate(
+        matching.str_matches("__pycache__/main.fakepy-45.pyc"),
+    )
+    runfiles.contains_predicate(
+        matching.str_matches("/main.py"),
+    )
+    target.default_outputs().contains_at_least_predicates([
+        matching.file_path_matches("__pycache__/main.fakepy-45.pyc"),
+        matching.file_path_matches("/main.py"),
+    ])
+    py_info = target.provider(PyInfo, factory = py_info_subject)
+    py_info.direct_pyc_files().contains_exactly([
+        "{package}/__pycache__/main.fakepy-45.pyc",
+    ])
+    py_info.transitive_pyc_files().contains_exactly([
+        "{package}/__pycache__/main.fakepy-45.pyc",
+        "{package}/__pycache__/lib.fakepy-45.pyc",
+    ])
+
+def _test_precompile_enabled_py_binary(name):
+    _test_precompile_enabled_setup(name = name, py_rule = py_binary, main = "main.py")
+
+_tests.append(_test_precompile_enabled_py_binary)
+
+def _test_precompile_enabled_py_test(name):
+    _test_precompile_enabled_setup(name = name, py_rule = py_test, main = "main.py")
+
+_tests.append(_test_precompile_enabled_py_test)
+
+def _test_precompile_enabled_py_library(name):
+    _test_precompile_enabled_setup(name = name, py_rule = py_library)
+
+_tests.append(_test_precompile_enabled_py_library)
+
+def _test_pyc_only(name):
+    if not rp_config.enable_pystar:
+        rt_util.skip_test(name = name)
+        return
+    rt_util.helper_target(
+        py_binary,
+        name = name + "_subject",
+        precompile = "enabled",
+        srcs = ["main.py"],
+        main = "main.py",
+        precompile_source_retention = "omit_source",
+    )
+    analysis_test(
+        name = name,
+        impl = _test_pyc_only_impl,
+        config_settings = {
+            "//command_line_option:extra_toolchains": _TEST_TOOLCHAINS,
+            ##PRECOMPILE_SOURCE_RETENTION: "omit_source",
+            EXEC_TOOLS_TOOLCHAIN: "enabled",
+            PRECOMPILE: "enabled",
+        },
+        target = name + "_subject",
+    )
+
+_tests.append(_test_pyc_only)
+
+def _test_pyc_only_impl(env, target):
+    target = env.expect.that_target(target)
+    runfiles = target.runfiles()
+    runfiles.contains_predicate(
+        matching.str_matches("/main.pyc"),
+    )
+    runfiles.not_contains_predicate(
+        matching.str_endswith("/main.py"),
+    )
+    target.default_outputs().contains_at_least_predicates([
+        matching.file_path_matches("/main.pyc"),
+    ])
+    target.default_outputs().not_contains_predicate(
+        matching.file_basename_equals("main.py"),
+    )
+
+def _test_precompile_if_generated(name):
+    if not rp_config.enable_pystar:
+        rt_util.skip_test(name = name)
+        return
+    rt_util.helper_target(
+        py_binary,
+        name = name + "_subject",
+        srcs = [
+            "main.py",
+            rt_util.empty_file("generated1.py"),
+        ],
+        main = "main.py",
+        precompile = "if_generated_source",
+    )
+    analysis_test(
+        name = name,
+        impl = _test_precompile_if_generated_impl,
+        target = name + "_subject",
+        config_settings = {
+            "//command_line_option:extra_toolchains": _TEST_TOOLCHAINS,
+            EXEC_TOOLS_TOOLCHAIN: "enabled",
+        },
+    )
+
+_tests.append(_test_precompile_if_generated)
+
+def _test_precompile_if_generated_impl(env, target):
+    target = env.expect.that_target(target)
+    runfiles = target.runfiles()
+    runfiles.contains_predicate(
+        matching.str_matches("/__pycache__/generated1.fakepy-45.pyc"),
+    )
+    runfiles.not_contains_predicate(
+        matching.str_matches("main.*pyc"),
+    )
+    target.default_outputs().contains_at_least_predicates([
+        matching.file_path_matches("/__pycache__/generated1.fakepy-45.pyc"),
+    ])
+    target.default_outputs().not_contains_predicate(
+        matching.file_path_matches("main.*pyc"),
+    )
+
+def _test_omit_source_if_generated_source(name):
+    if not rp_config.enable_pystar:
+        rt_util.skip_test(name = name)
+        return
+    rt_util.helper_target(
+        py_binary,
+        name = name + "_subject",
+        srcs = [
+            "main.py",
+            rt_util.empty_file("generated2.py"),
+        ],
+        main = "main.py",
+        precompile = "enabled",
+    )
+    analysis_test(
+        name = name,
+        impl = _test_omit_source_if_generated_source_impl,
+        target = name + "_subject",
+        config_settings = {
+            "//command_line_option:extra_toolchains": _TEST_TOOLCHAINS,
+            PRECOMPILE_SOURCE_RETENTION: "omit_if_generated_source",
+            EXEC_TOOLS_TOOLCHAIN: "enabled",
+        },
+    )
+
+_tests.append(_test_omit_source_if_generated_source)
+
+def _test_omit_source_if_generated_source_impl(env, target):
+    target = env.expect.that_target(target)
+    runfiles = target.runfiles()
+    runfiles.contains_predicate(
+        matching.str_matches("/generated2.pyc"),
+    )
+    runfiles.contains_predicate(
+        matching.str_matches("__pycache__/main.fakepy-45.pyc"),
+    )
+    target.default_outputs().contains_at_least_predicates([
+        matching.file_path_matches("generated2.pyc"),
+    ])
+    target.default_outputs().contains_predicate(
+        matching.file_path_matches("__pycache__/main.fakepy-45.pyc"),
+    )
+
+def _test_precompile_add_to_runfiles_decided_elsewhere(name):
+    if not rp_config.enable_pystar:
+        rt_util.skip_test(name = name)
+        return
+    rt_util.helper_target(
+        py_binary,
+        name = name + "_binary",
+        srcs = ["bin.py"],
+        main = "bin.py",
+        deps = [name + "_lib"],
+        pyc_collection = "include_pyc",
+    )
+    rt_util.helper_target(
+        py_library,
+        name = name + "_lib",
+        srcs = ["lib.py"],
+    )
+    analysis_test(
+        name = name,
+        impl = _test_precompile_add_to_runfiles_decided_elsewhere_impl,
+        targets = {
+            "binary": name + "_binary",
+            "library": name + "_lib",
+        },
+        config_settings = {
+            "//command_line_option:extra_toolchains": _TEST_TOOLCHAINS,
+            PRECOMPILE_ADD_TO_RUNFILES: "decided_elsewhere",
+            PRECOMPILE: "enabled",
+            EXEC_TOOLS_TOOLCHAIN: "enabled",
+        },
+    )
+
+_tests.append(_test_precompile_add_to_runfiles_decided_elsewhere)
+
+def _test_precompile_add_to_runfiles_decided_elsewhere_impl(env, targets):
+    env.expect.that_target(targets.binary).runfiles().contains_at_least([
+        "{workspace}/tests/base_rules/precompile/__pycache__/bin.fakepy-45.pyc",
+        "{workspace}/tests/base_rules/precompile/__pycache__/lib.fakepy-45.pyc",
+        "{workspace}/tests/base_rules/precompile/bin.py",
+        "{workspace}/tests/base_rules/precompile/lib.py",
+    ])
+
+    env.expect.that_target(targets.library).runfiles().contains_exactly([
+        "{workspace}/tests/base_rules/precompile/lib.py",
+    ])
+
+def _test_precompiler_action(name):
+    if not rp_config.enable_pystar:
+        rt_util.skip_test(name = name)
+        return
+    rt_util.helper_target(
+        py_binary,
+        name = name + "_subject",
+        srcs = ["main2.py"],
+        main = "main2.py",
+        precompile = "enabled",
+        precompile_optimize_level = 2,
+        precompile_invalidation_mode = "unchecked_hash",
+    )
+    analysis_test(
+        name = name,
+        impl = _test_precompiler_action_impl,
+        target = name + "_subject",
+        config_settings = {
+            "//command_line_option:extra_toolchains": _TEST_TOOLCHAINS,
+            EXEC_TOOLS_TOOLCHAIN: "enabled",
+        },
+    )
+
+_tests.append(_test_precompiler_action)
+
+def _test_precompiler_action_impl(env, target):
+    #env.expect.that_target(target).runfiles().contains_exactly([])
+    action = env.expect.that_target(target).action_named("PyCompile")
+    action.contains_flag_values([
+        ("--optimize", "2"),
+        ("--python_version", "4.5"),
+        ("--invalidation_mode", "unchecked_hash"),
+    ])
+    action.has_flags_specified(["--src", "--pyc", "--src_name"])
+    action.env().contains_at_least({
+        "PYTHONHASHSEED": "0",
+        "PYTHONNOUSERSITE": "1",
+        "PYTHONSAFEPATH": "1",
+    })
+
+def precompile_test_suite(name):
+    test_suite(
+        name = name,
+        tests = _tests,
+    )
diff --git a/tests/base_rules/py_executable_base_tests.bzl b/tests/base_rules/py_executable_base_tests.bzl
index 3960579..eb1a1b6 100644
--- a/tests/base_rules/py_executable_base_tests.bzl
+++ b/tests/base_rules/py_executable_base_tests.bzl
@@ -13,13 +13,17 @@
 # limitations under the License.
 """Tests common to py_binary and py_test (executable rules)."""
 
+load("@rules_python//python:py_runtime_info.bzl", RulesPythonPyRuntimeInfo = "PyRuntimeInfo")
 load("@rules_python_internal//:rules_python_config.bzl", rp_config = "config")
 load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
 load("@rules_testing//lib:truth.bzl", "matching")
 load("@rules_testing//lib:util.bzl", rt_util = "util")
+load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")  # buildifier: disable=bzl-visibility
 load("//tests/base_rules:base_tests.bzl", "create_base_tests")
 load("//tests/base_rules:util.bzl", "WINDOWS_ATTR", pt_util = "util")
-load("//tests/support:test_platforms.bzl", "WINDOWS")
+load("//tests/support:support.bzl", "LINUX_X86_64", "WINDOWS_X86_64")
+
+_BuiltinPyRuntimeInfo = PyRuntimeInfo
 
 _tests = []
 
@@ -47,7 +51,7 @@
             "//command_line_option:cpu": "windows_x86_64",
             "//command_line_option:crosstool_top": Label("//tests/cc:cc_toolchain_suite"),
             "//command_line_option:extra_toolchains": [str(Label("//tests/cc:all"))],
-            "//command_line_option:platforms": [WINDOWS],
+            "//command_line_option:platforms": [WINDOWS_X86_64],
         },
         attr_values = {"target_compatible_with": target_compatible_with},
     )
@@ -64,6 +68,50 @@
 
 _tests.append(_test_basic_windows)
 
+def _test_basic_zip(name, config):
+    if rp_config.enable_pystar:
+        target_compatible_with = select({
+            # Disable the new test on windows because we have _test_basic_windows.
+            "@platforms//os:windows": ["@platforms//:incompatible"],
+            "//conditions:default": [],
+        })
+    else:
+        target_compatible_with = ["@platforms//:incompatible"]
+    rt_util.helper_target(
+        config.rule,
+        name = name + "_subject",
+        srcs = ["main.py"],
+        main = "main.py",
+    )
+    analysis_test(
+        name = name,
+        impl = _test_basic_zip_impl,
+        target = name + "_subject",
+        config_settings = {
+            # NOTE: The default for this flag is based on the Bazel host OS, not
+            # the target platform. For windows, it defaults to true, so force
+            # it to that to match behavior when this test runs on other
+            # platforms.
+            "//command_line_option:build_python_zip": "true",
+            "//command_line_option:cpu": "linux_x86_64",
+            "//command_line_option:crosstool_top": Label("//tests/cc:cc_toolchain_suite"),
+            "//command_line_option:extra_toolchains": [str(Label("//tests/cc:all"))],
+            "//command_line_option:platforms": [LINUX_X86_64],
+        },
+        attr_values = {"target_compatible_with": target_compatible_with},
+    )
+
+def _test_basic_zip_impl(env, target):
+    target = env.expect.that_target(target)
+    target.runfiles().contains_predicate(matching.str_endswith(
+        target.meta.format_str("/{name}.zip"),
+    ))
+    target.runfiles().contains_predicate(matching.str_endswith(
+        target.meta.format_str("/{name}"),
+    ))
+
+_tests.append(_test_basic_zip)
+
 def _test_executable_in_runfiles(name, config):
     rt_util.helper_target(
         config.rule,
@@ -250,6 +298,16 @@
             "{package}/{test_name}_subject.py",
         ])
 
+        if IS_BAZEL_7_OR_HIGHER:
+            # As of Bazel 7, the first default output is the executable, so
+            # verify that is the case. rules_testing
+            # DepsetFileSubject.contains_exactly doesn't provide an in_order()
+            # call, nor access to the underlying depset, so we have to do things
+            # manually.
+            first_default_output = target[DefaultInfo].files.to_list()[0]
+            executable = target[DefaultInfo].files_to_run.executable
+            env.expect.that_file(first_default_output).equals(executable)
+
 def _test_name_cannot_end_in_py(name, config):
     # Bazel 5 will crash with a Java stacktrace when the native Python
     # rules have an error.
@@ -275,6 +333,28 @@
         matching.str_matches("name must not end in*.py"),
     )
 
+def _test_py_runtime_info_provided(name, config):
+    rt_util.helper_target(
+        config.rule,
+        name = name + "_subject",
+        srcs = [name + "_subject.py"],
+    )
+    analysis_test(
+        name = name,
+        impl = _test_py_runtime_info_provided_impl,
+        target = name + "_subject",
+    )
+
+def _test_py_runtime_info_provided_impl(env, target):
+    # Make sure that the rules_python loaded symbol is provided.
+    env.expect.that_target(target).has_provider(RulesPythonPyRuntimeInfo)
+
+    # For compatibility during the transition, the builtin PyRuntimeInfo should
+    # also be provided.
+    env.expect.that_target(target).has_provider(_BuiltinPyRuntimeInfo)
+
+_tests.append(_test_py_runtime_info_provided)
+
 # Can't test this -- mandatory validation happens before analysis test
 # can intercept it
 # TODO(#1069): Once re-implemented in Starlark, modify rule logic to make this
diff --git a/tests/base_rules/py_info_subject.bzl b/tests/base_rules/py_info_subject.bzl
index b23308c..bfed0b3 100644
--- a/tests/base_rules/py_info_subject.bzl
+++ b/tests/base_rules/py_info_subject.bzl
@@ -31,9 +31,11 @@
     # buildifier: disable=uninitialized
     public = struct(
         # go/keep-sorted start
+        direct_pyc_files = lambda *a, **k: _py_info_subject_direct_pyc_files(self, *a, **k),
         has_py2_only_sources = lambda *a, **k: _py_info_subject_has_py2_only_sources(self, *a, **k),
         has_py3_only_sources = lambda *a, **k: _py_info_subject_has_py3_only_sources(self, *a, **k),
         imports = lambda *a, **k: _py_info_subject_imports(self, *a, **k),
+        transitive_pyc_files = lambda *a, **k: _py_info_subject_transitive_pyc_files(self, *a, **k),
         transitive_sources = lambda *a, **k: _py_info_subject_transitive_sources(self, *a, **k),
         uses_shared_libraries = lambda *a, **k: _py_info_subject_uses_shared_libraries(self, *a, **k),
         # go/keep-sorted end
@@ -44,6 +46,16 @@
     )
     return public
 
+def _py_info_subject_direct_pyc_files(self):
+    """Returns a `DepsetFileSubject` for the `direct_pyc_files` attribute.
+
+    Method: PyInfoSubject.direct_pyc_files
+    """
+    return subjects.depset_file(
+        self.actual.direct_pyc_files,
+        meta = self.meta.derive("direct_pyc_files()"),
+    )
+
 def _py_info_subject_has_py2_only_sources(self):
     """Returns a `BoolSubject` for the `has_py2_only_sources` attribute.
 
@@ -74,6 +86,16 @@
         meta = self.meta.derive("imports()"),
     )
 
+def _py_info_subject_transitive_pyc_files(self):
+    """Returns a `DepsetFileSubject` for the `transitive_pyc_files` attribute.
+
+    Method: PyInfoSubject.transitive_pyc_files
+    """
+    return subjects.depset_file(
+        self.actual.transitive_pyc_files,
+        meta = self.meta.derive("transitive_pyc_files()"),
+    )
+
 def _py_info_subject_transitive_sources(self):
     """Returns a `DepsetFileSubject` for the `transitive_sources` attribute.
 
diff --git a/tests/base_rules/py_test/py_test_tests.bzl b/tests/base_rules/py_test/py_test_tests.bzl
index f4b704e..c77bd7e 100644
--- a/tests/base_rules/py_test/py_test_tests.bzl
+++ b/tests/base_rules/py_test/py_test_tests.bzl
@@ -21,13 +21,26 @@
     "create_executable_tests",
 )
 load("//tests/base_rules:util.bzl", pt_util = "util")
-load("//tests/support:test_platforms.bzl", "LINUX", "MAC")
+load("//tests/support:support.bzl", "LINUX_X86_64", "MAC_X86_64")
 
 # Explicit Label() calls are required so that it resolves in @rules_python
 # context instead of @rules_testing context.
 _FAKE_CC_TOOLCHAIN = Label("//tests/cc:cc_toolchain_suite")
 _FAKE_CC_TOOLCHAINS = [str(Label("//tests/cc:all"))]
 
+# The Windows CI currently runs as root, which breaks when
+# the analysis tests try to install (but not use, because
+# these are analysis tests) a runtime for another platform.
+# This is because the toolchain install has an assert to
+# verify the runtime install is read-only, which it can't
+# be when running as root.
+_SKIP_WINDOWS = {
+    "target_compatible_with": select({
+        "@platforms//os:windows": ["@platforms//:incompatible"],
+        "//conditions:default": [],
+    }),
+}
+
 _tests = []
 
 def _test_mac_requires_darwin_for_execution(name, config):
@@ -52,8 +65,9 @@
             "//command_line_option:cpu": "darwin_x86_64",
             "//command_line_option:crosstool_top": _FAKE_CC_TOOLCHAIN,
             "//command_line_option:extra_toolchains": _FAKE_CC_TOOLCHAINS,
-            "//command_line_option:platforms": [MAC],
+            "//command_line_option:platforms": [MAC_X86_64],
         },
+        attr_values = _SKIP_WINDOWS,
     )
 
 def _test_mac_requires_darwin_for_execution_impl(env, target):
@@ -84,8 +98,9 @@
             "//command_line_option:cpu": "k8",
             "//command_line_option:crosstool_top": _FAKE_CC_TOOLCHAIN,
             "//command_line_option:extra_toolchains": _FAKE_CC_TOOLCHAINS,
-            "//command_line_option:platforms": [LINUX],
+            "//command_line_option:platforms": [LINUX_X86_64],
         },
+        attr_values = _SKIP_WINDOWS,
     )
 
 def _test_non_mac_doesnt_require_darwin_for_execution_impl(env, target):
diff --git a/tests/base_rules/run_binary_zip_no_test.sh b/tests/base_rules/run_binary_zip_no_test.sh
new file mode 100755
index 0000000..2ee69f3
--- /dev/null
+++ b/tests/base_rules/run_binary_zip_no_test.sh
@@ -0,0 +1,43 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# --- begin runfiles.bash initialization v3 ---
+# Copy-pasted from the Bazel Bash runfiles library v3.
+set -uo pipefail; set +e; f=bazel_tools/tools/bash/runfiles/runfiles.bash
+source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \
+  source "$0.runfiles/$f" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
+  { echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e
+# --- end runfiles.bash initialization v3 ---
+set +e
+
+bin=$(rlocation $BIN_RLOCATION)
+if [[ -z "$bin" ]]; then
+  echo "Unable to locate test binary: $BIN_RLOCATION"
+  exit 1
+fi
+actual=$($bin 2>&1)
+
+# How we detect whether the binary was executed from a zip file depends on
+# which bootstrap is used.
+# bootstrap_impl=script outputs RULES_PYTHON_ZIP_DIR=<somepath>
+# bootstrap_impl=system_python outputs file:.*Bazel.runfiles
+expected_pattern="Hello"
+if ! (echo "$actual" | grep "$expected_pattern" ) >/dev/null; then
+  echo "expected output to match: $expected_pattern"
+  echo "but got:\n$actual"
+  exit 1
+fi
diff --git a/tests/base_rules/run_binary_zip_yes_test.sh b/tests/base_rules/run_binary_zip_yes_test.sh
new file mode 100755
index 0000000..ca27808
--- /dev/null
+++ b/tests/base_rules/run_binary_zip_yes_test.sh
@@ -0,0 +1,44 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# --- begin runfiles.bash initialization v3 ---
+# Copy-pasted from the Bazel Bash runfiles library v3.
+set -uo pipefail; set +e; f=bazel_tools/tools/bash/runfiles/runfiles.bash
+source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \
+  source "$0.runfiles/$f" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
+  { echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e
+# --- end runfiles.bash initialization v3 ---
+set +e
+
+bin=$(rlocation $BIN_RLOCATION)
+if [[ -z "$bin" ]]; then
+  echo "Unable to locate test binary: $BIN_RLOCATION"
+  exit 1
+fi
+actual=$($bin)
+
+# How we detect whether the binary was executed from a zip file depends on
+# which bootstrap is used.
+# bootstrap_impl=script outputs RULES_PYTHON_ZIP_DIR:<somepath>
+# bootstrap_impl=system_python outputs file:.*Bazel.runfiles
+expected_pattern="RULES_PYTHON_ZIP_DIR:/\|file:.*Bazel.runfiles"
+if ! (echo "$actual" | grep "$expected_pattern" ) >/dev/null; then
+  echo "expected output to match: $expected_pattern"
+  echo "but got: $actual"
+  exit 1
+fi
+
diff --git a/tests/base_rules/run_zip_test.sh b/tests/base_rules/run_zip_test.sh
new file mode 100755
index 0000000..64857e6
--- /dev/null
+++ b/tests/base_rules/run_zip_test.sh
@@ -0,0 +1,38 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# --- begin runfiles.bash initialization v3 ---
+# Copy-pasted from the Bazel Bash runfiles library v3.
+set -uo pipefail; set +e; f=bazel_tools/tools/bash/runfiles/runfiles.bash
+source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \
+  source "$0.runfiles/$f" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
+  source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
+  { echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e
+# --- end runfiles.bash initialization v3 ---
+set +e
+
+bin=$(rlocation _main/tests/base_rules/_run_zip_test_bin)
+if [[ -z "$bin" ]]; then
+  echo "Unable to locate test binary"
+  exit 1
+fi
+actual=$($bin)
+
+if [[ ! "$actual" == RULES_PYTHON_ZIP_DIR=/* ]]; then
+  echo "expected output: RULES_PYTHON_ZIP_DIR=<some path>"
+  echo "but got: $actual"
+  exit 1
+fi
diff --git a/tests/base_rules/sys_path_order_test.py b/tests/base_rules/sys_path_order_test.py
new file mode 100644
index 0000000..2e33464
--- /dev/null
+++ b/tests/base_rules/sys_path_order_test.py
@@ -0,0 +1,88 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os.path
+import re
+import sys
+import unittest
+
+
+class SysPathOrderTest(unittest.TestCase):
+    def test_sys_path_order(self):
+        last_stdlib = None
+        first_user = None
+        first_runtime_site = None
+
+        # Classify paths into the three different types we care about: stdlib,
+        # user dependency, or the runtime's site-package's directory.
+        #
+        # Because they often share common prefixes with one another, and vary
+        # subtly between platforms, we do this in two passes: first categorize,
+        # then pick out the indexes. This is just so debugging is easier and
+        # error messages are more informative.
+        categorized_paths = []
+        for i, value in enumerate(sys.path):
+            # The runtime's root repo may be added to sys.path, but it
+            # counts as a user directory, not stdlib directory.
+            if value == sys.prefix:
+                category = "user"
+            elif value.startswith(sys.prefix):
+                # The runtime's site-package directory might be called
+                # dist-packages when using Debian's system python.
+                if os.path.basename(value).endswith("-packages"):
+                    category = "runtime-site"
+                else:
+                    category = "stdlib"
+            else:
+                category = "user"
+
+            categorized_paths.append((category, value))
+
+        for i, (category, _) in enumerate(categorized_paths):
+            if category == "stdlib":
+                last_stdlib = i
+            elif category == "runtime-site":
+                if first_runtime_site is None:
+                    first_runtime_site = i
+            elif category == "user":
+                if first_user is None:
+                    first_user = i
+
+        sys_path_str = "\n".join(
+            f"{i}: ({category}) {value}"
+            for i, (category, value) in enumerate(categorized_paths)
+        )
+        if None in (last_stdlib, first_user, first_runtime_site):
+            self.fail(
+                "Failed to find position for one of:\n"
+                + f"{last_stdlib=} {first_user=} {first_runtime_site=}\n"
+                + f"for sys.path:\n{sys_path_str}"
+            )
+
+        if os.environ["BOOTSTRAP"] == "script":
+            self.assertTrue(
+                last_stdlib < first_user < first_runtime_site,
+                f"Expected {last_stdlib=} < {first_user=} < {first_runtime_site=}\n"
+                + f"for sys.path:\n{sys_path_str}",
+            )
+        else:
+            self.assertTrue(
+                first_user < last_stdlib < first_runtime_site,
+                f"Expected {first_user=} < {last_stdlib=} < {first_runtime_site=}\n"
+                + f"for sys.path:\n{sys_path_str}",
+            )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/cc/BUILD.bazel b/tests/cc/BUILD.bazel
index ef64d6d..889f9e0 100644
--- a/tests/cc/BUILD.bazel
+++ b/tests/cc/BUILD.bazel
@@ -21,6 +21,12 @@
 
 exports_files(["fake_header.h"])
 
+filegroup(
+    name = "libpython",
+    srcs = ["libpython-fake.so"],
+    tags = PREVENT_IMPLICIT_BUILDING_TAGS,
+)
+
 toolchain(
     name = "fake_py_cc_toolchain",
     tags = PREVENT_IMPLICIT_BUILDING_TAGS,
@@ -31,6 +37,7 @@
 py_cc_toolchain(
     name = "fake_py_cc_toolchain_impl",
     headers = ":fake_headers",
+    libs = ":fake_libs",
     python_version = "3.999",
     tags = PREVENT_IMPLICIT_BUILDING_TAGS,
 )
@@ -44,6 +51,14 @@
     tags = PREVENT_IMPLICIT_BUILDING_TAGS,
 )
 
+# buildifier: disable=native-cc
+cc_library(
+    name = "fake_libs",
+    srcs = ["libpython3.so"],
+    data = ["libdata.txt"],
+    tags = PREVENT_IMPLICIT_BUILDING_TAGS,
+)
+
 cc_toolchain_suite(
     name = "cc_toolchain_suite",
     tags = ["manual"],
diff --git a/tests/cc/current_py_cc_headers/current_py_cc_headers_tests.bzl b/tests/cc/current_py_cc_headers/current_py_cc_headers_tests.bzl
index 931a9c1..9aeec38 100644
--- a/tests/cc/current_py_cc_headers/current_py_cc_headers_tests.bzl
+++ b/tests/cc/current_py_cc_headers/current_py_cc_headers_tests.bzl
@@ -14,6 +14,7 @@
 
 """Tests for current_py_cc_headers."""
 
+load("@rules_cc//cc:defs.bzl", "CcInfo")
 load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite")
 load("@rules_testing//lib:truth.bzl", "matching")
 load("//tests:cc_info_subject.bzl", "cc_info_subject")
diff --git a/tests/cc/current_py_cc_libs/BUILD.bazel b/tests/cc/current_py_cc_libs/BUILD.bazel
new file mode 100644
index 0000000..2180555
--- /dev/null
+++ b/tests/cc/current_py_cc_libs/BUILD.bazel
@@ -0,0 +1,35 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load(":current_py_cc_libs_tests.bzl", "current_py_cc_libs_test_suite")
+
+current_py_cc_libs_test_suite(name = "current_py_cc_libs_tests")
+
+# buildifier: disable=native-cc
+cc_test(
+    name = "python_libs_linking_test",
+    srcs = ["python_libs_linking_test.cc"],
+    # Windows fails with linking errors, but it's not clear why; someone
+    # with more C + Windows experience will have to figure it out.
+    # - rickeylev@
+    target_compatible_with = select({
+        "@platforms//os:linux": [],
+        "@platforms//os:osx": [],
+        "//conditions:default": ["@platforms//:incompatible"],
+    }),
+    deps = [
+        "@rules_python//python/cc:current_py_cc_headers",
+        "@rules_python//python/cc:current_py_cc_libs",
+    ],
+)
diff --git a/tests/cc/current_py_cc_libs/current_py_cc_libs_tests.bzl b/tests/cc/current_py_cc_libs/current_py_cc_libs_tests.bzl
new file mode 100644
index 0000000..44615ee
--- /dev/null
+++ b/tests/cc/current_py_cc_libs/current_py_cc_libs_tests.bzl
@@ -0,0 +1,78 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for current_py_cc_libs."""
+
+load("@rules_cc//cc:defs.bzl", "CcInfo")
+load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite")
+load("@rules_testing//lib:truth.bzl", "matching")
+load("//tests:cc_info_subject.bzl", "cc_info_subject")
+
+_tests = []
+
+def _test_current_toolchain_libs(name):
+    analysis_test(
+        name = name,
+        impl = _test_current_toolchain_libs_impl,
+        target = "//python/cc:current_py_cc_libs",
+        config_settings = {
+            "//command_line_option:extra_toolchains": [str(Label("//tests/cc:all"))],
+        },
+        attrs = {
+            "lib": attr.label(
+                default = "//tests/cc:libpython",
+                allow_single_file = True,
+            ),
+        },
+    )
+
+def _test_current_toolchain_libs_impl(env, target):
+    # Check that the forwarded CcInfo looks vaguely correct.
+    cc_info = env.expect.that_target(target).provider(
+        CcInfo,
+        factory = cc_info_subject,
+    )
+    cc_info.linking_context().linker_inputs().has_size(2)
+
+    # Check that the forwarded DefaultInfo looks correct
+    env.expect.that_target(target).runfiles().contains_predicate(
+        matching.str_matches("*/libdata.txt"),
+    )
+
+    # The shared library should also end up in runfiles
+    # The `_solib` directory is a special directory CC rules put
+    # libraries into.
+    env.expect.that_target(target).runfiles().contains_predicate(
+        matching.str_matches("*_solib*/libpython3.so"),
+    )
+
+_tests.append(_test_current_toolchain_libs)
+
+def _test_toolchain_is_registered_by_default(name):
+    analysis_test(
+        name = name,
+        impl = _test_toolchain_is_registered_by_default_impl,
+        target = "//python/cc:current_py_cc_libs",
+    )
+
+def _test_toolchain_is_registered_by_default_impl(env, target):
+    env.expect.that_target(target).has_provider(CcInfo)
+
+_tests.append(_test_toolchain_is_registered_by_default)
+
+def current_py_cc_libs_test_suite(name):
+    test_suite(
+        name = name,
+        tests = _tests,
+    )
diff --git a/tests/cc/current_py_cc_libs/python_libs_linking_test.cc b/tests/cc/current_py_cc_libs/python_libs_linking_test.cc
new file mode 100644
index 0000000..1ecce08
--- /dev/null
+++ b/tests/cc/current_py_cc_libs/python_libs_linking_test.cc
@@ -0,0 +1,18 @@
+#include <Python.h>
+
+int main(int argc, char** argv) {
+  // Early return to prevent the broken code below from running.
+  if (argc >= 1) {
+    return 0;
+  }
+
+  // The below code won't actually run. We just reference some Python
+  // symbols so the compiler and linker do some work to verify they are
+  // able to resolve the symbols.
+  // To make it actually run, more custom initialization is necessary.
+  // See https://docs.python.org/3/c-api/intro.html#embedding-python
+  Py_Initialize();
+  PyRun_SimpleString("print('Hello, world')\n");
+  Py_Finalize();
+  return 0;
+}
diff --git a/tests/cc/fake_cc_toolchain_config.bzl b/tests/cc/fake_cc_toolchain_config.bzl
index b3214a6..a2ad615 100644
--- a/tests/cc/fake_cc_toolchain_config.bzl
+++ b/tests/cc/fake_cc_toolchain_config.bzl
@@ -14,6 +14,8 @@
 
 """Fake for providing CcToolchainConfigInfo."""
 
+load("@rules_cc//cc:defs.bzl", "cc_common")
+
 def _impl(ctx):
     return cc_common.create_cc_toolchain_config_info(
         ctx = ctx,
diff --git a/tests/cc/py_cc_toolchain/py_cc_toolchain_tests.bzl b/tests/cc/py_cc_toolchain/py_cc_toolchain_tests.bzl
index 609518d..fe83bf2 100644
--- a/tests/cc/py_cc_toolchain/py_cc_toolchain_tests.bzl
+++ b/tests/cc/py_cc_toolchain/py_cc_toolchain_tests.bzl
@@ -15,7 +15,7 @@
 """Tests for py_cc_toolchain."""
 
 load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite")
-load("@rules_testing//lib:truth.bzl", "matching")
+load("@rules_testing//lib:truth.bzl", "matching", "subjects")
 load("//tests:cc_info_subject.bzl", "cc_info_subject")
 load("//tests:default_info_subject.bzl", "default_info_subject")
 load("//tests:py_cc_toolchain_info_subject.bzl", "PyCcToolchainInfoSubject")
@@ -74,6 +74,19 @@
         matching.str_matches("*/cc/data.txt"),
     )
 
+    libs_providers = toolchain.libs().providers_map()
+    libs_providers.keys().contains_exactly(["CcInfo", "DefaultInfo"])
+
+    cc_info = libs_providers.get("CcInfo", factory = cc_info_subject)
+
+    cc_info.linking_context().linker_inputs().has_size(2)
+
+    default_info = libs_providers.get("DefaultInfo", factory = subjects.default_info)
+    default_info.runfiles().contains("{workspace}/tests/cc/libdata.txt")
+    default_info.runfiles().contains_predicate(
+        matching.str_matches("/libpython3."),
+    )
+
 _tests.append(_py_cc_toolchain_test)
 
 def py_cc_toolchain_test_suite(name):
diff --git a/tests/cc_info_subject.bzl b/tests/cc_info_subject.bzl
index 31ac03a..e33ccb8 100644
--- a/tests/cc_info_subject.bzl
+++ b/tests/cc_info_subject.bzl
@@ -29,7 +29,9 @@
     # buildifier: disable=uninitialized
     public = struct(
         # go/keep-sorted start
+        actual = info,
         compilation_context = lambda *a, **k: _cc_info_subject_compilation_context(self, *a, **k),
+        linking_context = lambda *a, **k: _cc_info_subject_linking_context(self, *a, **k),
         # go/keep-sorted end
     )
     self = struct(
@@ -52,6 +54,20 @@
         meta = self.meta.derive("compilation_context()"),
     )
 
+def _cc_info_subject_linking_context(self):
+    """Returns the CcInfo.linking_context as a subject.
+
+    Args:
+        self: implicitly added.
+
+    Returns:
+        [`LinkingContextSubject`] instance.
+    """
+    return _linking_context_subject_new(
+        self.actual.linking_context,
+        meta = self.meta.derive("linking_context()"),
+    )
+
 def _compilation_context_subject_new(info, *, meta):
     """Creates a CompilationContextSubject.
 
@@ -126,3 +142,42 @@
         container_name = "includes",
         element_plural_name = "include paths",
     )
+
+def _linking_context_subject_new(info, meta):
+    """Creates a LinkingContextSubject.
+
+    Args:
+        info: ([`LinkingContext`]) object instance.
+        meta: rules_testing `ExpectMeta` instance.
+
+    Returns:
+        [`LinkingContextSubject`] object.
+    """
+
+    # buildifier: disable=uninitialized
+    public = struct(
+        # go/keep-sorted start
+        linker_inputs = lambda *a, **k: _linking_context_subject_linker_inputs(self, *a, **k),
+        # go/keep-sorted end
+    )
+    self = struct(
+        actual = info,
+        meta = meta,
+    )
+    return public
+
+def _linking_context_subject_linker_inputs(self):
+    """Returns the linker inputs.
+
+    Args:
+        self: implicitly added
+
+    Returns:
+        [`CollectionSubject`] of the linker inputs.
+    """
+    return subjects.collection(
+        self.actual.linker_inputs.to_list(),
+        meta = self.meta.derive("linker_inputs()"),
+        container_name = "linker_inputs",
+        element_plural_name = "linker input values",
+    )
diff --git a/tests/toolchains/workspace_template/python_version_test.py b/tests/config_settings/BUILD.bazel
similarity index 69%
rename from tests/toolchains/workspace_template/python_version_test.py
rename to tests/config_settings/BUILD.bazel
index c82611c..212e3f7 100644
--- a/tests/toolchains/workspace_template/python_version_test.py
+++ b/tests/config_settings/BUILD.bazel
@@ -12,15 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-import platform
-import unittest
+load(":construct_config_settings_tests.bzl", "construct_config_settings_test_suite")
 
-
-class TestPythonVersion(unittest.TestCase):
-    def test_match_toolchain(self):
-        self.assertEqual(platform.python_version(), os.getenv("PYTHON_VERSION"))
-
-
-if __name__ == "__main__":
-    unittest.main()
+construct_config_settings_test_suite(
+    name = "construct_config_settings_tests",
+)
diff --git a/tests/config_settings/construct_config_settings_tests.bzl b/tests/config_settings/construct_config_settings_tests.bzl
new file mode 100644
index 0000000..b1b2e06
--- /dev/null
+++ b/tests/config_settings/construct_config_settings_tests.bzl
@@ -0,0 +1,211 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for construction of Python version matching config settings."""
+
+load("@//python:versions.bzl", "MINOR_MAPPING")
+load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("@rules_testing//lib:truth.bzl", "subjects")
+load("@rules_testing//lib:util.bzl", rt_util = "util")
+load("//python/config_settings:config_settings.bzl", "is_python_config_setting")
+
+_tests = []
+
+def _subject_impl(ctx):
+    _ = ctx  # @unused
+    return [DefaultInfo()]
+
+_subject = rule(
+    implementation = _subject_impl,
+    attrs = {
+        "match_cpu": attr.string(),
+        "match_micro": attr.string(),
+        "match_minor": attr.string(),
+        "match_os": attr.string(),
+        "match_os_cpu": attr.string(),
+        "no_match": attr.string(),
+        "no_match_micro": attr.string(),
+    },
+)
+
+def _test_minor_version_matching(name):
+    minor_matches = {
+        # Having it here ensures that we can mix and match config settings defined in
+        # the repo and elsewhere
+        str(Label("//python/config_settings:is_python_3.11")): "matched-3.11",
+        "//conditions:default": "matched-default",
+    }
+    minor_cpu_matches = {
+        str(Label(":is_python_3.11_aarch64")): "matched-3.11-aarch64",
+        str(Label(":is_python_3.11_ppc")): "matched-3.11-ppc",
+        str(Label(":is_python_3.11_s390x")): "matched-3.11-s390x",
+        str(Label(":is_python_3.11_x86_64")): "matched-3.11-x86_64",
+    }
+    minor_os_matches = {
+        str(Label(":is_python_3.11_linux")): "matched-3.11-linux",
+        str(Label(":is_python_3.11_osx")): "matched-3.11-osx",
+        str(Label(":is_python_3.11_windows")): "matched-3.11-windows",
+    }
+    minor_os_cpu_matches = {
+        str(Label(":is_python_3.11_linux_aarch64")): "matched-3.11-linux-aarch64",
+        str(Label(":is_python_3.11_linux_ppc")): "matched-3.11-linux-ppc",
+        str(Label(":is_python_3.11_linux_s390x")): "matched-3.11-linux-s390x",
+        str(Label(":is_python_3.11_linux_x86_64")): "matched-3.11-linux-x86_64",
+        str(Label(":is_python_3.11_osx_aarch64")): "matched-3.11-osx-aarch64",
+        str(Label(":is_python_3.11_osx_x86_64")): "matched-3.11-osx-x86_64",
+        str(Label(":is_python_3.11_windows_x86_64")): "matched-3.11-windows-x86_64",
+    }
+
+    rt_util.helper_target(
+        _subject,
+        name = name + "_subject",
+        match_minor = select(minor_matches),
+        match_cpu = select(minor_matches | minor_cpu_matches),
+        match_os = select(minor_matches | minor_os_matches),
+        match_os_cpu = select(minor_matches | minor_cpu_matches | minor_os_matches | minor_os_cpu_matches),
+        no_match = select({
+            "//python/config_settings:is_python_3.12": "matched-3.12",
+            "//conditions:default": "matched-default",
+        }),
+    )
+
+    analysis_test(
+        name = name,
+        target = name + "_subject",
+        impl = _test_minor_version_matching_impl,
+        config_settings = {
+            str(Label("//python/config_settings:python_version")): "3.11.1",
+            "//command_line_option:platforms": str(Label("//tests/config_settings:linux_aarch64")),
+        },
+    )
+
+def _test_minor_version_matching_impl(env, target):
+    target = env.expect.that_target(target)
+    target.attr("match_cpu", factory = subjects.str).equals(
+        "matched-3.11-aarch64",
+    )
+    target.attr("match_minor", factory = subjects.str).equals(
+        "matched-3.11",
+    )
+    target.attr("match_os", factory = subjects.str).equals(
+        "matched-3.11-linux",
+    )
+    target.attr("match_os_cpu", factory = subjects.str).equals(
+        "matched-3.11-linux-aarch64",
+    )
+    target.attr("no_match", factory = subjects.str).equals(
+        "matched-default",
+    )
+
+_tests.append(_test_minor_version_matching)
+
+def _test_latest_micro_version_matching(name):
+    rt_util.helper_target(
+        _subject,
+        name = name + "_subject",
+        match_minor = select({
+            "//python/config_settings:is_python_3.12": "matched-3.12",
+            "//conditions:default": "matched-default",
+        }),
+        match_micro = select({
+            "//python/config_settings:is_python_" + MINOR_MAPPING["3.12"]: "matched-3.12",
+            "//conditions:default": "matched-default",
+        }),
+        no_match_micro = select({
+            "//python/config_settings:is_python_3.12.0": "matched-3.12",
+            "//conditions:default": "matched-default",
+        }),
+        no_match = select({
+            "//python/config_settings:is_python_" + MINOR_MAPPING["3.11"]: "matched-3.11",
+            "//conditions:default": "matched-default",
+        }),
+    )
+
+    analysis_test(
+        name = name,
+        target = name + "_subject",
+        impl = _test_latest_micro_version_matching_impl,
+        config_settings = {
+            str(Label("//python/config_settings:python_version")): "3.12",
+        },
+    )
+
+def _test_latest_micro_version_matching_impl(env, target):
+    target = env.expect.that_target(target)
+    target.attr("match_minor", factory = subjects.str).equals(
+        "matched-3.12",
+    )
+    target.attr("match_micro", factory = subjects.str).equals(
+        "matched-3.12",
+    )
+    target.attr("no_match_micro", factory = subjects.str).equals(
+        "matched-default",
+    )
+    target.attr("no_match", factory = subjects.str).equals(
+        "matched-default",
+    )
+
+_tests.append(_test_latest_micro_version_matching)
+
+def construct_config_settings_test_suite(name):  # buildifier: disable=function-docstring
+    # We have CI runners running on a great deal of the platforms from the list below,
+    # hence use all of them within tests.
+    for os in ["linux", "osx", "windows"]:
+        is_python_config_setting(
+            name = "is_python_3.11_" + os,
+            constraint_values = [
+                "@platforms//os:" + os,
+            ],
+            python_version = "3.11",
+        )
+
+    for cpu in ["s390x", "ppc", "x86_64", "aarch64"]:
+        is_python_config_setting(
+            name = "is_python_3.11_" + cpu,
+            constraint_values = [
+                "@platforms//cpu:" + cpu,
+            ],
+            python_version = "3.11",
+        )
+
+    for (os, cpu) in [
+        ("linux", "aarch64"),
+        ("linux", "ppc"),
+        ("linux", "s390x"),
+        ("linux", "x86_64"),
+        ("osx", "aarch64"),
+        ("osx", "x86_64"),
+        ("windows", "x86_64"),
+    ]:
+        is_python_config_setting(
+            name = "is_python_3.11_{}_{}".format(os, cpu),
+            constraint_values = [
+                "@platforms//cpu:" + cpu,
+                "@platforms//os:" + os,
+            ],
+            python_version = "3.11",
+        )
+
+    test_suite(
+        name = name,
+        tests = _tests,
+    )
+
+    native.platform(
+        name = "linux_aarch64",
+        constraint_values = [
+            "@platforms//os:linux",
+            "@platforms//cpu:aarch64",
+        ],
+    )
diff --git a/tests/config_settings/transition/multi_version_tests.bzl b/tests/config_settings/transition/multi_version_tests.bzl
index 32f7209..6659da5 100644
--- a/tests/config_settings/transition/multi_version_tests.bzl
+++ b/tests/config_settings/transition/multi_version_tests.bzl
@@ -15,9 +15,19 @@
 
 load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
 load("@rules_testing//lib:test_suite.bzl", "test_suite")
-load("@rules_testing//lib:util.bzl", rt_util = "util")
-load("//python:versions.bzl", "TOOL_VERSIONS")
+load("@rules_testing//lib:util.bzl", "TestingAspectInfo", rt_util = "util")
+load("//python:py_info.bzl", "PyInfo")
 load("//python/config_settings:transition.bzl", py_binary_transitioned = "py_binary", py_test_transitioned = "py_test")
+load("//python/private:reexports.bzl", "BuiltinPyInfo")  # buildifier: disable=bzl-visibility
+load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")  # buildifier: disable=bzl-visibility
+
+# NOTE @aignas 2024-06-04: we are using a Python version that is registered in
+# MODULE.bazel, so if you find tests failing, it could be due to toolchain resolution issues here.
+#
+# If the toolchain is not resolved then you will have a weird message telling
+# you that your transition target does not have a PyRuntime provider, which is
+# caused by there not being a toolchain detected for the target.
+_PYTHON_VERSION = "3.11"
 
 _tests = []
 
@@ -26,7 +36,7 @@
         py_test_transitioned,
         name = name + "_subject",
         srcs = [name + "_subject.py"],
-        python_version = TOOL_VERSIONS.keys()[0],
+        python_version = _PYTHON_VERSION,
     )
 
     analysis_test(
@@ -37,7 +47,8 @@
 
 def _test_py_test_with_transition_impl(env, target):
     # Nothing to assert; we just want to make sure it builds
-    _ = env, target  # @unused
+    env.expect.that_target(target).has_provider(PyInfo)
+    env.expect.that_target(target).has_provider(BuiltinPyInfo)
 
 _tests.append(_test_py_test_with_transition)
 
@@ -46,7 +57,7 @@
         py_binary_transitioned,
         name = name + "_subject",
         srcs = [name + "_subject.py"],
-        python_version = TOOL_VERSIONS.keys()[0],
+        python_version = _PYTHON_VERSION,
     )
 
     analysis_test(
@@ -57,10 +68,85 @@
 
 def _test_py_binary_with_transition_impl(env, target):
     # Nothing to assert; we just want to make sure it builds
-    _ = env, target  # @unused
+    env.expect.that_target(target).has_provider(PyInfo)
+    env.expect.that_target(target).has_provider(BuiltinPyInfo)
 
 _tests.append(_test_py_binary_with_transition)
 
+def _setup_py_binary_windows(name, *, impl, build_python_zip):
+    rt_util.helper_target(
+        py_binary_transitioned,
+        name = name + "_subject",
+        srcs = [name + "_subject.py"],
+        python_version = _PYTHON_VERSION,
+    )
+
+    analysis_test(
+        name = name,
+        target = name + "_subject",
+        impl = impl,
+        config_settings = {
+            "//command_line_option:build_python_zip": build_python_zip,
+            "//command_line_option:extra_toolchains": "//tests/cc:all",
+            "//command_line_option:platforms": str(Label("//tests/support:windows_x86_64")),
+        },
+    )
+
+def _test_py_binary_windows_build_python_zip_false(name):
+    _setup_py_binary_windows(
+        name,
+        build_python_zip = "false",
+        impl = _test_py_binary_windows_build_python_zip_false_impl,
+    )
+
+def _test_py_binary_windows_build_python_zip_false_impl(env, target):
+    default_outputs = env.expect.that_target(target).default_outputs()
+    if IS_BAZEL_7_OR_HIGHER:
+        # TODO: These outputs aren't correct. The outputs shouldn't
+        # have the "_" prefix on them (those are coming from the underlying
+        # wrapped binary).
+        env.expect.that_target(target).default_outputs().contains_exactly([
+            "{package}/_{test_name}_subject",
+            "{package}/_{test_name}_subject.exe",
+            "{package}/{test_name}_subject",
+            "{package}/{test_name}_subject.py",
+        ])
+    else:
+        inner_exe = target[TestingAspectInfo].attrs.target[DefaultInfo].files_to_run.executable
+        default_outputs.contains_at_least([
+            inner_exe.short_path,
+        ])
+
+_tests.append(_test_py_binary_windows_build_python_zip_false)
+
+def _test_py_binary_windows_build_python_zip_true(name):
+    _setup_py_binary_windows(
+        name,
+        build_python_zip = "true",
+        impl = _test_py_binary_windows_build_python_zip_true_impl,
+    )
+
+def _test_py_binary_windows_build_python_zip_true_impl(env, target):
+    default_outputs = env.expect.that_target(target).default_outputs()
+    if IS_BAZEL_7_OR_HIGHER:
+        # TODO: These outputs aren't correct. The outputs shouldn't
+        # have the "_" prefix on them (those are coming from the underlying
+        # wrapped binary).
+        default_outputs.contains_exactly([
+            "{package}/_{test_name}_subject.exe",
+            "{package}/_{test_name}_subject.zip",
+            "{package}/{test_name}_subject.py",
+            "{package}/{test_name}_subject.zip",
+        ])
+    else:
+        inner_exe = target[TestingAspectInfo].attrs.target[DefaultInfo].files_to_run.executable
+        default_outputs.contains_at_least([
+            "{package}/{test_name}_subject.zip",
+            inner_exe.short_path,
+        ])
+
+_tests.append(_test_py_binary_windows_build_python_zip_true)
+
 def multi_version_test_suite(name):
     test_suite(
         name = name,
diff --git a/tests/entry_points/BUILD.bazel b/tests/entry_points/BUILD.bazel
index 7a22d3c..c877462 100644
--- a/tests/entry_points/BUILD.bazel
+++ b/tests/entry_points/BUILD.bazel
@@ -28,12 +28,19 @@
 
 py_console_script_binary_in_a_macro(
     name = "twine",
-    pkg = "@publish_deps_twine//:pkg",
+    pkg = "@rules_python_publish_deps//twine",
+)
+
+py_console_script_binary_in_a_macro(
+    name = "twine_pkg",
+    pkg = "@rules_python_publish_deps//twine:pkg",
+    script = "twine",
 )
 
 build_test(
     name = "build_entry_point",
     targets = [
         ":twine",
+        ":twine_pkg",
     ],
 )
diff --git a/tests/entry_points/simple_macro.bzl b/tests/entry_points/simple_macro.bzl
index 4764a3f..c56f2e1 100644
--- a/tests/entry_points/simple_macro.bzl
+++ b/tests/entry_points/simple_macro.bzl
@@ -18,14 +18,16 @@
 
 load("//python/entry_points:py_console_script_binary.bzl", "py_console_script_binary")
 
-def py_console_script_binary_in_a_macro(name, pkg):
+def py_console_script_binary_in_a_macro(name, pkg, **kwargs):
     """A simple macro to see that we can use our macro in a macro.
 
     Args:
         name, str: the name of the target
         pkg, str: the pkg target
+        **kwargs, Any: extra kwargs passed through.
     """
     py_console_script_binary(
         name = name,
         pkg = Label(pkg),
+        **kwargs
     )
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/tests/envsubst/BUILD.bazel
similarity index 67%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to tests/envsubst/BUILD.bazel
index bbdfb4c..ec99705 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/tests/envsubst/BUILD.bazel
@@ -1,10 +1,10 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#     http://www.apache.org/licenses/LICENSE-2.0
+#    http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,3 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+"""Tests for envsubst."""
+
+load(":envsubst_tests.bzl", "envsubst_test_suite")
+
+envsubst_test_suite(name = "envsubst_tests")
diff --git a/tests/envsubst/envsubst_tests.bzl b/tests/envsubst/envsubst_tests.bzl
new file mode 100644
index 0000000..dd5e706
--- /dev/null
+++ b/tests/envsubst/envsubst_tests.bzl
@@ -0,0 +1,126 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for the envsubst environment-variable substitution utility."""
+
+load("@rules_testing//lib:analysis_test.bzl", "test_suite")
+load("//python/private:envsubst.bzl", "envsubst")  # buildifier: disable=bzl-visibility
+
+_basic_tests = []
+
+def _test_envsubst_braceless(env):
+    env.expect.that_str(
+        envsubst("--retries=$PIP_RETRIES", ["PIP_RETRIES"], {"PIP_RETRIES": "5"}.get),
+    ).equals("--retries=5")
+
+    env.expect.that_str(
+        envsubst("--retries=$PIP_RETRIES", [], {"PIP_RETRIES": "5"}.get),
+    ).equals("--retries=$PIP_RETRIES")
+
+    env.expect.that_str(
+        envsubst("--retries=$PIP_RETRIES", ["PIP_RETRIES"], {}.get),
+    ).equals("--retries=")
+
+_basic_tests.append(_test_envsubst_braceless)
+
+def _test_envsubst_braces_without_default(env):
+    env.expect.that_str(
+        envsubst("--retries=${PIP_RETRIES}", ["PIP_RETRIES"], {"PIP_RETRIES": "5"}.get),
+    ).equals("--retries=5")
+
+    env.expect.that_str(
+        envsubst("--retries=${PIP_RETRIES}", [], {"PIP_RETRIES": "5"}.get),
+    ).equals("--retries=${PIP_RETRIES}")
+
+    env.expect.that_str(
+        envsubst("--retries=${PIP_RETRIES}", ["PIP_RETRIES"], {}.get),
+    ).equals("--retries=")
+
+_basic_tests.append(_test_envsubst_braces_without_default)
+
+def _test_envsubst_braces_with_default(env):
+    env.expect.that_str(
+        envsubst("--retries=${PIP_RETRIES:-6}", ["PIP_RETRIES"], {"PIP_RETRIES": "5"}.get),
+    ).equals("--retries=5")
+
+    env.expect.that_str(
+        envsubst("--retries=${PIP_RETRIES:-6}", [], {"PIP_RETRIES": "5"}.get),
+    ).equals("--retries=${PIP_RETRIES:-6}")
+
+    env.expect.that_str(
+        envsubst("--retries=${PIP_RETRIES:-6}", ["PIP_RETRIES"], {}.get),
+    ).equals("--retries=6")
+
+_basic_tests.append(_test_envsubst_braces_with_default)
+
+def _test_envsubst_nested_both_vars(env):
+    env.expect.that_str(
+        envsubst(
+            "${HOME:-/home/$USER}",
+            ["HOME", "USER"],
+            {"HOME": "/home/testuser", "USER": "mockuser"}.get,
+        ),
+    ).equals("/home/testuser")
+
+_basic_tests.append(_test_envsubst_nested_both_vars)
+
+def _test_envsubst_nested_outer_var(env):
+    env.expect.that_str(
+        envsubst(
+            "${HOME:-/home/$USER}",
+            ["HOME"],
+            {"HOME": "/home/testuser", "USER": "mockuser"}.get,
+        ),
+    ).equals("/home/testuser")
+
+_basic_tests.append(_test_envsubst_nested_outer_var)
+
+def _test_envsubst_nested_no_vars(env):
+    env.expect.that_str(
+        envsubst(
+            "${HOME:-/home/$USER}",
+            [],
+            {"HOME": "/home/testuser", "USER": "mockuser"}.get,
+        ),
+    ).equals("${HOME:-/home/$USER}")
+
+    env.expect.that_str(
+        envsubst("${HOME:-/home/$USER}", ["HOME", "USER"], {}.get),
+    ).equals("/home/")
+
+_basic_tests.append(_test_envsubst_nested_no_vars)
+
+def _test_envsubst_nested_braces_inner_var(env):
+    env.expect.that_str(
+        envsubst(
+            "Home directory is ${HOME:-/home/$USER}.",
+            ["HOME", "USER"],
+            {"USER": "mockuser"}.get,
+        ),
+    ).equals("Home directory is /home/mockuser.")
+
+    env.expect.that_str(
+        envsubst(
+            "Home directory is ${HOME:-/home/$USER}.",
+            ["USER"],
+            {"USER": "mockuser"}.get,
+        ),
+    ).equals("Home directory is ${HOME:-/home/mockuser}.")
+
+_basic_tests.append(_test_envsubst_nested_braces_inner_var)
+
+def envsubst_test_suite(name):
+    test_suite(
+        name = name,
+        basic_tests = _basic_tests,
+    )
diff --git a/tests/exec_toolchain_matching/BUILD.bazel b/tests/exec_toolchain_matching/BUILD.bazel
new file mode 100644
index 0000000..ce04bf7
--- /dev/null
+++ b/tests/exec_toolchain_matching/BUILD.bazel
@@ -0,0 +1,76 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//python/private:py_exec_tools_toolchain.bzl", "py_exec_tools_toolchain")  # buildifier: disable=bzl-visibility
+load(
+    ":exec_toolchain_matching_tests.bzl",
+    "define_py_runtime",
+    "exec_toolchain_matching_test_suite",
+)
+
+exec_toolchain_matching_test_suite(
+    name = "exec_toolchain_matching_tests",
+)
+
+define_py_runtime(
+    name = "target_3.12_linux",
+    interpreter_path = "/linux/python3.12",
+    interpreter_version_info = {
+        "major": "3",
+        "minor": "12",
+    },
+)
+
+define_py_runtime(
+    name = "target_3.12_mac",
+    interpreter_path = "/mac/python3.12",
+    interpreter_version_info = {
+        "major": "3",
+        "minor": "12",
+    },
+)
+
+define_py_runtime(
+    name = "target_3.12_any",
+    interpreter_path = "/any/python3.11",
+    interpreter_version_info = {
+        "major": "3",
+        "minor": "11",
+    },
+)
+
+define_py_runtime(
+    name = "target_default",
+    interpreter_path = "/should_not_match_anything",
+    interpreter_version_info = {
+        "major": "-1",
+        "minor": "-1",
+    },
+)
+
+# While these have the same definition, we register duplicates with different
+# names because it makes understanding toolchain resolution easier. Toolchain
+# resolution debug output shows the implementation name, not the toolchain()
+# call that was being evaluated.
+py_exec_tools_toolchain(
+    name = "exec_3.12",
+)
+
+py_exec_tools_toolchain(
+    name = "exec_3.11_any",
+)
+
+py_exec_tools_toolchain(
+    name = "exec_default",
+)
diff --git a/tests/exec_toolchain_matching/exec_toolchain_matching_tests.bzl b/tests/exec_toolchain_matching/exec_toolchain_matching_tests.bzl
new file mode 100644
index 0000000..f6eae5a
--- /dev/null
+++ b/tests/exec_toolchain_matching/exec_toolchain_matching_tests.bzl
@@ -0,0 +1,152 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Starlark tests for PyRuntimeInfo provider."""
+
+load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("@rules_testing//lib:util.bzl", rt_util = "util")
+load("//python:py_runtime.bzl", "py_runtime")
+load("//python:py_runtime_pair.bzl", "py_runtime_pair")
+load("//python/private:toolchain_types.bzl", "EXEC_TOOLS_TOOLCHAIN_TYPE", "TARGET_TOOLCHAIN_TYPE")  # buildifier: disable=bzl-visibility
+load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")  # buildifier: disable=bzl-visibility
+load("//tests/support:support.bzl", "LINUX", "MAC", "PYTHON_VERSION")
+
+_LookupInfo = provider()  # buildifier: disable=provider-params
+
+def _lookup_toolchains_impl(ctx):
+    return [_LookupInfo(
+        target = ctx.toolchains[TARGET_TOOLCHAIN_TYPE],
+        exec = ctx.toolchains[EXEC_TOOLS_TOOLCHAIN_TYPE],
+    )]
+
+_lookup_toolchains = rule(
+    implementation = _lookup_toolchains_impl,
+    toolchains = [TARGET_TOOLCHAIN_TYPE, EXEC_TOOLS_TOOLCHAIN_TYPE],
+    attrs = {"_use_auto_exec_groups": attr.bool(default = True)},
+)
+
+def define_py_runtime(name, **kwargs):
+    py_runtime(
+        name = name + "_runtime",
+        **kwargs
+    )
+    py_runtime_pair(
+        name = name,
+        py3_runtime = name + "_runtime",
+    )
+
+_tests = []
+
+def _test_exec_matches_target_python_version(name):
+    rt_util.helper_target(
+        _lookup_toolchains,
+        name = name + "_subject",
+    )
+
+    # ==== Target toolchains =====
+
+    # This is never matched. It comes first to ensure the python version
+    # constraint is being respected.
+    native.toolchain(
+        name = "00_target_3.11_any",
+        toolchain_type = TARGET_TOOLCHAIN_TYPE,
+        toolchain = ":target_3.12_linux",
+        target_settings = ["//python/config_settings:is_python_3.11"],
+    )
+
+    # This is matched by the top-level target being built in what --platforms
+    # specifies.
+    native.toolchain(
+        name = "10_target_3.12_linux",
+        toolchain_type = TARGET_TOOLCHAIN_TYPE,
+        toolchain = ":target_3.12_linux",
+        target_compatible_with = ["@platforms//os:linux"],
+        target_settings = ["//python/config_settings:is_python_3.12"],
+    )
+
+    # This is matched when the exec config switches to the mac platform and
+    # then looks for a Python runtime for itself.
+    native.toolchain(
+        name = "15_target_3.12_mac",
+        toolchain_type = TARGET_TOOLCHAIN_TYPE,
+        toolchain = ":target_3.12_mac",
+        target_compatible_with = ["@platforms//os:macos"],
+        target_settings = ["//python/config_settings:is_python_3.12"],
+    )
+
+    # This is never matched. It's just here so that toolchains from the
+    # environment don't match.
+    native.toolchain(
+        name = "99_target_default",
+        toolchain_type = TARGET_TOOLCHAIN_TYPE,
+        toolchain = ":target_default",
+    )
+
+    # ==== Exec tools toolchains =====
+
+    # Register a 3.11 before, to ensure the python version is respected
+    native.toolchain(
+        name = "00_exec_3.11_any",
+        toolchain_type = EXEC_TOOLS_TOOLCHAIN_TYPE,
+        toolchain = ":exec_3.11_any",
+        target_settings = ["//python/config_settings:is_python_3.11"],
+    )
+
+    # Note that mac comes first. This is so it matches instead of linux
+    # We only ever look for mac ones, so no need to register others.
+    native.toolchain(
+        name = "10_exec_3.12_mac",
+        toolchain_type = EXEC_TOOLS_TOOLCHAIN_TYPE,
+        toolchain = ":exec_3.12",
+        exec_compatible_with = ["@platforms//os:macos"],
+        target_settings = ["//python/config_settings:is_python_3.12"],
+    )
+
+    # This is never matched. It's just here so that toolchains from the
+    # environment don't match.
+    native.toolchain(
+        name = "99_exec_default",
+        toolchain_type = EXEC_TOOLS_TOOLCHAIN_TYPE,
+        toolchain = ":exec_default",
+    )
+
+    analysis_test(
+        name = name,
+        target = name + "_subject",
+        impl = _test_exec_matches_target_python_version_impl,
+        config_settings = {
+            "//command_line_option:extra_execution_platforms": [str(MAC)],
+            "//command_line_option:extra_toolchains": ["//tests/exec_toolchain_matching:all"],
+            "//command_line_option:platforms": [str(LINUX)],
+            PYTHON_VERSION: "3.12",
+        },
+    )
+
+_tests.append(_test_exec_matches_target_python_version)
+
+def _test_exec_matches_target_python_version_impl(env, target):
+    target_runtime = target[_LookupInfo].target.py3_runtime
+    exec_runtime = target[_LookupInfo].exec.exec_tools.exec_interpreter[platform_common.ToolchainInfo].py3_runtime
+
+    env.expect.that_str(target_runtime.interpreter_path).equals("/linux/python3.12")
+    env.expect.that_str(exec_runtime.interpreter_path).equals("/mac/python3.12")
+
+    if IS_BAZEL_7_OR_HIGHER:
+        target_version = target_runtime.interpreter_version_info
+        exec_version = exec_runtime.interpreter_version_info
+
+        env.expect.that_bool(target_version == exec_version)
+
+def exec_toolchain_matching_test_suite(name):
+    test_suite(name = name, tests = _tests)
diff --git a/tests/integration/BUILD.bazel b/tests/integration/BUILD.bazel
index 0e793cd..8724b25 100644
--- a/tests/integration/BUILD.bazel
+++ b/tests/integration/BUILD.bazel
@@ -12,7 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+load("@bazel_binaries//:defs.bzl", "bazel_binaries")
 load("@rules_bazel_integration_test//bazel_integration_test:defs.bzl", "default_test_runner")
+load("//python:py_library.bzl", "py_library")
 load(":integration_test.bzl", "rules_python_integration_test")
 
 licenses(["notice"])
@@ -64,15 +66,6 @@
 # TODO: add compile_pip_requirements_test_from_external_repo
 
 rules_python_integration_test(
-    name = "pip_repository_entry_points_workspace_test",
-    timeout = "long",
-    bzlmod = False,
-    # The dependencies needed for this test are not cross-platform: https://github.com/bazelbuild/rules_python/issues/260
-    tags = ["fix-windows"],
-    workspace_path = "pip_repository_entry_points",
-)
-
-rules_python_integration_test(
     name = "compile_pip_requirements_test",
 )
 
@@ -93,6 +86,25 @@
 )
 
 rules_python_integration_test(
+    name = "local_toolchains_test",
+    bazel_versions = [
+        version
+        for version in bazel_binaries.versions.all
+        if not version.startswith("6.")
+    ],
+)
+
+rules_python_integration_test(
+    name = "pip_parse_test",
+)
+
+rules_python_integration_test(
+    name = "pip_parse_workspace_test",
+    bzlmod = False,
+    workspace_path = "pip_parse",
+)
+
+rules_python_integration_test(
     name = "py_cc_toolchain_registered_test",
 )
 
@@ -101,3 +113,14 @@
     bzlmod = False,
     workspace_path = "py_cc_toolchain_registered",
 )
+
+rules_python_integration_test(
+    name = "custom_commands_test",
+    py_main = "custom_commands_test.py",
+)
+
+py_library(
+    name = "runner_lib",
+    srcs = ["runner.py"],
+    imports = ["../../"],
+)
diff --git a/tests/integration/README.md b/tests/integration/README.md
new file mode 100644
index 0000000..e36e363
--- /dev/null
+++ b/tests/integration/README.md
@@ -0,0 +1,21 @@
+# Bazel-in-Bazel integration tests
+
+The tests in this directory are Bazel-in-Bazel integration tests. These are
+necessary because our CI has a limit of 80 jobs, and our test matrix uses most
+of those for more important end-to-end tests of user-facing examples.
+
+The tests in here are more for testing internal aspects of the rules that aren't
+easily tested as tests run by Bazel itself (basically anything that happens
+prior to the analysis phase).
+
+## Adding a new directory
+
+When adding a new directory, a couple files need to be updated to tell the outer
+Bazel to ignore the nested workspace.
+
+* Add the directory to the `--deleted_packages` flag. Run `pre-commit` and it
+  will do this for you. This also allows the integration test to see the
+  nested workspace files correctly.
+* Update `.bazelignore` and add `tests/integration/<directory>/bazel-<name>`.
+  This prevents Bazel from following infinite symlinks and freezing.
+* Add a `rules_python_integration_test` target to the BUILD file.
diff --git a/tests/integration/compile_pip_requirements/.bazelrc b/tests/integration/compile_pip_requirements/.bazelrc
index f23315a..8a42e64 100644
--- a/tests/integration/compile_pip_requirements/.bazelrc
+++ b/tests/integration/compile_pip_requirements/.bazelrc
@@ -2,4 +2,3 @@
 
 # Windows requires these for multi-python support:
 build --enable_runfiles
-startup --windows_enable_symlinks
diff --git a/tests/integration/compile_pip_requirements/WORKSPACE b/tests/integration/compile_pip_requirements/WORKSPACE
index 5a2204b..0eeab20 100644
--- a/tests/integration/compile_pip_requirements/WORKSPACE
+++ b/tests/integration/compile_pip_requirements/WORKSPACE
@@ -7,10 +7,6 @@
 
 py_repositories()
 
-load("@rules_python//python/pip_install:repositories.bzl", "pip_install_dependencies")
-
-pip_install_dependencies()
-
 python_register_toolchains(
     name = "python39",
     python_version = "3.9",
diff --git a/tests/integration/compile_pip_requirements_test_from_external_repo/WORKSPACE b/tests/integration/compile_pip_requirements_test_from_external_repo/WORKSPACE
index 9a08c28..48caeb4 100644
--- a/tests/integration/compile_pip_requirements_test_from_external_repo/WORKSPACE
+++ b/tests/integration/compile_pip_requirements_test_from_external_repo/WORKSPACE
@@ -7,10 +7,6 @@
 
 py_repositories()
 
-load("@rules_python//python/pip_install:repositories.bzl", "pip_install_dependencies")
-
-pip_install_dependencies()
-
 python_register_toolchains(
     name = "python39",
     python_version = "3.9",
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/tests/integration/custom_commands/BUILD.bazel
similarity index 68%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to tests/integration/custom_commands/BUILD.bazel
index bbdfb4c..b0fafff 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/tests/integration/custom_commands/BUILD.bazel
@@ -1,10 +1,10 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#     http://www.apache.org/licenses/LICENSE-2.0
+#    http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,3 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+load("@rules_python//python:py_binary.bzl", "py_binary")
+
+py_binary(
+    name = "bin",
+    srcs = ["bin.py"],
+)
diff --git a/tests/private/whl_target_platforms/BUILD.bazel b/tests/integration/custom_commands/MODULE.bazel
similarity index 64%
copy from tests/private/whl_target_platforms/BUILD.bazel
copy to tests/integration/custom_commands/MODULE.bazel
index fec25af..5bea812 100644
--- a/tests/private/whl_target_platforms/BUILD.bazel
+++ b/tests/integration/custom_commands/MODULE.bazel
@@ -1,10 +1,10 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#     http://www.apache.org/licenses/LICENSE-2.0
+#    http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,6 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-load(":whl_target_platforms_tests.bzl", "whl_target_platforms_test_suite")
+module(name = "module_under_test")
 
-whl_target_platforms_test_suite(name = "whl_target_platforms_tests")
+bazel_dep(name = "rules_python", version = "0.0.0")
+local_path_override(
+    module_name = "rules_python",
+    path = "../../..",
+)
diff --git a/tests/integration/custom_commands/WORKSPACE b/tests/integration/custom_commands/WORKSPACE
new file mode 100644
index 0000000..de90854
--- /dev/null
+++ b/tests/integration/custom_commands/WORKSPACE
@@ -0,0 +1,13 @@
+local_repository(
+    name = "rules_python",
+    path = "../../..",
+)
+
+load("@rules_python//python:repositories.bzl", "py_repositories", "python_register_toolchains")
+
+py_repositories()
+
+python_register_toolchains(
+    name = "python_3_11",
+    python_version = "3.11",
+)
diff --git a/tests/pip_install/BUILD.bazel b/tests/integration/custom_commands/WORKSPACE.bzlmod
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to tests/integration/custom_commands/WORKSPACE.bzlmod
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/tests/integration/custom_commands/bin.py
similarity index 77%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to tests/integration/custom_commands/bin.py
index bbdfb4c..62487b5 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/tests/integration/custom_commands/bin.py
@@ -1,10 +1,10 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#     http://www.apache.org/licenses/LICENSE-2.0
+#    http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,3 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+print("Hello, world")
+print(__file__)
diff --git a/tests/integration/custom_commands_test.py b/tests/integration/custom_commands_test.py
new file mode 100644
index 0000000..f78ee46
--- /dev/null
+++ b/tests/integration/custom_commands_test.py
@@ -0,0 +1,31 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import unittest
+
+from tests.integration import runner
+
+
+class CustomCommandsTest(runner.TestCase):
+    # Regression test for https://github.com/bazelbuild/rules_python/issues/1840
+    def test_run_build_python_zip_false(self):
+        result = self.run_bazel("run", "--build_python_zip=false", "//:bin")
+        self.assert_result_matches(result, "bazel-out")
+
+
+if __name__ == "__main__":
+    # Enabling this makes the runner log subprocesses as the test goes along.
+    # logging.basicConfig(level = "INFO")
+    unittest.main()
diff --git a/tests/integration/ignore_root_user_error/.bazelrc b/tests/integration/ignore_root_user_error/.bazelrc
index f23315a..27d7d13 100644
--- a/tests/integration/ignore_root_user_error/.bazelrc
+++ b/tests/integration/ignore_root_user_error/.bazelrc
@@ -1,5 +1,6 @@
+common --action_env=RULES_PYTHON_BZLMOD_DEBUG=1
+common --lockfile_mode=off
 test --test_output=errors
 
 # Windows requires these for multi-python support:
 build --enable_runfiles
-startup --windows_enable_symlinks
diff --git a/tests/integration/ignore_root_user_error/BUILD.bazel b/tests/integration/ignore_root_user_error/BUILD.bazel
index f907624..6e3b7b9 100644
--- a/tests/integration/ignore_root_user_error/BUILD.bazel
+++ b/tests/integration/ignore_root_user_error/BUILD.bazel
@@ -1,7 +1,32 @@
-load("@rules_python//python:defs.bzl", "py_test")
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@rules_python//python:py_test.bzl", "py_test")
+load("@rules_python//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")  # buildifier: disable=bzl-visibility
 
 py_test(
     name = "foo_test",
     srcs = ["foo_test.py"],
     visibility = ["//visibility:public"],
 )
+
+py_test(
+    name = "bzlmod_test",
+    srcs = ["bzlmod_test.py"],
+    data = [
+        "@rules_python//python/runfiles",
+        "@rules_python_bzlmod_debug//:debug_info.json",
+    ],
+    target_compatible_with = [] if BZLMOD_ENABLED else ["@platforms//:incompatible"],
+)
diff --git a/tests/integration/ignore_root_user_error/MODULE.bazel b/tests/integration/ignore_root_user_error/MODULE.bazel
new file mode 100644
index 0000000..15c37c4
--- /dev/null
+++ b/tests/integration/ignore_root_user_error/MODULE.bazel
@@ -0,0 +1,20 @@
+module(name = "ignore_root_user_error")
+
+bazel_dep(name = "rules_python", version = "0.0.0")
+local_path_override(
+    module_name = "rules_python",
+    path = "../../..",
+)
+
+bazel_dep(name = "submodule")
+local_path_override(
+    module_name = "submodule",
+    path = "submodule",
+)
+
+python = use_extension("@rules_python//python/extensions:python.bzl", "python")
+python.toolchain(
+    ignore_root_user_error = True,
+    python_version = "3.11",
+)
+use_repo(python, "rules_python_bzlmod_debug")
diff --git a/tests/integration/ignore_root_user_error/bzlmod_test.py b/tests/integration/ignore_root_user_error/bzlmod_test.py
new file mode 100644
index 0000000..98715b3
--- /dev/null
+++ b/tests/integration/ignore_root_user_error/bzlmod_test.py
@@ -0,0 +1,38 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import pathlib
+import unittest
+
+from python.runfiles import runfiles
+
+
+class BzlmodTest(unittest.TestCase):
+    def test_toolchains(self):
+        rf = runfiles.Create()
+        debug_path = pathlib.Path(
+            rf.Rlocation("rules_python_bzlmod_debug/debug_info.json")
+        )
+        debug_info = json.loads(debug_path.read_bytes())
+
+        expected = [
+            {"ignore_root_user_error": True, "name": "python_3_11"},
+            {"ignore_root_user_error": True, "name": "python_3_10"},
+        ]
+        self.assertCountEqual(debug_info["toolchains_registered"], expected)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/pip_install/BUILD.bazel b/tests/integration/ignore_root_user_error/submodule/BUILD.bazel
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to tests/integration/ignore_root_user_error/submodule/BUILD.bazel
diff --git a/tests/integration/ignore_root_user_error/submodule/MODULE.bazel b/tests/integration/ignore_root_user_error/submodule/MODULE.bazel
new file mode 100644
index 0000000..f128709
--- /dev/null
+++ b/tests/integration/ignore_root_user_error/submodule/MODULE.bazel
@@ -0,0 +1,9 @@
+module(name = "submodule")
+
+bazel_dep(name = "rules_python", version = "0.0.0")
+
+python = use_extension("@rules_python//python/extensions:python.bzl", "python")
+python.toolchain(
+    ignore_root_user_error = False,
+    python_version = "3.10",
+)
diff --git a/tests/pip_install/BUILD.bazel b/tests/integration/ignore_root_user_error/submodule/WORKSPACE
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to tests/integration/ignore_root_user_error/submodule/WORKSPACE
diff --git a/tests/integration/integration_test.bzl b/tests/integration/integration_test.bzl
index 16d6a5a..8606f66 100644
--- a/tests/integration/integration_test.bzl
+++ b/tests/integration/integration_test.bzl
@@ -19,6 +19,7 @@
     "bazel_integration_tests",
     "integration_test_utils",
 )
+load("//python:py_test.bzl", "py_test")
 
 def rules_python_integration_test(
         name,
@@ -26,6 +27,8 @@
         bzlmod = True,
         gazelle_plugin = False,
         tags = None,
+        py_main = None,
+        bazel_versions = None,
         **kwargs):
     """Runs a bazel-in-bazel integration test.
 
@@ -37,10 +40,26 @@
             disable bzlmod.
         gazelle_plugin: Whether the test uses the gazelle plugin.
         tags: Test tags.
+        py_main: Optional `.py` file to run tests using. When specified, a
+            python based test runner is used, and this source file is the main
+            entry point and responsible for executing tests.
+        bazel_versions: `list[str] | None`, the bazel versions to test. If
+            not specified, defaults to all configured bazel versions.
         **kwargs: Passed to the upstream `bazel_integration_tests` rule.
     """
     workspace_path = workspace_path or name.removesuffix("_test")
-    if bzlmod:
+    if py_main:
+        test_runner = name + "_py_runner"
+        py_test(
+            name = test_runner,
+            srcs = [py_main],
+            main = py_main,
+            deps = [":runner_lib"],
+            # Hide from ... patterns; should only be run as part
+            # of the bazel integration test
+            tags = ["manual"],
+        )
+    elif bzlmod:
         if gazelle_plugin:
             test_runner = "//tests/integration:test_runner_gazelle_plugin"
         else:
@@ -74,7 +93,7 @@
         name = name,
         workspace_path = workspace_path,
         test_runner = test_runner,
-        bazel_versions = bazel_binaries.versions.all,
+        bazel_versions = bazel_versions or bazel_binaries.versions.all,
         workspace_files = [name + "_workspace_files"],
         # Override the tags so that the `manual` tag isn't applied.
         tags = (tags or []) + [
diff --git a/tests/integration/local_toolchains/.bazelrc b/tests/integration/local_toolchains/.bazelrc
new file mode 100644
index 0000000..551df40
--- /dev/null
+++ b/tests/integration/local_toolchains/.bazelrc
@@ -0,0 +1,5 @@
+common --action_env=RULES_PYTHON_BZLMOD_DEBUG=1
+common --lockfile_mode=off
+test --test_output=errors
+# Windows requires these for multi-python support:
+build --enable_runfiles
diff --git a/python/pip_install/tools/dependency_resolver/__init__.py b/tests/integration/local_toolchains/BUILD.bazel
similarity index 76%
copy from python/pip_install/tools/dependency_resolver/__init__.py
copy to tests/integration/local_toolchains/BUILD.bazel
index bbdfb4c..6fbf548 100644
--- a/python/pip_install/tools/dependency_resolver/__init__.py
+++ b/tests/integration/local_toolchains/BUILD.bazel
@@ -1,4 +1,4 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
+# Copyright 2024 The Bazel Authors. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+load("@rules_python//python:py_test.bzl", "py_test")
+
+py_test(
+    name = "test",
+    srcs = ["test.py"],
+)
diff --git a/tests/integration/local_toolchains/MODULE.bazel b/tests/integration/local_toolchains/MODULE.bazel
new file mode 100644
index 0000000..d4ef12e
--- /dev/null
+++ b/tests/integration/local_toolchains/MODULE.bazel
@@ -0,0 +1,40 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+module(name = "module_under_test")
+
+bazel_dep(name = "rules_python", version = "0.0.0")
+local_path_override(
+    module_name = "rules_python",
+    path = "../../..",
+)
+
+local_runtime_repo = use_repo_rule("@rules_python//python/private:local_runtime_repo.bzl", "local_runtime_repo")
+
+local_runtime_toolchains_repo = use_repo_rule("@rules_python//python/private:local_runtime_toolchains_repo.bzl", "local_runtime_toolchains_repo")
+
+local_runtime_repo(
+    name = "local_python3",
+    interpreter_path = "python3",
+    on_failure = "fail",
+)
+
+local_runtime_toolchains_repo(
+    name = "local_toolchains",
+    runtimes = ["local_python3"],
+)
+
+python = use_extension("@rules_python//python/extensions:python.bzl", "python")
+use_repo(python, "rules_python_bzlmod_debug")
+
+register_toolchains("@local_toolchains//:all")
diff --git a/tests/pip_install/BUILD.bazel b/tests/integration/local_toolchains/REPO.bazel
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to tests/integration/local_toolchains/REPO.bazel
diff --git a/tests/pip_install/BUILD.bazel b/tests/integration/local_toolchains/WORKSPACE
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to tests/integration/local_toolchains/WORKSPACE
diff --git a/tests/pip_install/BUILD.bazel b/tests/integration/local_toolchains/WORKSPACE.bzlmod
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to tests/integration/local_toolchains/WORKSPACE.bzlmod
diff --git a/tests/integration/local_toolchains/test.py b/tests/integration/local_toolchains/test.py
new file mode 100644
index 0000000..63771cf
--- /dev/null
+++ b/tests/integration/local_toolchains/test.py
@@ -0,0 +1,26 @@
+import shutil
+import subprocess
+import sys
+import unittest
+
+
+class LocalToolchainTest(unittest.TestCase):
+    maxDiff = None
+
+    def test_python_from_path_used(self):
+        shell_path = shutil.which("python3")
+
+        # We call the interpreter and print its executable because of
+        # things like pyenv: they install a shim that re-execs python.
+        # The shim is e.g. /home/user/.pyenv/shims/python3, which then
+        # runs e.g. /usr/bin/python3
+        expected = subprocess.check_output(
+            [shell_path, "-c", "import sys; print(sys.executable)"],
+            text=True,
+        )
+        expected = expected.strip()
+        self.assertEqual(expected, sys.executable)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/integration/pip_parse/.bazelrc b/tests/integration/pip_parse/.bazelrc
new file mode 100644
index 0000000..efeccbe
--- /dev/null
+++ b/tests/integration/pip_parse/.bazelrc
@@ -0,0 +1,7 @@
+# Bazel configuration flags
+
+build --enable_runfiles
+
+# https://docs.bazel.build/versions/main/best-practices.html#using-the-bazelrc-file
+try-import %workspace%/user.bazelrc
+
diff --git a/tests/integration/pip_parse/.gitignore b/tests/integration/pip_parse/.gitignore
new file mode 100644
index 0000000..ac51a05
--- /dev/null
+++ b/tests/integration/pip_parse/.gitignore
@@ -0,0 +1 @@
+bazel-*
diff --git a/tests/pip_install/BUILD.bazel b/tests/integration/pip_parse/BUILD.bazel
similarity index 100%
rename from tests/pip_install/BUILD.bazel
rename to tests/integration/pip_parse/BUILD.bazel
diff --git a/tests/integration/pip_parse/MODULE.bazel b/tests/integration/pip_parse/MODULE.bazel
new file mode 100644
index 0000000..4e5a2ca
--- /dev/null
+++ b/tests/integration/pip_parse/MODULE.bazel
@@ -0,0 +1,20 @@
+module(name = "compile_pip_requirements")
+
+bazel_dep(name = "rules_python", version = "0.0.0")
+local_path_override(
+    module_name = "rules_python",
+    path = "../../..",
+)
+
+python = use_extension("@rules_python//python/extensions:python.bzl", "python")
+python.toolchain(
+    python_version = "3.9",
+)
+
+pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
+pip.parse(
+    hub_name = "pip_empty",
+    python_version = "3.9",
+    requirements_lock = "//empty:requirements.txt",
+)
+use_repo(pip, "pip_empty")
diff --git a/tests/integration/pip_parse/README.md b/tests/integration/pip_parse/README.md
new file mode 100644
index 0000000..f45e54d
--- /dev/null
+++ b/tests/integration/pip_parse/README.md
@@ -0,0 +1,3 @@
+# pip_parse
+
+Tests that ensure pip_parse is working.
diff --git a/tests/integration/pip_parse/WORKSPACE b/tests/integration/pip_parse/WORKSPACE
new file mode 100644
index 0000000..db0cd0c
--- /dev/null
+++ b/tests/integration/pip_parse/WORKSPACE
@@ -0,0 +1,34 @@
+local_repository(
+    name = "rules_python",
+    path = "../../..",
+)
+
+load("@rules_python//python:repositories.bzl", "py_repositories", "python_register_toolchains")
+
+py_repositories()
+
+# This call is included in `py_repositories` and we are calling
+# `pip_install_dependencies` only to ensure that we are not breaking really old
+# code.
+#
+# TODO @aignas 2024-06-23: remove this before 1.0.0
+load("@rules_python//python/pip_install:repositories.bzl", "pip_install_dependencies")
+
+pip_install_dependencies()
+
+python_register_toolchains(
+    name = "python39",
+    python_version = "3.9",
+)
+
+load("@rules_python//python:pip.bzl", "pip_parse")
+
+pip_parse(
+    name = "pip_empty",
+    python_interpreter_target = "@python39_host//:python",
+    requirements_lock = "//empty:requirements.txt",
+)
+
+load("@pip_empty//:requirements.bzl", "install_deps")
+
+install_deps()
diff --git a/tests/pip_install/BUILD.bazel b/tests/integration/pip_parse/WORKSPACE.bzlmod
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to tests/integration/pip_parse/WORKSPACE.bzlmod
diff --git a/tests/integration/pip_parse/empty/BUILD.bazel b/tests/integration/pip_parse/empty/BUILD.bazel
new file mode 100644
index 0000000..a6780bc
--- /dev/null
+++ b/tests/integration/pip_parse/empty/BUILD.bazel
@@ -0,0 +1,12 @@
+load("@pip_empty//:requirements.bzl", "all_data_requirements", "all_requirements", "all_whl_requirements")
+load("@rules_python//python:py_test.bzl", "py_test")
+
+py_test(
+    name = "test_empty",
+    srcs = ["test_empty.py"],
+    env = {
+        "REQUIREMENTS": ",".join(all_requirements),
+        "REQUIREMENTS_DATA": ",".join(all_data_requirements),
+        "REQUIREMENTS_WHL": ",".join(all_whl_requirements),
+    },
+)
diff --git a/tests/integration/pip_parse/empty/README.md b/tests/integration/pip_parse/empty/README.md
new file mode 100644
index 0000000..2f4228e
--- /dev/null
+++ b/tests/integration/pip_parse/empty/README.md
@@ -0,0 +1,3 @@
+# empty
+
+A test that ensures that an empty requirements.txt does not break.
diff --git a/tests/pip_install/BUILD.bazel b/tests/integration/pip_parse/empty/requirements.txt
similarity index 100%
copy from tests/pip_install/BUILD.bazel
copy to tests/integration/pip_parse/empty/requirements.txt
diff --git a/tests/integration/pip_parse/empty/test_empty.py b/tests/integration/pip_parse/empty/test_empty.py
new file mode 100644
index 0000000..6b5af5a
--- /dev/null
+++ b/tests/integration/pip_parse/empty/test_empty.py
@@ -0,0 +1,13 @@
+import os
+import unittest
+
+
+class TestEmpty(unittest.TestCase):
+    def test_lists(self):
+        self.assertEqual("", os.environ["REQUIREMENTS"])
+        self.assertEqual("", os.environ["REQUIREMENTS_WHL"])
+        self.assertEqual("", os.environ["REQUIREMENTS_DATA"])
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/integration/pip_repository_entry_points/.bazelrc b/tests/integration/pip_repository_entry_points/.bazelrc
deleted file mode 100644
index 936806d..0000000
--- a/tests/integration/pip_repository_entry_points/.bazelrc
+++ /dev/null
@@ -1,11 +0,0 @@
-# Bazel configuration flags
-
-build --enable_runfiles
-startup --windows_enable_symlinks
-
-# https://docs.bazel.build/versions/main/best-practices.html#using-the-bazelrc-file
-try-import %workspace%/user.bazelrc
-
-# The requirements.bzl entry_point functions aren't supported under bzlmod.
-# They are replaced by py_console_script_binary, which already has tests
-build --noexperimental_enable_bzlmod
diff --git a/tests/integration/pip_repository_entry_points/.gitignore b/tests/integration/pip_repository_entry_points/.gitignore
deleted file mode 100644
index e5ae073..0000000
--- a/tests/integration/pip_repository_entry_points/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-# git ignore patterns
-
-/bazel-*
-user.bazelrc
diff --git a/tests/integration/pip_repository_entry_points/BUILD.bazel b/tests/integration/pip_repository_entry_points/BUILD.bazel
deleted file mode 100644
index c39b1f0..0000000
--- a/tests/integration/pip_repository_entry_points/BUILD.bazel
+++ /dev/null
@@ -1,32 +0,0 @@
-load("@pip//:requirements.bzl", "entry_point")
-load("@rules_python//python:defs.bzl", "py_test")
-load("@rules_python//python:pip.bzl", "compile_pip_requirements")
-
-# This rule adds a convenient way to update the requirements file.
-compile_pip_requirements(
-    name = "requirements",
-    src = "requirements.in",
-    requirements_windows = ":requirements_windows.txt",
-)
-
-pip_sphinx = entry_point(
-    pkg = "sphinx",
-    script = "sphinx-build",
-)
-
-pip_yamllint = entry_point("yamllint")
-
-py_test(
-    name = "pip_parse_entry_points_test",
-    srcs = ["pip_repository_entry_points_test.py"],
-    data = [
-        pip_sphinx,
-        pip_yamllint,
-    ],
-    env = {
-        "SPHINX_BUILD_ENTRY_POINT": "$(rootpath {})".format(pip_sphinx),
-        "YAMLLINT_ENTRY_POINT": "$(rootpath {})".format(pip_yamllint),
-    },
-    main = "pip_repository_entry_points_test.py",
-    deps = ["@rules_python//python/runfiles"],
-)
diff --git a/tests/integration/pip_repository_entry_points/WORKSPACE b/tests/integration/pip_repository_entry_points/WORKSPACE
deleted file mode 100644
index 0ae087b..0000000
--- a/tests/integration/pip_repository_entry_points/WORKSPACE
+++ /dev/null
@@ -1,32 +0,0 @@
-workspace(name = "pip_entry_points_example")
-
-local_repository(
-    name = "rules_python",
-    path = "../../..",
-)
-
-load("@rules_python//python:repositories.bzl", "py_repositories", "python_register_toolchains")
-
-py_repositories()
-
-# This toolchain is explicitly 3.10 while `rules_python` is 3.9 to act as
-# a regression test, ensuring 3.10 is functional
-python_register_toolchains(
-    name = "python310",
-    python_version = "3.10",
-)
-
-load("@python310//:defs.bzl", "interpreter")
-load("@rules_python//python:pip.bzl", "pip_parse")
-
-# For a more thorough example of `pip_parse`. See `@rules_python//examples/pip_parse`
-pip_parse(
-    name = "pip",
-    python_interpreter_target = interpreter,
-    requirements_lock = "//:requirements.txt",
-    requirements_windows = "//:requirements_windows.txt",
-)
-
-load("@pip//:requirements.bzl", "install_deps")
-
-install_deps()
diff --git a/tests/integration/pip_repository_entry_points/pip_repository_entry_points_test.py b/tests/integration/pip_repository_entry_points/pip_repository_entry_points_test.py
deleted file mode 100644
index 0375153..0000000
--- a/tests/integration/pip_repository_entry_points/pip_repository_entry_points_test.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-import os
-import subprocess
-import unittest
-from pathlib import Path
-
-from rules_python.python.runfiles import runfiles
-
-
-class PipRepositoryEntryPointsTest(unittest.TestCase):
-    maxDiff = None
-
-    def test_entry_point_void_return(self):
-        env = os.environ.get("YAMLLINT_ENTRY_POINT")
-        self.assertIsNotNone(env)
-
-        r = runfiles.Create()
-        entry_point = Path(r.Rlocation(str(Path(*Path(env).parts[1:]))))
-        self.assertTrue(entry_point.exists())
-
-        proc = subprocess.run(
-            [str(entry_point), "--version"],
-            check=True,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-        )
-        self.assertEqual(proc.stdout.decode("utf-8").strip(), "yamllint 1.28.0")
-
-        # yamllint entry_point is of the form `def run(argv=None):`
-        with self.assertRaises(subprocess.CalledProcessError) as context:
-            subprocess.run(
-                [str(entry_point), "--option-does-not-exist"],
-                check=True,
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE,
-            )
-        self.assertIn("returned non-zero exit status 2", str(context.exception))
-
-    def test_entry_point_int_return(self):
-        env = os.environ.get("SPHINX_BUILD_ENTRY_POINT")
-        self.assertIsNotNone(env)
-
-        r = runfiles.Create()
-        entry_point = Path(r.Rlocation(str(Path(*Path(env).parts[1:]))))
-        self.assertTrue(entry_point.exists())
-
-        proc = subprocess.run(
-            [str(entry_point), "--version"],
-            check=True,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-        )
-        # sphinx-build uses args[0] for its name, only assert the version here
-        self.assertTrue(proc.stdout.decode("utf-8").strip().endswith("4.3.2"))
-
-        # sphinx-build entry_point is of the form `def main(argv: List[str] = sys.argv[1:]) -> int:`
-        with self.assertRaises(subprocess.CalledProcessError) as context:
-            subprocess.run(
-                [entry_point, "--option-does-not-exist"],
-                check=True,
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE,
-            )
-        self.assertIn("returned non-zero exit status 2", str(context.exception))
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/tests/integration/pip_repository_entry_points/requirements.in b/tests/integration/pip_repository_entry_points/requirements.in
deleted file mode 100644
index f5391eb..0000000
--- a/tests/integration/pip_repository_entry_points/requirements.in
+++ /dev/null
@@ -1,8 +0,0 @@
-sphinx==4.3.2
-yamllint>=1.28.0
-
-# Last available for Ubuntu python3.6
-setuptools==65.5.1
-
-certifi>=2023.7.22  # https://security.snyk.io/vuln/SNYK-PYTHON-CERTIFI-5805047
-
diff --git a/tests/integration/pip_repository_entry_points/requirements.txt b/tests/integration/pip_repository_entry_points/requirements.txt
deleted file mode 100644
index 1ef1f9d..0000000
--- a/tests/integration/pip_repository_entry_points/requirements.txt
+++ /dev/null
@@ -1,219 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.10
-# by the following command:
-#
-#    bazel run //:requirements.update
-#
-alabaster==0.7.12 \
-    --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \
-    --hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02
-    # via sphinx
-babel==2.9.1 \
-    --hash=sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9 \
-    --hash=sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0
-    # via sphinx
-certifi==2023.7.22 \
-    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
-    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
-    # via
-    #   -r requirements.in
-    #   requests
-charset-normalizer==2.0.10 \
-    --hash=sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd \
-    --hash=sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455
-    # via requests
-docutils==0.17.1 \
-    --hash=sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125 \
-    --hash=sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61
-    # via sphinx
-idna==3.3 \
-    --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \
-    --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d
-    # via requests
-imagesize==1.3.0 \
-    --hash=sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c \
-    --hash=sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d
-    # via sphinx
-jinja2==3.0.3 \
-    --hash=sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8 \
-    --hash=sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7
-    # via sphinx
-markupsafe==2.0.1 \
-    --hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \
-    --hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \
-    --hash=sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b \
-    --hash=sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194 \
-    --hash=sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567 \
-    --hash=sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff \
-    --hash=sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724 \
-    --hash=sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74 \
-    --hash=sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646 \
-    --hash=sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35 \
-    --hash=sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6 \
-    --hash=sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a \
-    --hash=sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6 \
-    --hash=sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad \
-    --hash=sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26 \
-    --hash=sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38 \
-    --hash=sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac \
-    --hash=sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7 \
-    --hash=sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6 \
-    --hash=sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047 \
-    --hash=sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75 \
-    --hash=sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f \
-    --hash=sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b \
-    --hash=sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135 \
-    --hash=sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8 \
-    --hash=sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a \
-    --hash=sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a \
-    --hash=sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1 \
-    --hash=sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9 \
-    --hash=sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864 \
-    --hash=sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914 \
-    --hash=sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee \
-    --hash=sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f \
-    --hash=sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18 \
-    --hash=sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8 \
-    --hash=sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2 \
-    --hash=sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d \
-    --hash=sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b \
-    --hash=sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b \
-    --hash=sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86 \
-    --hash=sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6 \
-    --hash=sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f \
-    --hash=sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb \
-    --hash=sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833 \
-    --hash=sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28 \
-    --hash=sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e \
-    --hash=sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415 \
-    --hash=sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902 \
-    --hash=sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f \
-    --hash=sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d \
-    --hash=sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9 \
-    --hash=sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d \
-    --hash=sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145 \
-    --hash=sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066 \
-    --hash=sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c \
-    --hash=sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1 \
-    --hash=sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a \
-    --hash=sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207 \
-    --hash=sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f \
-    --hash=sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53 \
-    --hash=sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd \
-    --hash=sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134 \
-    --hash=sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85 \
-    --hash=sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9 \
-    --hash=sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5 \
-    --hash=sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94 \
-    --hash=sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509 \
-    --hash=sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51 \
-    --hash=sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872
-    # via jinja2
-packaging==21.3 \
-    --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \
-    --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
-    # via sphinx
-pathspec==0.9.0 \
-    --hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \
-    --hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1
-    # via yamllint
-pygments==2.11.2 \
-    --hash=sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65 \
-    --hash=sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a
-    # via sphinx
-pyparsing==3.0.6 \
-    --hash=sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4 \
-    --hash=sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81
-    # via packaging
-pytz==2021.3 \
-    --hash=sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c \
-    --hash=sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326
-    # via babel
-pyyaml==6.0 \
-    --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
-    --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
-    --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
-    --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
-    --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
-    --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
-    --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
-    --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
-    --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
-    --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
-    --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
-    --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
-    --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
-    --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
-    --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
-    --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
-    --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
-    --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
-    --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
-    --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
-    --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
-    --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
-    --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
-    --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
-    --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
-    --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
-    --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
-    --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
-    --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
-    --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
-    --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
-    --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
-    --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
-    # via yamllint
-requests==2.27.1 \
-    --hash=sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61 \
-    --hash=sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d
-    # via sphinx
-snowballstemmer==2.2.0 \
-    --hash=sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1 \
-    --hash=sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a
-    # via sphinx
-sphinx==4.3.2 \
-    --hash=sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c \
-    --hash=sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851
-    # via -r requirements.in
-sphinxcontrib-applehelp==1.0.2 \
-    --hash=sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a \
-    --hash=sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58
-    # via sphinx
-sphinxcontrib-devhelp==1.0.2 \
-    --hash=sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e \
-    --hash=sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4
-    # via sphinx
-sphinxcontrib-htmlhelp==2.0.0 \
-    --hash=sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07 \
-    --hash=sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2
-    # via sphinx
-sphinxcontrib-jsmath==1.0.1 \
-    --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \
-    --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8
-    # via sphinx
-sphinxcontrib-qthelp==1.0.3 \
-    --hash=sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72 \
-    --hash=sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6
-    # via sphinx
-sphinxcontrib-serializinghtml==1.1.5 \
-    --hash=sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd \
-    --hash=sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952
-    # via sphinx
-urllib3==1.26.7 \
-    --hash=sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece \
-    --hash=sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844
-    # via requests
-yamllint==1.28.0 \
-    --hash=sha256:89bb5b5ac33b1ade059743cf227de73daa34d5e5a474b06a5e17fc16583b0cf2 \
-    --hash=sha256:9e3d8ddd16d0583214c5fdffe806c9344086721f107435f68bad990e5a88826b
-    # via -r requirements.in
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==65.5.1 \
-    --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \
-    --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f
-    # via
-    #   -r requirements.in
-    #   sphinx
-    #   yamllint
diff --git a/tests/integration/pip_repository_entry_points/requirements_windows.txt b/tests/integration/pip_repository_entry_points/requirements_windows.txt
deleted file mode 100644
index aeff522..0000000
--- a/tests/integration/pip_repository_entry_points/requirements_windows.txt
+++ /dev/null
@@ -1,223 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.10
-# by the following command:
-#
-#    bazel run //:requirements.update
-#
-alabaster==0.7.12 \
-    --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \
-    --hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02
-    # via sphinx
-babel==2.9.1 \
-    --hash=sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9 \
-    --hash=sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0
-    # via sphinx
-certifi==2023.7.22 \
-    --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
-    --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
-    # via
-    #   -r requirements.in
-    #   requests
-charset-normalizer==2.0.10 \
-    --hash=sha256:876d180e9d7432c5d1dfd4c5d26b72f099d503e8fcc0feb7532c9289be60fcbd \
-    --hash=sha256:cb957888737fc0bbcd78e3df769addb41fd1ff8cf950dc9e7ad7793f1bf44455
-    # via requests
-colorama==0.4.6 \
-    --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
-    --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
-    # via sphinx
-docutils==0.17.1 \
-    --hash=sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125 \
-    --hash=sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61
-    # via sphinx
-idna==3.3 \
-    --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \
-    --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d
-    # via requests
-imagesize==1.3.0 \
-    --hash=sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c \
-    --hash=sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d
-    # via sphinx
-jinja2==3.0.3 \
-    --hash=sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8 \
-    --hash=sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7
-    # via sphinx
-markupsafe==2.0.1 \
-    --hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \
-    --hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \
-    --hash=sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b \
-    --hash=sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194 \
-    --hash=sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567 \
-    --hash=sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff \
-    --hash=sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724 \
-    --hash=sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74 \
-    --hash=sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646 \
-    --hash=sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35 \
-    --hash=sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6 \
-    --hash=sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a \
-    --hash=sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6 \
-    --hash=sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad \
-    --hash=sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26 \
-    --hash=sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38 \
-    --hash=sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac \
-    --hash=sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7 \
-    --hash=sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6 \
-    --hash=sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047 \
-    --hash=sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75 \
-    --hash=sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f \
-    --hash=sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b \
-    --hash=sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135 \
-    --hash=sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8 \
-    --hash=sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a \
-    --hash=sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a \
-    --hash=sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1 \
-    --hash=sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9 \
-    --hash=sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864 \
-    --hash=sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914 \
-    --hash=sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee \
-    --hash=sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f \
-    --hash=sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18 \
-    --hash=sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8 \
-    --hash=sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2 \
-    --hash=sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d \
-    --hash=sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b \
-    --hash=sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b \
-    --hash=sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86 \
-    --hash=sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6 \
-    --hash=sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f \
-    --hash=sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb \
-    --hash=sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833 \
-    --hash=sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28 \
-    --hash=sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e \
-    --hash=sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415 \
-    --hash=sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902 \
-    --hash=sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f \
-    --hash=sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d \
-    --hash=sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9 \
-    --hash=sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d \
-    --hash=sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145 \
-    --hash=sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066 \
-    --hash=sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c \
-    --hash=sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1 \
-    --hash=sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a \
-    --hash=sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207 \
-    --hash=sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f \
-    --hash=sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53 \
-    --hash=sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd \
-    --hash=sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134 \
-    --hash=sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85 \
-    --hash=sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9 \
-    --hash=sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5 \
-    --hash=sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94 \
-    --hash=sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509 \
-    --hash=sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51 \
-    --hash=sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872
-    # via jinja2
-packaging==21.3 \
-    --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \
-    --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
-    # via sphinx
-pathspec==0.9.0 \
-    --hash=sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a \
-    --hash=sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1
-    # via yamllint
-pygments==2.15.0 \
-    --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \
-    --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500
-    # via sphinx
-pyparsing==3.0.6 \
-    --hash=sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4 \
-    --hash=sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81
-    # via packaging
-pytz==2021.3 \
-    --hash=sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c \
-    --hash=sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326
-    # via babel
-pyyaml==6.0 \
-    --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \
-    --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \
-    --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \
-    --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \
-    --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \
-    --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \
-    --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \
-    --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \
-    --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \
-    --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \
-    --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \
-    --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \
-    --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \
-    --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \
-    --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \
-    --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \
-    --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \
-    --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \
-    --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \
-    --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \
-    --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \
-    --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \
-    --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \
-    --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \
-    --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \
-    --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \
-    --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \
-    --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \
-    --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \
-    --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \
-    --hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \
-    --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \
-    --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5
-    # via yamllint
-requests==2.31.0 \
-    --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
-    --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
-    # via sphinx
-snowballstemmer==2.2.0 \
-    --hash=sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1 \
-    --hash=sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a
-    # via sphinx
-sphinx==4.3.2 \
-    --hash=sha256:0a8836751a68306b3fe97ecbe44db786f8479c3bf4b80e3a7f5c838657b4698c \
-    --hash=sha256:6a11ea5dd0bdb197f9c2abc2e0ce73e01340464feaece525e64036546d24c851
-    # via -r requirements.in
-sphinxcontrib-applehelp==1.0.2 \
-    --hash=sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a \
-    --hash=sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58
-    # via sphinx
-sphinxcontrib-devhelp==1.0.2 \
-    --hash=sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e \
-    --hash=sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4
-    # via sphinx
-sphinxcontrib-htmlhelp==2.0.0 \
-    --hash=sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07 \
-    --hash=sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2
-    # via sphinx
-sphinxcontrib-jsmath==1.0.1 \
-    --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \
-    --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8
-    # via sphinx
-sphinxcontrib-qthelp==1.0.3 \
-    --hash=sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72 \
-    --hash=sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6
-    # via sphinx
-sphinxcontrib-serializinghtml==1.1.5 \
-    --hash=sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd \
-    --hash=sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952
-    # via sphinx
-urllib3==1.26.18 \
-    --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
-    --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
-    # via requests
-yamllint==1.28.0 \
-    --hash=sha256:89bb5b5ac33b1ade059743cf227de73daa34d5e5a474b06a5e17fc16583b0cf2 \
-    --hash=sha256:9e3d8ddd16d0583214c5fdffe806c9344086721f107435f68bad990e5a88826b
-    # via -r requirements.in
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==65.5.1 \
-    --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \
-    --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f
-    # via
-    #   -r requirements.in
-    #   sphinx
-    #   yamllint
diff --git a/tests/integration/runner.py b/tests/integration/runner.py
new file mode 100644
index 0000000..9414a86
--- /dev/null
+++ b/tests/integration/runner.py
@@ -0,0 +1,131 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import os
+import os.path
+import pathlib
+import re
+import shlex
+import subprocess
+import unittest
+
+_logger = logging.getLogger(__name__)
+
+class ExecuteError(Exception):
+    def __init__(self, result):
+        self.result = result
+    def __str__(self):
+        return self.result.describe()
+
+class ExecuteResult:
+    def __init__(
+        self,
+        args: list[str],
+        env: dict[str, str],
+        cwd: pathlib.Path,
+        proc_result: subprocess.CompletedProcess,
+    ):
+        self.args = args
+        self.env = env
+        self.cwd = cwd
+        self.exit_code = proc_result.returncode
+        self.stdout = proc_result.stdout
+        self.stderr = proc_result.stderr
+
+    def describe(self) -> str:
+        env_lines = [
+            "  " + shlex.quote(f"{key}={value}")
+            for key, value in sorted(self.env.items())
+        ]
+        env = " \\\n".join(env_lines)
+        args = shlex.join(self.args)
+        maybe_stdout_nl = "" if self.stdout.endswith("\n") else "\n"
+        maybe_stderr_nl = "" if self.stderr.endswith("\n") else "\n"
+        return f"""\
+COMMAND:
+cd {self.cwd} && \\
+env \\
+{env} \\
+  {args}
+RESULT: exit_code: {self.exit_code}
+===== STDOUT START =====
+{self.stdout}{maybe_stdout_nl}===== STDOUT END   =====
+===== STDERR START =====
+{self.stderr}{maybe_stderr_nl}===== STDERR END   =====
+"""
+
+
+class TestCase(unittest.TestCase):
+    def setUp(self):
+        super().setUp()
+        self.repo_root = pathlib.Path(os.environ["BIT_WORKSPACE_DIR"])
+        self.bazel = pathlib.Path(os.environ["BIT_BAZEL_BINARY"])
+        outer_test_tmpdir = pathlib.Path(os.environ["TEST_TMPDIR"])
+        self.test_tmp_dir = outer_test_tmpdir / "bit_test_tmp"
+        # Put the global tmp not under the test tmp to better match how a real
+        # execution has entirely different directories for these.
+        self.tmp_dir = outer_test_tmpdir / "bit_tmp"
+        self.bazel_env = {
+            "PATH": os.environ["PATH"],
+            "TEST_TMPDIR": str(self.test_tmp_dir),
+            "TMP": str(self.tmp_dir),
+            # For some reason, this is necessary for Bazel 6.4 to work.
+            # If not present, it can't find some bash helpers in @bazel_tools
+            "RUNFILES_DIR": os.environ["TEST_SRCDIR"]
+        }
+
+    def run_bazel(self, *args: str, check: bool = True) -> ExecuteResult:
+        """Run a bazel invocation.
+
+        Args:
+            *args: The args to pass to bazel; the leading `bazel` command is
+                added automatically
+            check: If True, raise `ExecuteError` when the invocation
+                exits non-zero; if False, return the result regardless.
+        Returns:
+            An `ExecuteResult` from running Bazel
+        """
+        args = [str(self.bazel), *args]
+        env = self.bazel_env
+        _logger.info("executing: %s", shlex.join(args))
+        cwd = self.repo_root
+        proc_result = subprocess.run(
+            args=args,
+            text=True,
+            capture_output=True,
+            cwd=cwd,
+            env=env,
+            check=False,
+        )
+        exec_result = ExecuteResult(args, env, cwd, proc_result)
+        if check and exec_result.exit_code:
+            raise ExecuteError(exec_result)
+        else:
+            return exec_result
+
+    def assert_result_matches(self, result: ExecuteResult, regex: str) -> None:
+        """Assert stdout/stderr of an invocation matches a regex.
+
+        Args:
+            result: ExecuteResult from `run_bazel` whose stdout/stderr will
+                be checked.
+            regex: Pattern to match, using `re.search` semantics.
+        """
+        if not re.search(regex, result.stdout + result.stderr):
+            self.fail(
+                "Bazel output did not match expected pattern\n"
+                + f"expected pattern: {regex}\n"
+                + f"invocation details:\n{result.describe()}"
+            )
diff --git a/tests/multiple_inputs/BUILD.bazel b/tests/multiple_inputs/BUILD.bazel
new file mode 100644
index 0000000..3e3cab8
--- /dev/null
+++ b/tests/multiple_inputs/BUILD.bazel
@@ -0,0 +1,30 @@
+load("@rules_python//python:pip.bzl", "compile_pip_requirements")
+
+compile_pip_requirements(
+    name = "multiple_requirements_in",
+    srcs = [
+        "requirements_1.in",
+        "requirements_2.in",
+    ],
+    requirements_txt = "multiple_requirements_in.txt",
+)
+
+compile_pip_requirements(
+    name = "multiple_pyproject_toml",
+    srcs = [
+        "a/pyproject.toml",
+        "b/pyproject.toml",
+    ],
+    requirements_txt = "multiple_pyproject_toml.txt",
+)
+
+compile_pip_requirements(
+    name = "multiple_inputs",
+    srcs = [
+        "a/pyproject.toml",
+        "b/pyproject.toml",
+        "requirements_1.in",
+        "requirements_2.in",
+    ],
+    requirements_txt = "multiple_inputs.txt",
+)
diff --git a/tests/multiple_inputs/README.md b/tests/multiple_inputs/README.md
new file mode 100644
index 0000000..7b6bade
--- /dev/null
+++ b/tests/multiple_inputs/README.md
@@ -0,0 +1,3 @@
+# multiple_inputs
+
+Test that `compile_pip_requirements` works as intended when using more than one input file.
diff --git a/tests/multiple_inputs/a/pyproject.toml b/tests/multiple_inputs/a/pyproject.toml
new file mode 100644
index 0000000..91efec3
--- /dev/null
+++ b/tests/multiple_inputs/a/pyproject.toml
@@ -0,0 +1,5 @@
+[project]
+name = "multiple_inputs_1"
+version = "0.0.0"
+
+dependencies = ["urllib3"]
diff --git a/tests/multiple_inputs/b/pyproject.toml b/tests/multiple_inputs/b/pyproject.toml
new file mode 100644
index 0000000..a461f4e
--- /dev/null
+++ b/tests/multiple_inputs/b/pyproject.toml
@@ -0,0 +1,5 @@
+[project]
+name = "multiple_inputs_2"
+version = "0.0.0"
+
+dependencies = ["attrs"]
diff --git a/tests/multiple_inputs/multiple_inputs.txt b/tests/multiple_inputs/multiple_inputs.txt
new file mode 100644
index 0000000..a036c3f
--- /dev/null
+++ b/tests/multiple_inputs/multiple_inputs.txt
@@ -0,0 +1,18 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+#    bazel run //tests/multiple_inputs:multiple_inputs.update
+#
+attrs==23.1.0 \
+    --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
+    --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
+    # via
+    #   -r tests/multiple_inputs/requirements_2.in
+    #   multiple_inputs_2 (tests/multiple_inputs/b/pyproject.toml)
+urllib3==2.0.7 \
+    --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \
+    --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e
+    # via
+    #   -r tests/multiple_inputs/requirements_1.in
+    #   multiple_inputs_1 (tests/multiple_inputs/a/pyproject.toml)
diff --git a/tests/multiple_inputs/multiple_pyproject_toml.txt b/tests/multiple_inputs/multiple_pyproject_toml.txt
new file mode 100644
index 0000000..b8af28a
--- /dev/null
+++ b/tests/multiple_inputs/multiple_pyproject_toml.txt
@@ -0,0 +1,14 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+#    bazel run //tests/multiple_inputs:multiple_pyproject_toml.update
+#
+attrs==23.1.0 \
+    --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
+    --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
+    # via multiple_inputs_2 (tests/multiple_inputs/b/pyproject.toml)
+urllib3==2.0.7 \
+    --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \
+    --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e
+    # via multiple_inputs_1 (tests/multiple_inputs/a/pyproject.toml)
diff --git a/tests/multiple_inputs/multiple_requirements_in.txt b/tests/multiple_inputs/multiple_requirements_in.txt
new file mode 100644
index 0000000..63edfe9
--- /dev/null
+++ b/tests/multiple_inputs/multiple_requirements_in.txt
@@ -0,0 +1,14 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+#    bazel run //tests/multiple_inputs:multiple_requirements_in.update
+#
+attrs==23.1.0 \
+    --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \
+    --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015
+    # via -r tests/multiple_inputs/requirements_2.in
+urllib3==2.0.7 \
+    --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \
+    --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e
+    # via -r tests/multiple_inputs/requirements_1.in
diff --git a/tests/multiple_inputs/requirements_1.in b/tests/multiple_inputs/requirements_1.in
new file mode 100644
index 0000000..a42590b
--- /dev/null
+++ b/tests/multiple_inputs/requirements_1.in
@@ -0,0 +1 @@
+urllib3
diff --git a/tests/multiple_inputs/requirements_2.in b/tests/multiple_inputs/requirements_2.in
new file mode 100644
index 0000000..04cb102
--- /dev/null
+++ b/tests/multiple_inputs/requirements_2.in
@@ -0,0 +1 @@
+attrs
diff --git a/tests/pip_hub_repository/normalize_name/BUILD.bazel b/tests/normalize_name/BUILD.bazel
similarity index 100%
rename from tests/pip_hub_repository/normalize_name/BUILD.bazel
rename to tests/normalize_name/BUILD.bazel
diff --git a/tests/pip_hub_repository/normalize_name/normalize_name_tests.bzl b/tests/normalize_name/normalize_name_tests.bzl
similarity index 100%
rename from tests/pip_hub_repository/normalize_name/normalize_name_tests.bzl
rename to tests/normalize_name/normalize_name_tests.bzl
diff --git a/tests/pip_hub_repository/render_pkg_aliases/render_pkg_aliases_test.bzl b/tests/pip_hub_repository/render_pkg_aliases/render_pkg_aliases_test.bzl
deleted file mode 100644
index dff7cd0..0000000
--- a/tests/pip_hub_repository/render_pkg_aliases/render_pkg_aliases_test.bzl
+++ /dev/null
@@ -1,338 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""render_pkg_aliases tests"""
-
-load("@rules_testing//lib:test_suite.bzl", "test_suite")
-load("//python/private:render_pkg_aliases.bzl", "render_pkg_aliases")  # buildifier: disable=bzl-visibility
-
-_tests = []
-
-def _test_legacy_aliases(env):
-    actual = render_pkg_aliases(
-        bzl_packages = ["foo"],
-        repo_name = "pypi",
-    )
-
-    want = {
-        "foo/BUILD.bazel": """\
-package(default_visibility = ["//visibility:public"])
-
-alias(
-    name = "foo",
-    actual = ":pkg",
-)
-
-alias(
-    name = "pkg",
-    actual = "@pypi_foo//:pkg",
-)
-
-alias(
-    name = "whl",
-    actual = "@pypi_foo//:whl",
-)
-
-alias(
-    name = "data",
-    actual = "@pypi_foo//:data",
-)
-
-alias(
-    name = "dist_info",
-    actual = "@pypi_foo//:dist_info",
-)""",
-    }
-
-    env.expect.that_dict(actual).contains_exactly(want)
-
-_tests.append(_test_legacy_aliases)
-
-def _test_all_legacy_aliases_are_created(env):
-    actual = render_pkg_aliases(
-        bzl_packages = ["foo", "bar"],
-        repo_name = "pypi",
-    )
-
-    want_files = ["bar/BUILD.bazel", "foo/BUILD.bazel"]
-
-    env.expect.that_dict(actual).keys().contains_exactly(want_files)
-
-_tests.append(_test_all_legacy_aliases_are_created)
-
-def _test_bzlmod_aliases(env):
-    actual = render_pkg_aliases(
-        default_version = "3.2.3",
-        repo_name = "pypi",
-        rules_python = "rules_python",
-        whl_map = {
-            "bar-baz": ["3.2.3"],
-        },
-    )
-
-    want = {
-        "bar_baz/BUILD.bazel": """\
-package(default_visibility = ["//visibility:public"])
-
-alias(
-    name = "bar_baz",
-    actual = ":pkg",
-)
-
-alias(
-    name = "pkg",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:pkg",
-            "//conditions:default": "@pypi_32_bar_baz//:pkg",
-        },
-    ),
-)
-
-alias(
-    name = "whl",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:whl",
-            "//conditions:default": "@pypi_32_bar_baz//:whl",
-        },
-    ),
-)
-
-alias(
-    name = "data",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:data",
-            "//conditions:default": "@pypi_32_bar_baz//:data",
-        },
-    ),
-)
-
-alias(
-    name = "dist_info",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:dist_info",
-            "//conditions:default": "@pypi_32_bar_baz//:dist_info",
-        },
-    ),
-)""",
-    }
-
-    env.expect.that_dict(actual).contains_exactly(want)
-
-_tests.append(_test_bzlmod_aliases)
-
-def _test_bzlmod_aliases_with_no_default_version(env):
-    actual = render_pkg_aliases(
-        default_version = None,
-        repo_name = "pypi",
-        rules_python = "rules_python",
-        whl_map = {
-            "bar-baz": ["3.2.3", "3.1.3"],
-        },
-    )
-
-    want_key = "bar_baz/BUILD.bazel"
-    want_content = """\
-package(default_visibility = ["//visibility:public"])
-
-_NO_MATCH_ERROR = \"\"\"\\
-No matching wheel for current configuration's Python version.
-
-The current build configuration's Python version doesn't match any of the Python
-versions available for this wheel. This wheel supports the following Python versions:
-    3.1.3, 3.2.3
-
-As matched by the `@rules_python//python/config_settings:is_python_<version>`
-configuration settings.
-
-To determine the current configuration's Python version, run:
-    `bazel config <config id>` (shown further below)
-and look for
-    rules_python//python/config_settings:python_version
-
-If the value is missing, then the "default" Python version is being used,
-which has a "null" version value and will not match version constraints.
-\"\"\"
-
-alias(
-    name = "bar_baz",
-    actual = ":pkg",
-)
-
-alias(
-    name = "pkg",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.1.3": "@pypi_31_bar_baz//:pkg",
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:pkg",
-        },
-        no_match_error = _NO_MATCH_ERROR,
-    ),
-)
-
-alias(
-    name = "whl",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.1.3": "@pypi_31_bar_baz//:whl",
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:whl",
-        },
-        no_match_error = _NO_MATCH_ERROR,
-    ),
-)
-
-alias(
-    name = "data",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.1.3": "@pypi_31_bar_baz//:data",
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:data",
-        },
-        no_match_error = _NO_MATCH_ERROR,
-    ),
-)
-
-alias(
-    name = "dist_info",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.1.3": "@pypi_31_bar_baz//:dist_info",
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:dist_info",
-        },
-        no_match_error = _NO_MATCH_ERROR,
-    ),
-)"""
-
-    env.expect.that_collection(actual.keys()).contains_exactly([want_key])
-    env.expect.that_str(actual[want_key]).equals(want_content)
-
-_tests.append(_test_bzlmod_aliases_with_no_default_version)
-
-def _test_bzlmod_aliases_for_non_root_modules(env):
-    actual = render_pkg_aliases(
-        default_version = "3.2.4",
-        repo_name = "pypi",
-        rules_python = "rules_python",
-        whl_map = {
-            "bar-baz": ["3.2.3", "3.1.3"],
-        },
-    )
-
-    want_key = "bar_baz/BUILD.bazel"
-    want_content = """\
-package(default_visibility = ["//visibility:public"])
-
-_NO_MATCH_ERROR = \"\"\"\\
-No matching wheel for current configuration's Python version.
-
-The current build configuration's Python version doesn't match any of the Python
-versions available for this wheel. This wheel supports the following Python versions:
-    3.1.3, 3.2.3
-
-As matched by the `@rules_python//python/config_settings:is_python_<version>`
-configuration settings.
-
-To determine the current configuration's Python version, run:
-    `bazel config <config id>` (shown further below)
-and look for
-    rules_python//python/config_settings:python_version
-
-If the value is missing, then the "default" Python version is being used,
-which has a "null" version value and will not match version constraints.
-\"\"\"
-
-alias(
-    name = "bar_baz",
-    actual = ":pkg",
-)
-
-alias(
-    name = "pkg",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.1.3": "@pypi_31_bar_baz//:pkg",
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:pkg",
-        },
-        no_match_error = _NO_MATCH_ERROR,
-    ),
-)
-
-alias(
-    name = "whl",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.1.3": "@pypi_31_bar_baz//:whl",
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:whl",
-        },
-        no_match_error = _NO_MATCH_ERROR,
-    ),
-)
-
-alias(
-    name = "data",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.1.3": "@pypi_31_bar_baz//:data",
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:data",
-        },
-        no_match_error = _NO_MATCH_ERROR,
-    ),
-)
-
-alias(
-    name = "dist_info",
-    actual = select(
-        {
-            "@@rules_python//python/config_settings:is_python_3.1.3": "@pypi_31_bar_baz//:dist_info",
-            "@@rules_python//python/config_settings:is_python_3.2.3": "@pypi_32_bar_baz//:dist_info",
-        },
-        no_match_error = _NO_MATCH_ERROR,
-    ),
-)"""
-
-    env.expect.that_collection(actual.keys()).contains_exactly([want_key])
-    env.expect.that_str(actual[want_key]).equals(want_content)
-
-_tests.append(_test_bzlmod_aliases_for_non_root_modules)
-
-def _test_bzlmod_aliases_are_created_for_all_wheels(env):
-    actual = render_pkg_aliases(
-        default_version = "3.2.3",
-        repo_name = "pypi",
-        rules_python = "rules_python",
-        whl_map = {
-            "bar": ["3.1.2", "3.2.3"],
-            "foo": ["3.1.2", "3.2.3"],
-        },
-    )
-
-    want_files = [
-        "bar/BUILD.bazel",
-        "foo/BUILD.bazel",
-    ]
-
-    env.expect.that_dict(actual).keys().contains_exactly(want_files)
-
-_tests.append(_test_bzlmod_aliases_are_created_for_all_wheels)
-
-def render_pkg_aliases_test_suite(name):
-    """Create the test suite.
-
-    Args:
-        name: the name of the test suite
-    """
-    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/pip_install/group_library/BUILD.bazel b/tests/pip_install/group_library/BUILD.bazel
deleted file mode 100644
index 5a27e11..0000000
--- a/tests/pip_install/group_library/BUILD.bazel
+++ /dev/null
@@ -1,3 +0,0 @@
-load(":generate_build_bazel_tests.bzl", "generate_build_bazel_test_suite")
-
-generate_build_bazel_test_suite(name = "generate_build_bazel_tests")
diff --git a/tests/pip_install/group_library/generate_build_bazel_tests.bzl b/tests/pip_install/group_library/generate_build_bazel_tests.bzl
deleted file mode 100644
index e7d6b44..0000000
--- a/tests/pip_install/group_library/generate_build_bazel_tests.bzl
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-""
-
-load("@rules_testing//lib:test_suite.bzl", "test_suite")
-load("//python/pip_install/private:generate_group_library_build_bazel.bzl", "generate_group_library_build_bazel")  # buildifier: disable=bzl-visibility
-
-_tests = []
-
-def _test_simple(env):
-    want = """\
-load("@rules_python//python:defs.bzl", "py_library", "py_binary")
-
-
-## Group vbap
-
-filegroup(
-    name = "vbap_whl",
-    srcs = [],
-    data = ["@pypi_oletools//:_whl", "@pypi_pcodedmp//:_whl"],
-    visibility = ["@pypi_oletools//:__pkg__", "@pypi_pcodedmp//:__pkg__"],
-)
-
-py_library(
-    name = "vbap_pkg",
-    srcs = [],
-    deps = ["@pypi_oletools//:_pkg", "@pypi_pcodedmp//:_pkg"],
-    visibility = ["@pypi_oletools//:__pkg__", "@pypi_pcodedmp//:__pkg__"],
-)
-"""
-    actual = generate_group_library_build_bazel(
-        repo_prefix = "pypi_",
-        groups = {"vbap": ["oletools", "pcodedmp"]},
-    )
-    env.expect.that_str(actual).equals(want)
-
-_tests.append(_test_simple)
-
-def generate_build_bazel_test_suite(name):
-    """Create the test suite.
-
-    Args:
-        name: the name of the test suite
-    """
-    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/pip_install/whl_library/BUILD.bazel b/tests/pip_install/whl_library/BUILD.bazel
deleted file mode 100644
index 5a27e11..0000000
--- a/tests/pip_install/whl_library/BUILD.bazel
+++ /dev/null
@@ -1,3 +0,0 @@
-load(":generate_build_bazel_tests.bzl", "generate_build_bazel_test_suite")
-
-generate_build_bazel_test_suite(name = "generate_build_bazel_tests")
diff --git a/tests/pip_install/whl_library/generate_build_bazel_tests.bzl b/tests/pip_install/whl_library/generate_build_bazel_tests.bzl
deleted file mode 100644
index b89477f..0000000
--- a/tests/pip_install/whl_library/generate_build_bazel_tests.bzl
+++ /dev/null
@@ -1,397 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-""
-
-load("@rules_testing//lib:test_suite.bzl", "test_suite")
-load("//python/pip_install/private:generate_whl_library_build_bazel.bzl", "generate_whl_library_build_bazel")  # buildifier: disable=bzl-visibility
-
-_tests = []
-
-def _test_simple(env):
-    want = """\
-load("@rules_python//python:defs.bzl", "py_library", "py_binary")
-load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
-
-package(default_visibility = ["//visibility:public"])
-
-filegroup(
-    name = "dist_info",
-    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
-)
-
-filegroup(
-    name = "data",
-    srcs = glob(["data/**"], allow_empty = True),
-)
-
-filegroup(
-    name = "_whl",
-    srcs = ["foo.whl"],
-    data = [
-        "@pypi_bar_baz//:whl",
-        "@pypi_foo//:whl",
-    ] + select(
-        {
-            "@platforms//os:windows": ["@pypi_colorama//:whl"],
-            "//conditions:default": [],
-        },
-    ),
-    visibility = ["//visibility:private"],
-)
-
-py_library(
-    name = "_pkg",
-    srcs = glob(
-        ["site-packages/**/*.py"],
-        exclude=[],
-        # Empty sources are allowed to support wheels that don't have any
-        # pure-Python code, e.g. pymssql, which is written in Cython.
-        allow_empty = True,
-    ),
-    data = [] + glob(
-        ["site-packages/**/*"],
-        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD"],
-    ),
-    # This makes this directory a top-level in the python import
-    # search path for anything that depends on this.
-    imports = ["site-packages"],
-    deps = [
-        "@pypi_bar_baz//:pkg",
-        "@pypi_foo//:pkg",
-    ] + select(
-        {
-            "@platforms//os:windows": ["@pypi_colorama//:pkg"],
-            "//conditions:default": [],
-        },
-    ),
-    tags = ["tag1", "tag2"],
-    visibility = ["//visibility:private"],
-)
-
-alias(
-   name = "pkg",
-   actual = "_pkg",
-)
-
-alias(
-   name = "whl",
-   actual = "_whl",
-)
-"""
-    actual = generate_whl_library_build_bazel(
-        repo_prefix = "pypi_",
-        whl_name = "foo.whl",
-        dependencies = ["foo", "bar-baz"],
-        dependencies_by_platform = {"@platforms//os:windows": ["colorama"]},
-        data_exclude = [],
-        tags = ["tag1", "tag2"],
-        entry_points = {},
-        annotation = None,
-    )
-    env.expect.that_str(actual).equals(want)
-
-_tests.append(_test_simple)
-
-def _test_with_annotation(env):
-    want = """\
-load("@rules_python//python:defs.bzl", "py_library", "py_binary")
-load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
-
-package(default_visibility = ["//visibility:public"])
-
-filegroup(
-    name = "dist_info",
-    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
-)
-
-filegroup(
-    name = "data",
-    srcs = glob(["data/**"], allow_empty = True),
-)
-
-filegroup(
-    name = "_whl",
-    srcs = ["foo.whl"],
-    data = [
-        "@pypi_bar_baz//:whl",
-        "@pypi_foo//:whl",
-    ],
-    visibility = ["//visibility:private"],
-)
-
-py_library(
-    name = "_pkg",
-    srcs = glob(
-        ["site-packages/**/*.py"],
-        exclude=["srcs_exclude_all"],
-        # Empty sources are allowed to support wheels that don't have any
-        # pure-Python code, e.g. pymssql, which is written in Cython.
-        allow_empty = True,
-    ),
-    data = ["file_dest", "exec_dest"] + glob(
-        ["site-packages/**/*"],
-        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD", "data_exclude_all"],
-    ),
-    # This makes this directory a top-level in the python import
-    # search path for anything that depends on this.
-    imports = ["site-packages"],
-    deps = [
-        "@pypi_bar_baz//:pkg",
-        "@pypi_foo//:pkg",
-    ],
-    tags = ["tag1", "tag2"],
-    visibility = ["//visibility:private"],
-)
-
-alias(
-   name = "pkg",
-   actual = "_pkg",
-)
-
-alias(
-   name = "whl",
-   actual = "_whl",
-)
-
-copy_file(
-    name = "file_dest.copy",
-    src = "file_src",
-    out = "file_dest",
-    is_executable = False,
-)
-
-copy_file(
-    name = "exec_dest.copy",
-    src = "exec_src",
-    out = "exec_dest",
-    is_executable = True,
-)
-
-# SOMETHING SPECIAL AT THE END
-"""
-    actual = generate_whl_library_build_bazel(
-        repo_prefix = "pypi_",
-        whl_name = "foo.whl",
-        dependencies = ["foo", "bar-baz"],
-        dependencies_by_platform = {},
-        data_exclude = [],
-        tags = ["tag1", "tag2"],
-        entry_points = {},
-        annotation = struct(
-            copy_files = {"file_src": "file_dest"},
-            copy_executables = {"exec_src": "exec_dest"},
-            data = [],
-            data_exclude_glob = ["data_exclude_all"],
-            srcs_exclude_glob = ["srcs_exclude_all"],
-            additive_build_content = """# SOMETHING SPECIAL AT THE END""",
-        ),
-    )
-    env.expect.that_str(actual).equals(want)
-
-_tests.append(_test_with_annotation)
-
-def _test_with_entry_points(env):
-    want = """\
-load("@rules_python//python:defs.bzl", "py_library", "py_binary")
-load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
-
-package(default_visibility = ["//visibility:public"])
-
-filegroup(
-    name = "dist_info",
-    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
-)
-
-filegroup(
-    name = "data",
-    srcs = glob(["data/**"], allow_empty = True),
-)
-
-filegroup(
-    name = "_whl",
-    srcs = ["foo.whl"],
-    data = [
-        "@pypi_bar_baz//:whl",
-        "@pypi_foo//:whl",
-    ],
-    visibility = ["//visibility:private"],
-)
-
-py_library(
-    name = "_pkg",
-    srcs = glob(
-        ["site-packages/**/*.py"],
-        exclude=[],
-        # Empty sources are allowed to support wheels that don't have any
-        # pure-Python code, e.g. pymssql, which is written in Cython.
-        allow_empty = True,
-    ),
-    data = [] + glob(
-        ["site-packages/**/*"],
-        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD"],
-    ),
-    # This makes this directory a top-level in the python import
-    # search path for anything that depends on this.
-    imports = ["site-packages"],
-    deps = [
-        "@pypi_bar_baz//:pkg",
-        "@pypi_foo//:pkg",
-    ],
-    tags = ["tag1", "tag2"],
-    visibility = ["//visibility:private"],
-)
-
-alias(
-   name = "pkg",
-   actual = "_pkg",
-)
-
-alias(
-   name = "whl",
-   actual = "_whl",
-)
-
-py_binary(
-    name = "rules_python_wheel_entry_point_fizz",
-    srcs = ["buzz.py"],
-    # This makes this directory a top-level in the python import
-    # search path for anything that depends on this.
-    imports = ["."],
-    deps = [":pkg"],
-)
-"""
-    actual = generate_whl_library_build_bazel(
-        repo_prefix = "pypi_",
-        whl_name = "foo.whl",
-        dependencies = ["foo", "bar-baz"],
-        dependencies_by_platform = {},
-        data_exclude = [],
-        tags = ["tag1", "tag2"],
-        entry_points = {"fizz": "buzz.py"},
-        annotation = None,
-    )
-    env.expect.that_str(actual).equals(want)
-
-_tests.append(_test_with_entry_points)
-
-def _test_group_member(env):
-    want = """\
-load("@rules_python//python:defs.bzl", "py_library", "py_binary")
-load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
-
-package(default_visibility = ["//visibility:public"])
-
-filegroup(
-    name = "dist_info",
-    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
-)
-
-filegroup(
-    name = "data",
-    srcs = glob(["data/**"], allow_empty = True),
-)
-
-filegroup(
-    name = "_whl",
-    srcs = ["foo.whl"],
-    data = ["@pypi_bar_baz//:whl"] + select(
-        {
-            ":is_linux_x86_64": [
-                "@pypi_box//:whl",
-                "@pypi_box_amd64//:whl",
-            ],
-            "@platforms//os:linux": ["@pypi_box//:whl"],
-            "//conditions:default": [],
-        },
-    ),
-    visibility = ["@pypi__groups//:__pkg__"],
-)
-
-py_library(
-    name = "_pkg",
-    srcs = glob(
-        ["site-packages/**/*.py"],
-        exclude=[],
-        # Empty sources are allowed to support wheels that don't have any
-        # pure-Python code, e.g. pymssql, which is written in Cython.
-        allow_empty = True,
-    ),
-    data = [] + glob(
-        ["site-packages/**/*"],
-        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD"],
-    ),
-    # This makes this directory a top-level in the python import
-    # search path for anything that depends on this.
-    imports = ["site-packages"],
-    deps = ["@pypi_bar_baz//:pkg"] + select(
-        {
-            ":is_linux_x86_64": [
-                "@pypi_box//:pkg",
-                "@pypi_box_amd64//:pkg",
-            ],
-            "@platforms//os:linux": ["@pypi_box//:pkg"],
-            "//conditions:default": [],
-        },
-    ),
-    tags = [],
-    visibility = ["@pypi__groups//:__pkg__"],
-)
-
-alias(
-   name = "pkg",
-   actual = "@pypi__groups//:qux_pkg",
-)
-
-alias(
-   name = "whl",
-   actual = "@pypi__groups//:qux_whl",
-)
-
-config_setting(
-    name = "is_linux_x86_64",
-    constraint_values = [
-        "@platforms//cpu:x86_64",
-        "@platforms//os:linux",
-    ],
-    visibility = ["//visibility:private"],
-)
-"""
-    actual = generate_whl_library_build_bazel(
-        repo_prefix = "pypi_",
-        whl_name = "foo.whl",
-        dependencies = ["foo", "bar-baz", "qux"],
-        dependencies_by_platform = {
-            "linux_x86_64": ["box", "box-amd64"],
-            "windows_x86_64": ["fox"],
-            "@platforms//os:linux": ["box"],  # buildifier: disable=unsorted-dict-items
-        },
-        tags = [],
-        entry_points = {},
-        data_exclude = [],
-        annotation = None,
-        group_name = "qux",
-        group_deps = ["foo", "fox", "qux"],
-    )
-    env.expect.that_str(actual).equals(want)
-
-_tests.append(_test_group_member)
-
-def generate_build_bazel_test_suite(name):
-    """Create the test suite.
-
-    Args:
-        name: the name of the test suite
-    """
-    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/private/whl_target_platforms/whl_target_platforms_tests.bzl b/tests/private/whl_target_platforms/whl_target_platforms_tests.bzl
deleted file mode 100644
index 9ccff0e..0000000
--- a/tests/private/whl_target_platforms/whl_target_platforms_tests.bzl
+++ /dev/null
@@ -1,54 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-""
-
-load("@rules_testing//lib:test_suite.bzl", "test_suite")
-load("//python/private:whl_target_platforms.bzl", "whl_target_platforms")  # buildifier: disable=bzl-visibility
-
-_tests = []
-
-def _test_simple(env):
-    tests = {
-        "macosx_10_9_arm64": [
-            struct(os = "osx", cpu = "aarch64"),
-        ],
-        "macosx_10_9_universal2": [
-            struct(os = "osx", cpu = "x86_64"),
-            struct(os = "osx", cpu = "aarch64"),
-        ],
-        "manylinux1_i686.manylinux_2_17_i686": [
-            struct(os = "linux", cpu = "x86_32"),
-        ],
-        "musllinux_1_1_ppc64le": [
-            struct(os = "linux", cpu = "ppc"),
-        ],
-        "win_amd64": [
-            struct(os = "windows", cpu = "x86_64"),
-        ],
-    }
-
-    for give, want in tests.items():
-        got = whl_target_platforms(give)
-        env.expect.that_collection(got).contains_exactly(want)
-
-_tests.append(_test_simple)
-
-def whl_target_platforms_test_suite(name):
-    """Create the test suite.
-
-    Args:
-        name: the name of the test suite
-    """
-    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/py_cc_toolchain_info_subject.bzl b/tests/py_cc_toolchain_info_subject.bzl
index ab9d1b8..4d3647c 100644
--- a/tests/py_cc_toolchain_info_subject.bzl
+++ b/tests/py_cc_toolchain_info_subject.bzl
@@ -19,6 +19,7 @@
     # buildifier: disable=uninitialized
     public = struct(
         headers = lambda *a, **k: _py_cc_toolchain_info_subject_headers(self, *a, **k),
+        libs = lambda *a, **k: _py_cc_toolchain_info_subject_libs(self, *a, **k),
         python_version = lambda *a, **k: _py_cc_toolchain_info_subject_python_version(self, *a, **k),
         actual = info,
     )
@@ -34,6 +35,15 @@
         ),
     )
 
+def _py_cc_toolchain_info_subject_libs(self):
+    return subjects.struct(
+        self.actual.libs,
+        meta = self.meta.derive("libs()"),
+        attrs = dict(
+            providers_map = subjects.dict,
+        ),
+    )
+
 def _py_cc_toolchain_info_subject_python_version(self):
     return subjects.str(
         self.actual.python_version,
diff --git a/tests/py_runtime/py_runtime_tests.bzl b/tests/py_runtime/py_runtime_tests.bzl
index 9fa5e2a..b47923d 100644
--- a/tests/py_runtime/py_runtime_tests.bzl
+++ b/tests/py_runtime/py_runtime_tests.bzl
@@ -413,6 +413,121 @@
 
 _tests.append(_test_system_interpreter_must_be_absolute)
 
+def _interpreter_version_info_test(name, interpreter_version_info, impl, expect_failure = True):
+    if config.enable_pystar:
+        py_runtime_kwargs = {
+            "interpreter_version_info": interpreter_version_info,
+        }
+        attr_values = {}
+    else:
+        py_runtime_kwargs = {}
+        attr_values = _SKIP_TEST
+
+    rt_util.helper_target(
+        py_runtime,
+        name = name + "_subject",
+        python_version = "PY3",
+        interpreter_path = "/py",
+        **py_runtime_kwargs
+    )
+    analysis_test(
+        name = name,
+        target = name + "_subject",
+        impl = impl,
+        expect_failure = expect_failure,
+        attr_values = attr_values,
+    )
+
+def _test_interpreter_version_info_must_define_major_and_minor_only_major(name):
+    _interpreter_version_info_test(
+        name,
+        {
+            "major": "3",
+        },
+        lambda env, target: (
+            env.expect.that_target(target).failures().contains_predicate(
+                matching.str_matches("must have at least two keys, 'major' and 'minor'"),
+            )
+        ),
+    )
+
+_tests.append(_test_interpreter_version_info_must_define_major_and_minor_only_major)
+
+def _test_interpreter_version_info_must_define_major_and_minor_only_minor(name):
+    _interpreter_version_info_test(
+        name,
+        {
+            "minor": "3",
+        },
+        lambda env, target: (
+            env.expect.that_target(target).failures().contains_predicate(
+                matching.str_matches("must have at least two keys, 'major' and 'minor'"),
+            )
+        ),
+    )
+
+_tests.append(_test_interpreter_version_info_must_define_major_and_minor_only_minor)
+
+def _test_interpreter_version_info_no_extraneous_keys(name):
+    _interpreter_version_info_test(
+        name,
+        {
+            "major": "3",
+            "minor": "3",
+            "something": "foo",
+        },
+        lambda env, target: (
+            env.expect.that_target(target).failures().contains_predicate(
+                matching.str_matches("unexpected keys [\"something\"]"),
+            )
+        ),
+    )
+
+_tests.append(_test_interpreter_version_info_no_extraneous_keys)
+
+def _test_interpreter_version_info_sets_values_to_none_if_not_given(name):
+    _interpreter_version_info_test(
+        name,
+        {
+            "major": "3",
+            "micro": "10",
+            "minor": "3",
+        },
+        lambda env, target: (
+            env.expect.that_target(target).provider(
+                PyRuntimeInfo,
+                factory = py_runtime_info_subject,
+            ).interpreter_version_info().serial().equals(None)
+        ),
+        expect_failure = False,
+    )
+
+_tests.append(_test_interpreter_version_info_sets_values_to_none_if_not_given)
+
+def _test_interpreter_version_info_parses_values_to_struct(name):
+    _interpreter_version_info_test(
+        name,
+        {
+            "major": "3",
+            "micro": "10",
+            "minor": "6",
+            "releaselevel": "alpha",
+            "serial": "1",
+        },
+        impl = _test_interpreter_version_info_parses_values_to_struct_impl,
+        expect_failure = False,
+    )
+
+def _test_interpreter_version_info_parses_values_to_struct_impl(env, target):
+    version_info = env.expect.that_target(target).provider(PyRuntimeInfo, factory = py_runtime_info_subject).interpreter_version_info()
+    version_info.major().equals(3)
+    version_info.minor().equals(6)
+    version_info.micro().equals(10)
+    version_info.releaselevel().equals("alpha")
+    version_info.serial().equals(1)
+
+_tests.append(_test_interpreter_version_info_parses_values_to_struct)
+
 def py_runtime_test_suite(name):
     test_suite(
         name = name,
diff --git a/tests/py_runtime_info/BUILD.bazel b/tests/py_runtime_info/BUILD.bazel
new file mode 100644
index 0000000..c501d6d
--- /dev/null
+++ b/tests/py_runtime_info/BUILD.bazel
@@ -0,0 +1,5 @@
+load(":py_runtime_info_tests.bzl", "py_runtime_info_test_suite")
+
+py_runtime_info_test_suite(
+    name = "py_runtime_info_tests",
+)
diff --git a/tests/py_runtime_info/py_runtime_info_tests.bzl b/tests/py_runtime_info/py_runtime_info_tests.bzl
new file mode 100644
index 0000000..9acf541
--- /dev/null
+++ b/tests/py_runtime_info/py_runtime_info_tests.bzl
@@ -0,0 +1,65 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Starlark tests for PyRuntimeInfo provider."""
+
+load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python:py_runtime_info.bzl", "PyRuntimeInfo")
+load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")  # buildifier: disable=bzl-visibility
+
+def _create_py_runtime_info_without_interpreter_version_info_impl(ctx):
+    kwargs = {}
+    if IS_BAZEL_7_OR_HIGHER:
+        kwargs["bootstrap_template"] = ctx.attr.bootstrap_template
+
+    return [PyRuntimeInfo(
+        interpreter = ctx.file.interpreter,
+        files = depset(ctx.files.files),
+        python_version = "PY3",
+        **kwargs
+    )]
+
+_create_py_runtime_info_without_interpreter_version_info = rule(
+    implementation = _create_py_runtime_info_without_interpreter_version_info_impl,
+    attrs = {
+        "bootstrap_template": attr.label(allow_single_file = True, default = "bootstrap.txt"),
+        "files": attr.label_list(allow_files = True, default = ["data.txt"]),
+        "interpreter": attr.label(allow_single_file = True, default = "interpreter.sh"),
+        "python_version": attr.string(default = "PY3"),
+    },
+)
+
+_tests = []
+
+def _test_can_create_py_runtime_info_without_interpreter_version_info(name):
+    _create_py_runtime_info_without_interpreter_version_info(
+        name = name + "_subject",
+    )
+    analysis_test(
+        name = name,
+        target = name + "_subject",
+        impl = _test_can_create_py_runtime_info_without_interpreter_version_info_impl,
+    )
+
+def _test_can_create_py_runtime_info_without_interpreter_version_info_impl(env, target):
+    # If we get this far, construction succeeded, so nothing to check
+    _ = env, target  # @unused
+
+_tests.append(_test_can_create_py_runtime_info_without_interpreter_version_info)
+
+def py_runtime_info_test_suite(name):
+    test_suite(
+        name = name,
+        tests = _tests,
+    )
diff --git a/tests/py_runtime_info_subject.bzl b/tests/py_runtime_info_subject.bzl
index 219719f..541d4d9 100644
--- a/tests/py_runtime_info_subject.bzl
+++ b/tests/py_runtime_info_subject.bzl
@@ -38,6 +38,7 @@
         files = lambda *a, **k: _py_runtime_info_subject_files(self, *a, **k),
         interpreter = lambda *a, **k: _py_runtime_info_subject_interpreter(self, *a, **k),
         interpreter_path = lambda *a, **k: _py_runtime_info_subject_interpreter_path(self, *a, **k),
+        interpreter_version_info = lambda *a, **k: _py_runtime_info_subject_interpreter_version_info(self, *a, **k),
         python_version = lambda *a, **k: _py_runtime_info_subject_python_version(self, *a, **k),
         stub_shebang = lambda *a, **k: _py_runtime_info_subject_stub_shebang(self, *a, **k),
         # go/keep-sorted end
@@ -100,3 +101,16 @@
         self.actual.stub_shebang,
         meta = self.meta.derive("stub_shebang()"),
     )
+
+def _py_runtime_info_subject_interpreter_version_info(self):
+    return subjects.struct(
+        self.actual.interpreter_version_info,
+        attrs = dict(
+            major = subjects.int,
+            minor = subjects.int,
+            micro = subjects.int,
+            releaselevel = subjects.str,
+            serial = subjects.int,
+        ),
+        meta = self.meta.derive("interpreter_version_info()"),
+    )
diff --git a/tests/py_wheel/py_wheel_tests.bzl b/tests/py_wheel/py_wheel_tests.bzl
index 3c03a1b..091e01c 100644
--- a/tests/py_wheel/py_wheel_tests.bzl
+++ b/tests/py_wheel/py_wheel_tests.bzl
@@ -14,6 +14,7 @@
 """Test for py_wheel."""
 
 load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite")
+load("@rules_testing//lib:truth.bzl", "matching")
 load("@rules_testing//lib:util.bzl", rt_util = "util")
 load("//python:packaging.bzl", "py_wheel")
 load("//python/private:py_wheel_normalize_pep440.bzl", "normalize_pep440")  # buildifier: disable=bzl-visibility
@@ -46,6 +47,79 @@
 
 _tests.append(_test_metadata)
 
+def _test_data(name):
+    rt_util.helper_target(
+        py_wheel,
+        name = name + "_data",
+        distribution = "mydist_" + name,
+        version = "0.0.0",
+        data_files = {
+            "source_name": "scripts/wheel_name",
+        },
+    )
+    analysis_test(
+        name = name,
+        impl = _test_data_impl,
+        target = name + "_data",
+    )
+
+def _test_data_impl(env, target):
+    action = env.expect.that_target(target).action_named(
+        "PyWheel",
+    )
+    action.contains_at_least_args(["--data_files", "scripts/wheel_name;tests/py_wheel/source_name"])
+    action.contains_at_least_inputs(["tests/py_wheel/source_name"])
+
+_tests.append(_test_data)
+
+def _test_data_bad_path(name):
+    rt_util.helper_target(
+        py_wheel,
+        name = name + "_data",
+        distribution = "mydist_" + name,
+        version = "0.0.0",
+        data_files = {
+            "source_name": "unsupported_path/wheel_name",
+        },
+    )
+    analysis_test(
+        name = name,
+        impl = _test_data_bad_path_impl,
+        target = name + "_data",
+        expect_failure = True,
+    )
+
+def _test_data_bad_path_impl(env, target):
+    env.expect.that_target(target).failures().contains_predicate(
+        matching.str_matches("target data file must start with"),
+    )
+
+_tests.append(_test_data_bad_path)
+
+def _test_data_bad_path_but_right_prefix(name):
+    rt_util.helper_target(
+        py_wheel,
+        name = name + "_data",
+        distribution = "mydist_" + name,
+        version = "0.0.0",
+        data_files = {
+            "source_name": "scripts2/wheel_name",
+        },
+    )
+    analysis_test(
+        name = name,
+        impl = _test_data_bad_path_but_right_prefix_impl,
+        target = name + "_data",
+        expect_failure = True,
+    )
+
+def _test_data_bad_path_but_right_prefix_impl(env, target):
+    env.expect.that_target(target).failures().contains_predicate(
+        matching.str_matches("target data file must start with"),
+    )
+
+_tests.append(_test_data_bad_path_but_right_prefix)
+
 def _test_content_type_from_attr(name):
     rt_util.helper_target(
         py_wheel,
diff --git a/tests/pycross/patched_py_wheel_library_test.py b/tests/pycross/patched_py_wheel_library_test.py
index 4591187..e1b404a 100644
--- a/tests/pycross/patched_py_wheel_library_test.py
+++ b/tests/pycross/patched_py_wheel_library_test.py
@@ -23,7 +23,9 @@
 class TestPyWheelLibrary(unittest.TestCase):
     def setUp(self):
         self.extraction_dir = Path(
-            RUNFILES.Rlocation("rules_python/tests/pycross/patched_extracted_wheel_for_testing")
+            RUNFILES.Rlocation(
+                "rules_python/tests/pycross/patched_extracted_wheel_for_testing"
+            )
         )
         self.assertTrue(self.extraction_dir.exists(), self.extraction_dir)
         self.assertTrue(self.extraction_dir.is_dir(), self.extraction_dir)
diff --git a/tests/pypi/config_settings/BUILD.bazel b/tests/pypi/config_settings/BUILD.bazel
new file mode 100644
index 0000000..15dbd7f
--- /dev/null
+++ b/tests/pypi/config_settings/BUILD.bazel
@@ -0,0 +1,5 @@
+load(":config_settings_tests.bzl", "config_settings_test_suite")
+
+config_settings_test_suite(
+    name = "config_settings_tests",
+)
diff --git a/tests/pypi/config_settings/config_settings_tests.bzl b/tests/pypi/config_settings/config_settings_tests.bzl
new file mode 100644
index 0000000..87e18b4
--- /dev/null
+++ b/tests/pypi/config_settings/config_settings_tests.bzl
@@ -0,0 +1,544 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for construction of Python version matching config settings."""
+
+load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("@rules_testing//lib:truth.bzl", "subjects")
+load("@rules_testing//lib:util.bzl", test_util = "util")
+load("//python/private/pypi:config_settings.bzl", "config_settings")  # buildifier: disable=bzl-visibility
+
+def _subject_impl(ctx):
+    _ = ctx  # @unused
+    return [DefaultInfo()]
+
+_subject = rule(
+    implementation = _subject_impl,
+    attrs = {
+        "dist": attr.string(),
+    },
+)
+
+_flag = struct(
+    platform = lambda x: ("//command_line_option:platforms", str(Label("//tests/support:" + x))),
+    pip_whl = lambda x: (str(Label("//python/config_settings:pip_whl")), str(x)),
+    pip_whl_glibc_version = lambda x: (str(Label("//python/config_settings:pip_whl_glibc_version")), str(x)),
+    pip_whl_muslc_version = lambda x: (str(Label("//python/config_settings:pip_whl_muslc_version")), str(x)),
+    pip_whl_osx_version = lambda x: (str(Label("//python/config_settings:pip_whl_osx_version")), str(x)),
+    pip_whl_osx_arch = lambda x: (str(Label("//python/config_settings:pip_whl_osx_arch")), str(x)),
+    py_linux_libc = lambda x: (str(Label("//python/config_settings:py_linux_libc")), str(x)),
+    python_version = lambda x: (str(Label("//python/config_settings:python_version")), str(x)),
+)
+
+def _analysis_test(*, name, dist, want, config_settings = [_flag.platform("linux_aarch64")]):
+    subject_name = name + "_subject"
+    test_util.helper_target(
+        _subject,
+        name = subject_name,
+        dist = select(
+            dist | {
+                "//conditions:default": "no_match",
+            },
+        ),
+    )
+    config_settings = dict(config_settings)
+    if not config_settings:
+        fail("For reproducibility on different platforms, the config setting must be specified")
+
+    analysis_test(
+        name = name,
+        target = subject_name,
+        impl = lambda env, target: _match(env, target, want),
+        config_settings = config_settings,
+    )
+
+def _match(env, target, want):
+    target = env.expect.that_target(target)
+    target.attr("dist", factory = subjects.str).equals(want)
+
+_tests = []
+
+# Tests when we only have an `sdist` present.
+
+def _test_sdist_default(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_sdist": "sdist",
+        },
+        want = "sdist",
+    )
+
+_tests.append(_test_sdist_default)
+
+def _test_sdist_no_whl(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_sdist": "sdist",
+        },
+        config_settings = [
+            _flag.platform("linux_aarch64"),
+            _flag.pip_whl("no"),
+        ],
+        want = "sdist",
+    )
+
+_tests.append(_test_sdist_no_whl)
+
+def _test_sdist_no_sdist(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_sdist": "sdist",
+        },
+        config_settings = [
+            _flag.platform("linux_aarch64"),
+            _flag.pip_whl("only"),
+        ],
+        # We will use `no_match_error` in the real case to indicate that `sdist` is not
+        # allowed to be used.
+        want = "no_match",
+    )
+
+_tests.append(_test_sdist_no_sdist)
+
+def _test_basic_whl_default(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py_none_any": "whl",
+            "is_sdist": "sdist",
+        },
+        want = "whl",
+    )
+
+_tests.append(_test_basic_whl_default)
+
+def _test_basic_whl_nowhl(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py_none_any": "whl",
+            "is_sdist": "sdist",
+        },
+        config_settings = [
+            _flag.platform("linux_aarch64"),
+            _flag.pip_whl("no"),
+        ],
+        want = "sdist",
+    )
+
+_tests.append(_test_basic_whl_nowhl)
+
+def _test_basic_whl_nosdist(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py_none_any": "whl",
+            "is_sdist": "sdist",
+        },
+        config_settings = [
+            _flag.platform("linux_aarch64"),
+            _flag.pip_whl("only"),
+        ],
+        want = "whl",
+    )
+
+_tests.append(_test_basic_whl_nosdist)
+
+def _test_whl_default(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py3_none_any": "whl",
+            "is_py_none_any": "basic_whl",
+        },
+        want = "whl",
+    )
+
+_tests.append(_test_whl_default)
+
+def _test_whl_nowhl(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py3_none_any": "whl",
+            "is_py_none_any": "basic_whl",
+        },
+        config_settings = [
+            _flag.platform("linux_aarch64"),
+            _flag.pip_whl("no"),
+        ],
+        want = "no_match",
+    )
+
+_tests.append(_test_whl_nowhl)
+
+def _test_whl_nosdist(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py3_none_any": "whl",
+        },
+        config_settings = [
+            _flag.platform("linux_aarch64"),
+            _flag.pip_whl("only"),
+        ],
+        want = "whl",
+    )
+
+_tests.append(_test_whl_nosdist)
+
+def _test_abi_whl_is_prefered(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py3_abi3_any": "abi_whl",
+            "is_py3_none_any": "whl",
+        },
+        want = "abi_whl",
+    )
+
+_tests.append(_test_abi_whl_is_prefered)
+
+def _test_whl_with_constraints_is_prefered(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py3_none_any": "default_whl",
+            "is_py3_none_any_linux_aarch64": "whl",
+            "is_py3_none_any_linux_x86_64": "amd64_whl",
+        },
+        want = "whl",
+    )
+
+_tests.append(_test_whl_with_constraints_is_prefered)
+
+def _test_cp_whl_is_prefered_over_py3(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3x_none_any": "cp",
+            "is_py3_abi3_any": "py3_abi3",
+            "is_py3_none_any": "py3",
+        },
+        want = "cp",
+    )
+
+_tests.append(_test_cp_whl_is_prefered_over_py3)
+
+def _test_cp_abi_whl_is_prefered_over_py3(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3x_abi3_any": "cp",
+            "is_py3_abi3_any": "py3",
+        },
+        want = "cp",
+    )
+
+_tests.append(_test_cp_abi_whl_is_prefered_over_py3)
+
+def _test_cp_version_is_selected_when_python_version_is_specified(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3.10_cp3x_none_any": "cp310",
+            "is_cp3.8_cp3x_none_any": "cp38",
+            "is_cp3.9_cp3x_none_any": "cp39",
+            "is_cp3x_none_any": "cp_default",
+        },
+        want = "cp310",
+        config_settings = [
+            _flag.python_version("3.10.9"),
+            _flag.platform("linux_aarch64"),
+        ],
+    )
+
+_tests.append(_test_cp_version_is_selected_when_python_version_is_specified)
+
+def _test_py_none_any_versioned(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3.10_py_none_any": "whl",
+            "is_cp3.9_py_none_any": "too-low",
+        },
+        want = "whl",
+        config_settings = [
+            _flag.python_version("3.10.9"),
+            _flag.platform("linux_aarch64"),
+        ],
+    )
+
+_tests.append(_test_py_none_any_versioned)
+
+def _test_cp_cp_whl(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3.10_cp3x_cp_linux_aarch64": "whl",
+        },
+        want = "whl",
+        config_settings = [
+            _flag.python_version("3.10.9"),
+            _flag.platform("linux_aarch64"),
+        ],
+    )
+
+_tests.append(_test_cp_cp_whl)
+
+def _test_cp_version_sdist_is_selected(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3.10_sdist": "sdist",
+        },
+        want = "sdist",
+        config_settings = [
+            _flag.python_version("3.10.9"),
+            _flag.platform("linux_aarch64"),
+        ],
+    )
+
+_tests.append(_test_cp_version_sdist_is_selected)
+
+def _test_platform_whl_is_prefered_over_any_whl_with_constraints(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py3_abi3_any": "better_default_whl",
+            "is_py3_abi3_any_linux_aarch64": "better_default_any_whl",
+            "is_py3_none_any": "default_whl",
+            "is_py3_none_any_linux_aarch64": "whl",
+            "is_py3_none_linux_aarch64": "platform_whl",
+        },
+        want = "platform_whl",
+    )
+
+_tests.append(_test_platform_whl_is_prefered_over_any_whl_with_constraints)
+
+def _test_abi3_platform_whl_preference(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_py3_abi3_linux_aarch64": "abi3_platform",
+            "is_py3_none_linux_aarch64": "platform",
+        },
+        want = "abi3_platform",
+    )
+
+_tests.append(_test_abi3_platform_whl_preference)
+
+def _test_glibc(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3x_cp_manylinux_aarch64": "glibc",
+            "is_py3_abi3_linux_aarch64": "abi3_platform",
+        },
+        want = "glibc",
+    )
+
+_tests.append(_test_glibc)
+
+def _test_glibc_versioned(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3x_cp_manylinux_2_14_aarch64": "glibc",
+            "is_cp3x_cp_manylinux_2_17_aarch64": "glibc",
+            "is_py3_abi3_linux_aarch64": "abi3_platform",
+        },
+        want = "glibc",
+        config_settings = [
+            _flag.py_linux_libc("glibc"),
+            _flag.pip_whl_glibc_version("2.17"),
+            _flag.platform("linux_aarch64"),
+        ],
+    )
+
+_tests.append(_test_glibc_versioned)
+
+def _test_glibc_compatible_exists(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            # Code using the conditions will need to construct selects, which
+            # do the version matching correctly.
+            "is_cp3x_cp_manylinux_2_14_aarch64": "2_14_whl_via_2_14_branch",
+            "is_cp3x_cp_manylinux_2_17_aarch64": "2_14_whl_via_2_17_branch",
+        },
+        want = "2_14_whl_via_2_17_branch",
+        config_settings = [
+            _flag.py_linux_libc("glibc"),
+            _flag.pip_whl_glibc_version("2.17"),
+            _flag.platform("linux_aarch64"),
+        ],
+    )
+
+_tests.append(_test_glibc_compatible_exists)
+
+def _test_musl(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3x_cp_musllinux_aarch64": "musl",
+        },
+        want = "musl",
+        config_settings = [
+            _flag.py_linux_libc("musl"),
+            _flag.platform("linux_aarch64"),
+        ],
+    )
+
+_tests.append(_test_musl)
+
+def _test_windows(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_cp3x_cp_windows_x86_64": "whl",
+        },
+        want = "whl",
+        config_settings = [
+            _flag.platform("windows_x86_64"),
+        ],
+    )
+
+_tests.append(_test_windows)
+
+def _test_osx(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            # We prefer arch specific whls over universal
+            "is_cp3x_cp_osx_x86_64": "whl",
+            "is_cp3x_cp_osx_x86_64_universal2": "universal_whl",
+        },
+        want = "whl",
+        config_settings = [
+            _flag.platform("mac_x86_64"),
+        ],
+    )
+
+_tests.append(_test_osx)
+
+def _test_osx_universal_default(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            # We default to universal if only that exists
+            "is_cp3x_cp_osx_x86_64_universal2": "whl",
+        },
+        want = "whl",
+        config_settings = [
+            _flag.platform("mac_x86_64"),
+        ],
+    )
+
+_tests.append(_test_osx_universal_default)
+
+def _test_osx_universal_only(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            # If we prefer universal, then we use that
+            "is_cp3x_cp_osx_x86_64": "whl",
+            "is_cp3x_cp_osx_x86_64_universal2": "universal",
+        },
+        want = "universal",
+        config_settings = [
+            _flag.pip_whl_osx_arch("universal"),
+            _flag.platform("mac_x86_64"),
+        ],
+    )
+
+_tests.append(_test_osx_universal_only)
+
+def _test_osx_os_version(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            # Similarly to the libc version, the user of the config settings will have to
+            # construct the select so that the version selection is correct.
+            "is_cp3x_cp_osx_10_9_x86_64": "whl",
+        },
+        want = "whl",
+        config_settings = [
+            _flag.pip_whl_osx_version("10.9"),
+            _flag.platform("mac_x86_64"),
+        ],
+    )
+
+_tests.append(_test_osx_os_version)
+
+def _test_all(name):
+    _analysis_test(
+        name = name,
+        dist = {
+            "is_" + f: f
+            for f in [
+                "{py}_{abi}_{plat}".format(py = valid_py, abi = valid_abi, plat = valid_plat)
+                # we have py2.py3, py3, cp3x
+                for valid_py in ["py", "py3", "cp3x"]
+                # cp abi usually comes with a version and we only need one
+                # config setting variant for all of them because the python
+                # version will discriminate between different versions.
+                for valid_abi in ["none", "abi3", "cp"]
+                for valid_plat in [
+                    "any",
+                    "manylinux_2_17_x86_64",
+                    "manylinux_2_17_aarch64",
+                    "osx_x86_64",
+                    "windows_x86_64",
+                ]
+                if not (
+                    valid_abi == "abi3" and valid_py == "py" or
+                    valid_abi == "cp" and valid_py != "cp3x"
+                )
+            ]
+        },
+        want = "cp3x_cp_manylinux_2_17_x86_64",
+        config_settings = [
+            _flag.pip_whl_glibc_version("2.17"),
+            _flag.platform("linux_x86_64"),
+        ],
+    )
+
+_tests.append(_test_all)
+
+def config_settings_test_suite(name):  # buildifier: disable=function-docstring
+    test_suite(
+        name = name,
+        tests = _tests,
+    )
+
+    config_settings(
+        name = "dummy",
+        python_versions = ["3.8", "3.9", "3.10"],
+        glibc_versions = [(2, 14), (2, 17)],
+        muslc_versions = [(1, 1)],
+        osx_versions = [(10, 9), (11, 0)],
+        target_platforms = [
+            "windows_x86_64",
+            "windows_aarch64",
+            "linux_x86_64",
+            "linux_ppc",
+            "linux_aarch64",
+            "osx_x86_64",
+            "osx_aarch64",
+        ],
+    )
diff --git a/tests/pypi/evaluate_markers/BUILD.bazel b/tests/pypi/evaluate_markers/BUILD.bazel
new file mode 100644
index 0000000..aba9264
--- /dev/null
+++ b/tests/pypi/evaluate_markers/BUILD.bazel
@@ -0,0 +1,7 @@
+load("@bazel_skylib//rules:build_test.bzl", "build_test")
+load("@dev_pip//:requirements.bzl", "all_whl_requirements")
+
+build_test(
+    name = "all_dev_wheels",
+    targets = all_whl_requirements,
+)
diff --git a/tests/pypi/generate_group_library_build_bazel/BUILD.bazel b/tests/pypi/generate_group_library_build_bazel/BUILD.bazel
new file mode 100644
index 0000000..df5ab82
--- /dev/null
+++ b/tests/pypi/generate_group_library_build_bazel/BUILD.bazel
@@ -0,0 +1,3 @@
+load(":generate_group_library_build_bazel_tests.bzl", "generate_group_library_build_bazel_test_suite")
+
+generate_group_library_build_bazel_test_suite(name = "generate_group_library_build_bazel_tests")
diff --git a/tests/pypi/generate_group_library_build_bazel/generate_group_library_build_bazel_tests.bzl b/tests/pypi/generate_group_library_build_bazel/generate_group_library_build_bazel_tests.bzl
new file mode 100644
index 0000000..a46aa41
--- /dev/null
+++ b/tests/pypi/generate_group_library_build_bazel/generate_group_library_build_bazel_tests.bzl
@@ -0,0 +1,104 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private/pypi:generate_group_library_build_bazel.bzl", "generate_group_library_build_bazel")  # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def _test_simple(env):
+    want = """\
+load("@rules_python//python:defs.bzl", "py_library")
+
+
+## Group vbap
+
+filegroup(
+    name = "vbap_whl",
+    srcs = [],
+    data = [
+        "@pypi_oletools//:_whl",
+        "@pypi_pcodedmp//:_whl",
+    ],
+    visibility = [
+        "@pypi_oletools//:__pkg__",
+        "@pypi_pcodedmp//:__pkg__",
+    ],
+)
+
+py_library(
+    name = "vbap_pkg",
+    srcs = [],
+    deps = [
+        "@pypi_oletools//:_pkg",
+        "@pypi_pcodedmp//:_pkg",
+    ],
+    visibility = [
+        "@pypi_oletools//:__pkg__",
+        "@pypi_pcodedmp//:__pkg__",
+    ],
+)
+"""
+    actual = generate_group_library_build_bazel(
+        repo_prefix = "pypi_",
+        groups = {"vbap": ["pcodedmp", "oletools"]},
+    )
+    env.expect.that_str(actual).equals(want)
+
+_tests.append(_test_simple)
+
+def _test_in_hub(env):
+    want = """\
+load("@rules_python//python:defs.bzl", "py_library")
+
+
+## Group vbap
+
+filegroup(
+    name = "vbap_whl",
+    srcs = [],
+    data = [
+        "//oletools:_whl",
+        "//pcodedmp:_whl",
+    ],
+    visibility = ["//:__subpackages__"],
+)
+
+py_library(
+    name = "vbap_pkg",
+    srcs = [],
+    deps = [
+        "//oletools:_pkg",
+        "//pcodedmp:_pkg",
+    ],
+    visibility = ["//:__subpackages__"],
+)
+"""
+    actual = generate_group_library_build_bazel(
+        repo_prefix = "",
+        groups = {"vbap": ["pcodedmp", "oletools"]},
+    )
+    env.expect.that_str(actual).equals(want)
+
+_tests.append(_test_in_hub)
+
+def generate_group_library_build_bazel_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/pypi/generate_whl_library_build_bazel/BUILD.bazel b/tests/pypi/generate_whl_library_build_bazel/BUILD.bazel
new file mode 100644
index 0000000..bea8e82
--- /dev/null
+++ b/tests/pypi/generate_whl_library_build_bazel/BUILD.bazel
@@ -0,0 +1,3 @@
+load(":generate_whl_library_build_bazel_tests.bzl", "generate_whl_library_build_bazel_test_suite")
+
+generate_whl_library_build_bazel_test_suite(name = "generate_whl_library_build_bazel_tests")
diff --git a/tests/pypi/generate_whl_library_build_bazel/generate_whl_library_build_bazel_tests.bzl b/tests/pypi/generate_whl_library_build_bazel/generate_whl_library_build_bazel_tests.bzl
new file mode 100644
index 0000000..3d4df14
--- /dev/null
+++ b/tests/pypi/generate_whl_library_build_bazel/generate_whl_library_build_bazel_tests.bzl
@@ -0,0 +1,578 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private/pypi:generate_whl_library_build_bazel.bzl", "generate_whl_library_build_bazel")  # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def _test_simple(env):
+    want = """\
+load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
+load("@rules_python//python:defs.bzl", "py_library", "py_binary")
+
+package(default_visibility = ["//visibility:public"])
+
+filegroup(
+    name = "dist_info",
+    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "data",
+    srcs = glob(["data/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "whl",
+    srcs = ["foo.whl"],
+    data = [
+        "@pypi_bar_baz//:whl",
+        "@pypi_foo//:whl",
+    ],
+    visibility = ["//visibility:public"],
+)
+
+py_library(
+    name = "pkg",
+    srcs = glob(
+        ["site-packages/**/*.py"],
+        exclude=[],
+        # Empty sources are allowed to support wheels that don't have any
+        # pure-Python code, e.g. pymssql, which is written in Cython.
+        allow_empty = True,
+    ),
+    data = [] + glob(
+        ["site-packages/**/*"],
+        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD"],
+    ),
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["site-packages"],
+    deps = [
+        "@pypi_bar_baz//:pkg",
+        "@pypi_foo//:pkg",
+    ],
+    tags = ["tag1", "tag2"],
+    visibility = ["//visibility:public"],
+)
+"""
+    actual = generate_whl_library_build_bazel(
+        dep_template = "@pypi_{name}//:{target}",
+        whl_name = "foo.whl",
+        dependencies = ["foo", "bar-baz"],
+        dependencies_by_platform = {},
+        data_exclude = [],
+        tags = ["tag1", "tag2"],
+        entry_points = {},
+        annotation = None,
+    )
+    env.expect.that_str(actual).equals(want)
+
+_tests.append(_test_simple)
+
+def _test_dep_selects(env):
+    want = """\
+load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
+load("@rules_python//python/config_settings:config_settings.bzl", "is_python_config_setting")
+load("@rules_python//python:defs.bzl", "py_library", "py_binary")
+
+package(default_visibility = ["//visibility:public"])
+
+filegroup(
+    name = "dist_info",
+    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "data",
+    srcs = glob(["data/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "whl",
+    srcs = ["foo.whl"],
+    data = [
+        "@pypi_bar_baz//:whl",
+        "@pypi_foo//:whl",
+    ] + select(
+        {
+            "@//python/config_settings:is_python_3.9": ["@pypi_py39_dep//:whl"],
+            "@platforms//cpu:aarch64": ["@pypi_arm_dep//:whl"],
+            "@platforms//os:windows": ["@pypi_win_dep//:whl"],
+            ":is_python_3.10_linux_ppc": ["@pypi_py310_linux_ppc_dep//:whl"],
+            ":is_python_3.9_anyos_aarch64": ["@pypi_py39_arm_dep//:whl"],
+            ":is_python_3.9_linux_anyarch": ["@pypi_py39_linux_dep//:whl"],
+            ":is_linux_x86_64": ["@pypi_linux_intel_dep//:whl"],
+            "//conditions:default": [],
+        },
+    ),
+    visibility = ["//visibility:public"],
+)
+
+py_library(
+    name = "pkg",
+    srcs = glob(
+        ["site-packages/**/*.py"],
+        exclude=[],
+        # Empty sources are allowed to support wheels that don't have any
+        # pure-Python code, e.g. pymssql, which is written in Cython.
+        allow_empty = True,
+    ),
+    data = [] + glob(
+        ["site-packages/**/*"],
+        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD"],
+    ),
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["site-packages"],
+    deps = [
+        "@pypi_bar_baz//:pkg",
+        "@pypi_foo//:pkg",
+    ] + select(
+        {
+            "@//python/config_settings:is_python_3.9": ["@pypi_py39_dep//:pkg"],
+            "@platforms//cpu:aarch64": ["@pypi_arm_dep//:pkg"],
+            "@platforms//os:windows": ["@pypi_win_dep//:pkg"],
+            ":is_python_3.10_linux_ppc": ["@pypi_py310_linux_ppc_dep//:pkg"],
+            ":is_python_3.9_anyos_aarch64": ["@pypi_py39_arm_dep//:pkg"],
+            ":is_python_3.9_linux_anyarch": ["@pypi_py39_linux_dep//:pkg"],
+            ":is_linux_x86_64": ["@pypi_linux_intel_dep//:pkg"],
+            "//conditions:default": [],
+        },
+    ),
+    tags = ["tag1", "tag2"],
+    visibility = ["//visibility:public"],
+)
+
+is_python_config_setting(
+    name = "is_python_3.10_linux_ppc",
+    python_version = "3.10",
+    constraint_values = [
+        "@platforms//cpu:ppc",
+        "@platforms//os:linux",
+    ],
+    visibility = ["//visibility:private"],
+)
+
+is_python_config_setting(
+    name = "is_python_3.9_anyos_aarch64",
+    python_version = "3.9",
+    constraint_values = ["@platforms//cpu:aarch64"],
+    visibility = ["//visibility:private"],
+)
+
+is_python_config_setting(
+    name = "is_python_3.9_linux_anyarch",
+    python_version = "3.9",
+    constraint_values = ["@platforms//os:linux"],
+    visibility = ["//visibility:private"],
+)
+
+config_setting(
+    name = "is_linux_x86_64",
+    constraint_values = [
+        "@platforms//cpu:x86_64",
+        "@platforms//os:linux",
+    ],
+    visibility = ["//visibility:private"],
+)
+"""
+    actual = generate_whl_library_build_bazel(
+        dep_template = "@pypi_{name}//:{target}",
+        whl_name = "foo.whl",
+        dependencies = ["foo", "bar-baz"],
+        dependencies_by_platform = {
+            "@//python/config_settings:is_python_3.9": ["py39_dep"],
+            "@platforms//cpu:aarch64": ["arm_dep"],
+            "@platforms//os:windows": ["win_dep"],
+            "cp310_linux_ppc": ["py310_linux_ppc_dep"],
+            "cp39_anyos_aarch64": ["py39_arm_dep"],
+            "cp39_linux_anyarch": ["py39_linux_dep"],
+            "linux_x86_64": ["linux_intel_dep"],
+        },
+        data_exclude = [],
+        tags = ["tag1", "tag2"],
+        entry_points = {},
+        annotation = None,
+    )
+    env.expect.that_str(actual.replace("@@", "@")).equals(want)
+
+_tests.append(_test_dep_selects)
+
+def _test_with_annotation(env):
+    want = """\
+load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
+load("@rules_python//python:defs.bzl", "py_library", "py_binary")
+
+package(default_visibility = ["//visibility:public"])
+
+filegroup(
+    name = "dist_info",
+    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "data",
+    srcs = glob(["data/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "whl",
+    srcs = ["foo.whl"],
+    data = [
+        "@pypi_bar_baz//:whl",
+        "@pypi_foo//:whl",
+    ],
+    visibility = ["//visibility:public"],
+)
+
+py_library(
+    name = "pkg",
+    srcs = glob(
+        ["site-packages/**/*.py"],
+        exclude=["srcs_exclude_all"],
+        # Empty sources are allowed to support wheels that don't have any
+        # pure-Python code, e.g. pymssql, which is written in Cython.
+        allow_empty = True,
+    ),
+    data = ["file_dest", "exec_dest"] + glob(
+        ["site-packages/**/*"],
+        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD", "data_exclude_all"],
+    ),
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["site-packages"],
+    deps = [
+        "@pypi_bar_baz//:pkg",
+        "@pypi_foo//:pkg",
+    ],
+    tags = ["tag1", "tag2"],
+    visibility = ["//visibility:public"],
+)
+
+copy_file(
+    name = "file_dest.copy",
+    src = "file_src",
+    out = "file_dest",
+    is_executable = False,
+)
+
+copy_file(
+    name = "exec_dest.copy",
+    src = "exec_src",
+    out = "exec_dest",
+    is_executable = True,
+)
+
+# SOMETHING SPECIAL AT THE END
+"""
+    actual = generate_whl_library_build_bazel(
+        dep_template = "@pypi_{name}//:{target}",
+        whl_name = "foo.whl",
+        dependencies = ["foo", "bar-baz"],
+        dependencies_by_platform = {},
+        data_exclude = [],
+        tags = ["tag1", "tag2"],
+        entry_points = {},
+        annotation = struct(
+            copy_files = {"file_src": "file_dest"},
+            copy_executables = {"exec_src": "exec_dest"},
+            data = [],
+            data_exclude_glob = ["data_exclude_all"],
+            srcs_exclude_glob = ["srcs_exclude_all"],
+            additive_build_content = """# SOMETHING SPECIAL AT THE END""",
+        ),
+    )
+    env.expect.that_str(actual).equals(want)
+
+_tests.append(_test_with_annotation)
+
+def _test_with_entry_points(env):
+    want = """\
+load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
+load("@rules_python//python:defs.bzl", "py_library", "py_binary")
+
+package(default_visibility = ["//visibility:public"])
+
+filegroup(
+    name = "dist_info",
+    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "data",
+    srcs = glob(["data/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "whl",
+    srcs = ["foo.whl"],
+    data = [
+        "@pypi_bar_baz//:whl",
+        "@pypi_foo//:whl",
+    ],
+    visibility = ["//visibility:public"],
+)
+
+py_library(
+    name = "pkg",
+    srcs = glob(
+        ["site-packages/**/*.py"],
+        exclude=[],
+        # Empty sources are allowed to support wheels that don't have any
+        # pure-Python code, e.g. pymssql, which is written in Cython.
+        allow_empty = True,
+    ),
+    data = [] + glob(
+        ["site-packages/**/*"],
+        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD"],
+    ),
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["site-packages"],
+    deps = [
+        "@pypi_bar_baz//:pkg",
+        "@pypi_foo//:pkg",
+    ],
+    tags = ["tag1", "tag2"],
+    visibility = ["//visibility:public"],
+)
+
+py_binary(
+    name = "rules_python_wheel_entry_point_fizz",
+    srcs = ["buzz.py"],
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["."],
+    deps = [":pkg"],
+)
+"""
+    actual = generate_whl_library_build_bazel(
+        dep_template = "@pypi_{name}//:{target}",
+        whl_name = "foo.whl",
+        dependencies = ["foo", "bar-baz"],
+        dependencies_by_platform = {},
+        data_exclude = [],
+        tags = ["tag1", "tag2"],
+        entry_points = {"fizz": "buzz.py"},
+        annotation = None,
+    )
+    env.expect.that_str(actual).equals(want)
+
+_tests.append(_test_with_entry_points)
+
+def _test_group_member(env):
+    want = """\
+load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
+load("@rules_python//python:defs.bzl", "py_library", "py_binary")
+
+package(default_visibility = ["//visibility:public"])
+
+filegroup(
+    name = "dist_info",
+    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "data",
+    srcs = glob(["data/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "_whl",
+    srcs = ["foo.whl"],
+    data = ["@pypi_bar_baz//:whl"] + select(
+        {
+            "@platforms//os:linux": ["@pypi_box//:whl"],
+            ":is_linux_x86_64": [
+                "@pypi_box//:whl",
+                "@pypi_box_amd64//:whl",
+            ],
+            "//conditions:default": [],
+        },
+    ),
+    visibility = ["@pypi__groups//:__pkg__"],
+)
+
+py_library(
+    name = "_pkg",
+    srcs = glob(
+        ["site-packages/**/*.py"],
+        exclude=[],
+        # Empty sources are allowed to support wheels that don't have any
+        # pure-Python code, e.g. pymssql, which is written in Cython.
+        allow_empty = True,
+    ),
+    data = [] + glob(
+        ["site-packages/**/*"],
+        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD"],
+    ),
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["site-packages"],
+    deps = ["@pypi_bar_baz//:pkg"] + select(
+        {
+            "@platforms//os:linux": ["@pypi_box//:pkg"],
+            ":is_linux_x86_64": [
+                "@pypi_box//:pkg",
+                "@pypi_box_amd64//:pkg",
+            ],
+            "//conditions:default": [],
+        },
+    ),
+    tags = [],
+    visibility = ["@pypi__groups//:__pkg__"],
+)
+
+config_setting(
+    name = "is_linux_x86_64",
+    constraint_values = [
+        "@platforms//cpu:x86_64",
+        "@platforms//os:linux",
+    ],
+    visibility = ["//visibility:private"],
+)
+
+alias(
+    name = "pkg",
+    actual = "@pypi__groups//:qux_pkg",
+)
+
+alias(
+    name = "whl",
+    actual = "@pypi__groups//:qux_whl",
+)
+"""
+    actual = generate_whl_library_build_bazel(
+        dep_template = "@pypi_{name}//:{target}",
+        whl_name = "foo.whl",
+        dependencies = ["foo", "bar-baz", "qux"],
+        dependencies_by_platform = {
+            "linux_x86_64": ["box", "box-amd64"],
+            "windows_x86_64": ["fox"],
+            "@platforms//os:linux": ["box"],  # buildifier: disable=unsorted-dict-items to check that we sort inside the test
+        },
+        tags = [],
+        entry_points = {},
+        data_exclude = [],
+        annotation = None,
+        group_name = "qux",
+        group_deps = ["foo", "fox", "qux"],
+    )
+    env.expect.that_str(actual.replace("@@", "@")).equals(want)
+
+_tests.append(_test_group_member)
+
+def _test_group_member_deps_to_hub(env):
+    want = """\
+load("@bazel_skylib//rules:copy_file.bzl", "copy_file")
+load("@rules_python//python:defs.bzl", "py_library", "py_binary")
+
+package(default_visibility = ["//visibility:public"])
+
+filegroup(
+    name = "dist_info",
+    srcs = glob(["site-packages/*.dist-info/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "data",
+    srcs = glob(["data/**"], allow_empty = True),
+)
+
+filegroup(
+    name = "whl",
+    srcs = ["foo.whl"],
+    data = ["@pypi//bar_baz:whl"] + select(
+        {
+            "@platforms//os:linux": ["@pypi//box:whl"],
+            ":is_linux_x86_64": [
+                "@pypi//box:whl",
+                "@pypi//box_amd64:whl",
+            ],
+            "//conditions:default": [],
+        },
+    ),
+    visibility = ["@pypi//:__subpackages__"],
+)
+
+py_library(
+    name = "pkg",
+    srcs = glob(
+        ["site-packages/**/*.py"],
+        exclude=[],
+        # Empty sources are allowed to support wheels that don't have any
+        # pure-Python code, e.g. pymssql, which is written in Cython.
+        allow_empty = True,
+    ),
+    data = [] + glob(
+        ["site-packages/**/*"],
+        exclude=["**/* *", "**/*.py", "**/*.pyc", "**/*.pyc.*", "**/*.dist-info/RECORD"],
+    ),
+    # This makes this directory a top-level in the python import
+    # search path for anything that depends on this.
+    imports = ["site-packages"],
+    deps = ["@pypi//bar_baz:pkg"] + select(
+        {
+            "@platforms//os:linux": ["@pypi//box:pkg"],
+            ":is_linux_x86_64": [
+                "@pypi//box:pkg",
+                "@pypi//box_amd64:pkg",
+            ],
+            "//conditions:default": [],
+        },
+    ),
+    tags = [],
+    visibility = ["@pypi//:__subpackages__"],
+)
+
+config_setting(
+    name = "is_linux_x86_64",
+    constraint_values = [
+        "@platforms//cpu:x86_64",
+        "@platforms//os:linux",
+    ],
+    visibility = ["//visibility:private"],
+)
+"""
+    actual = generate_whl_library_build_bazel(
+        dep_template = "@pypi//{name}:{target}",
+        whl_name = "foo.whl",
+        dependencies = ["foo", "bar-baz", "qux"],
+        dependencies_by_platform = {
+            "linux_x86_64": ["box", "box-amd64"],
+            "windows_x86_64": ["fox"],
+            "@platforms//os:linux": ["box"],  # buildifier: disable=unsorted-dict-items to check that we sort inside the test
+        },
+        tags = [],
+        entry_points = {},
+        data_exclude = [],
+        annotation = None,
+        group_name = "qux",
+        group_deps = ["foo", "fox", "qux"],
+    )
+    env.expect.that_str(actual.replace("@@", "@")).equals(want)
+
+_tests.append(_test_group_member_deps_to_hub)
+
+def generate_whl_library_build_bazel_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/pypi/index_sources/BUILD.bazel b/tests/pypi/index_sources/BUILD.bazel
new file mode 100644
index 0000000..7cd327a
--- /dev/null
+++ b/tests/pypi/index_sources/BUILD.bazel
@@ -0,0 +1,3 @@
+load(":index_sources_tests.bzl", "index_sources_test_suite")
+
+index_sources_test_suite(name = "index_sources_tests")
diff --git a/tests/pypi/index_sources/index_sources_tests.bzl b/tests/pypi/index_sources/index_sources_tests.bzl
new file mode 100644
index 0000000..0a76707
--- /dev/null
+++ b/tests/pypi/index_sources/index_sources_tests.bzl
@@ -0,0 +1,60 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private/pypi:index_sources.bzl", "index_sources")  # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def _test_no_simple_api_sources(env):
+    inputs = [
+        "foo==0.0.1",
+        "foo==0.0.1 @ https://someurl.org",
+        "foo==0.0.1 @ https://someurl.org --hash=sha256:deadbeef",
+        "foo==0.0.1 @ https://someurl.org; python_version < 2.7 --hash=sha256:deadbeef",
+    ]
+    for input in inputs:
+        got = index_sources(input)
+        env.expect.that_collection(got.shas).contains_exactly([])
+        env.expect.that_str(got.version).equals("0.0.1")
+
+_tests.append(_test_no_simple_api_sources)
+
+def _test_simple_api_sources(env):
+    tests = {
+        "foo==0.0.2 --hash=sha256:deafbeef    --hash=sha256:deadbeef": [
+            "deadbeef",
+            "deafbeef",
+        ],
+        "foo[extra]==0.0.2; (python_version < 2.7 or something_else == \"@\") --hash=sha256:deafbeef    --hash=sha256:deadbeef": [
+            "deadbeef",
+            "deafbeef",
+        ],
+    }
+    for input, want_shas in tests.items():
+        got = index_sources(input)
+        env.expect.that_collection(got.shas).contains_exactly(want_shas)
+        env.expect.that_str(got.version).equals("0.0.2")
+
+_tests.append(_test_simple_api_sources)
+
+def index_sources_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/pypi/parse_requirements/BUILD.bazel b/tests/pypi/parse_requirements/BUILD.bazel
new file mode 100644
index 0000000..3d7976e
--- /dev/null
+++ b/tests/pypi/parse_requirements/BUILD.bazel
@@ -0,0 +1,3 @@
+load(":parse_requirements_tests.bzl", "parse_requirements_test_suite")
+
+parse_requirements_test_suite(name = "parse_requirements_tests")
diff --git a/tests/pypi/parse_requirements/parse_requirements_tests.bzl b/tests/pypi/parse_requirements/parse_requirements_tests.bzl
new file mode 100644
index 0000000..25d2961
--- /dev/null
+++ b/tests/pypi/parse_requirements/parse_requirements_tests.bzl
@@ -0,0 +1,271 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private/pypi:parse_requirements.bzl", "parse_requirements", "select_requirement")  # buildifier: disable=bzl-visibility
+
+def _mock_ctx():
+    testdata = {
+        "requirements_direct": """\
+foo[extra] @ https://some-url
+""",
+        "requirements_linux": """\
+foo==0.0.3 --hash=sha256:deadbaaf
+""",
+        "requirements_lock": """\
+foo[extra]==0.0.1 --hash=sha256:deadbeef
+""",
+        "requirements_lock_dupe": """\
+foo[extra,extra_2]==0.0.1 --hash=sha256:deadbeef
+foo==0.0.1 --hash=sha256:deadbeef
+foo[extra]==0.0.1 --hash=sha256:deadbeef
+""",
+        "requirements_marker": """\
+foo[extra]==0.0.1 ;marker --hash=sha256:deadbeef
+bar==0.0.1 --hash=sha256:deadbeef
+""",
+        "requirements_osx": """\
+foo==0.0.3 --hash=sha256:deadbaaf
+""",
+        "requirements_windows": """\
+foo[extra]==0.0.2 --hash=sha256:deadbeef
+bar==0.0.1 --hash=sha256:deadb00f
+""",
+    }
+
+    return struct(
+        os = struct(
+            name = "linux",
+            arch = "x86_64",
+        ),
+        read = lambda x: testdata[x],
+    )
+
+_tests = []
+
+def _test_simple(env):
+    got = parse_requirements(
+        ctx = _mock_ctx(),
+        requirements_by_platform = {
+            "requirements_lock": ["linux_x86_64", "windows_x86_64"],
+        },
+    )
+    env.expect.that_dict(got).contains_exactly({
+        "foo": [
+            struct(
+                distribution = "foo",
+                extra_pip_args = [],
+                requirement_line = "foo[extra]==0.0.1 --hash=sha256:deadbeef",
+                srcs = struct(
+                    requirement = "foo[extra]==0.0.1",
+                    shas = ["deadbeef"],
+                    version = "0.0.1",
+                ),
+                target_platforms = [
+                    "linux_x86_64",
+                    "windows_x86_64",
+                ],
+                whls = [],
+                sdist = None,
+                is_exposed = True,
+            ),
+        ],
+    })
+    env.expect.that_str(
+        select_requirement(
+            got["foo"],
+            platform = "linux_x86_64",
+        ).srcs.version,
+    ).equals("0.0.1")
+
+_tests.append(_test_simple)
+
+def _test_dupe_requirements(env):
+    got = parse_requirements(
+        ctx = _mock_ctx(),
+        requirements_by_platform = {
+            "requirements_lock_dupe": ["linux_x86_64"],
+        },
+    )
+    env.expect.that_dict(got).contains_exactly({
+        "foo": [
+            struct(
+                distribution = "foo",
+                extra_pip_args = [],
+                requirement_line = "foo[extra,extra_2]==0.0.1 --hash=sha256:deadbeef",
+                srcs = struct(
+                    requirement = "foo[extra,extra_2]==0.0.1",
+                    shas = ["deadbeef"],
+                    version = "0.0.1",
+                ),
+                target_platforms = ["linux_x86_64"],
+                whls = [],
+                sdist = None,
+                is_exposed = True,
+            ),
+        ],
+    })
+
+_tests.append(_test_dupe_requirements)
+
+def _test_multi_os(env):
+    got = parse_requirements(
+        ctx = _mock_ctx(),
+        requirements_by_platform = {
+            "requirements_linux": ["linux_x86_64"],
+            "requirements_windows": ["windows_x86_64"],
+        },
+    )
+
+    env.expect.that_dict(got).contains_exactly({
+        "bar": [
+            struct(
+                distribution = "bar",
+                extra_pip_args = [],
+                requirement_line = "bar==0.0.1 --hash=sha256:deadb00f",
+                srcs = struct(
+                    requirement = "bar==0.0.1",
+                    shas = ["deadb00f"],
+                    version = "0.0.1",
+                ),
+                target_platforms = ["windows_x86_64"],
+                whls = [],
+                sdist = None,
+                is_exposed = False,
+            ),
+        ],
+        "foo": [
+            struct(
+                distribution = "foo",
+                extra_pip_args = [],
+                requirement_line = "foo==0.0.3 --hash=sha256:deadbaaf",
+                srcs = struct(
+                    requirement = "foo==0.0.3",
+                    shas = ["deadbaaf"],
+                    version = "0.0.3",
+                ),
+                target_platforms = ["linux_x86_64"],
+                whls = [],
+                sdist = None,
+                is_exposed = True,
+            ),
+            struct(
+                distribution = "foo",
+                extra_pip_args = [],
+                requirement_line = "foo[extra]==0.0.2 --hash=sha256:deadbeef",
+                srcs = struct(
+                    requirement = "foo[extra]==0.0.2",
+                    shas = ["deadbeef"],
+                    version = "0.0.2",
+                ),
+                target_platforms = ["windows_x86_64"],
+                whls = [],
+                sdist = None,
+                is_exposed = True,
+            ),
+        ],
+    })
+    env.expect.that_str(
+        select_requirement(
+            got["foo"],
+            platform = "windows_x86_64",
+        ).srcs.version,
+    ).equals("0.0.2")
+
+_tests.append(_test_multi_os)
+
+def _test_select_requirement_none_platform(env):
+    got = select_requirement(
+        [
+            struct(
+                some_attr = "foo",
+                target_platforms = ["linux_x86_64"],
+            ),
+        ],
+        platform = None,
+    )
+    env.expect.that_str(got.some_attr).equals("foo")
+
+_tests.append(_test_select_requirement_none_platform)
+
+def _test_env_marker_resolution(env):
+    def _mock_eval_markers(_, input):
+        ret = {
+            "foo[extra]==0.0.1 ;marker --hash=sha256:deadbeef": ["cp311_windows_x86_64"],
+        }
+
+        env.expect.that_collection(input.keys()).contains_exactly(ret.keys())
+        env.expect.that_collection(input.values()[0]).contains_exactly(["cp311_linux_super_exotic", "cp311_windows_x86_64"])
+        return ret
+
+    got = parse_requirements(
+        ctx = _mock_ctx(),
+        requirements_by_platform = {
+            "requirements_marker": ["cp311_linux_super_exotic", "cp311_windows_x86_64"],
+        },
+        evaluate_markers = _mock_eval_markers,
+    )
+    env.expect.that_dict(got).contains_exactly({
+        "bar": [
+            struct(
+                distribution = "bar",
+                extra_pip_args = [],
+                is_exposed = True,
+                requirement_line = "bar==0.0.1 --hash=sha256:deadbeef",
+                sdist = None,
+                srcs = struct(
+                    requirement = "bar==0.0.1",
+                    shas = ["deadbeef"],
+                    version = "0.0.1",
+                ),
+                target_platforms = ["cp311_linux_super_exotic", "cp311_windows_x86_64"],
+                whls = [],
+            ),
+        ],
+        "foo": [
+            struct(
+                distribution = "foo",
+                extra_pip_args = [],
+                # This is not exposed because we also have `linux_super_exotic` in the platform list
+                is_exposed = False,
+                requirement_line = "foo[extra]==0.0.1 ;marker --hash=sha256:deadbeef",
+                sdist = None,
+                srcs = struct(
+                    requirement = "foo[extra]==0.0.1 ;marker",
+                    shas = ["deadbeef"],
+                    version = "0.0.1",
+                ),
+                target_platforms = ["cp311_windows_x86_64"],
+                whls = [],
+            ),
+        ],
+    })
+    env.expect.that_str(
+        select_requirement(
+            got["foo"],
+            platform = "windows_x86_64",
+        ).srcs.version,
+    ).equals("0.0.1")
+
+_tests.append(_test_env_marker_resolution)
+
+def parse_requirements_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/pypi/parse_requirements_txt/BUILD.bazel b/tests/pypi/parse_requirements_txt/BUILD.bazel
new file mode 100644
index 0000000..526fa73
--- /dev/null
+++ b/tests/pypi/parse_requirements_txt/BUILD.bazel
@@ -0,0 +1,3 @@
+load(":parse_requirements_txt_tests.bzl", "parse_requirements_txt_test_suite")
+
+parse_requirements_txt_test_suite(name = "parse_requirements_txt_tests")
diff --git a/python/pip_install/private/test/requirements_parser_tests.bzl b/tests/pypi/parse_requirements_txt/parse_requirements_txt_tests.bzl
similarity index 87%
rename from python/pip_install/private/test/requirements_parser_tests.bzl
rename to tests/pypi/parse_requirements_txt/parse_requirements_txt_tests.bzl
index 5ea742e..f4e8990 100644
--- a/python/pip_install/private/test/requirements_parser_tests.bzl
+++ b/tests/pypi/parse_requirements_txt/parse_requirements_txt_tests.bzl
@@ -15,83 +15,83 @@
 "Unit tests for yaml.bzl"
 
 load("@bazel_skylib//lib:unittest.bzl", "asserts", "unittest")
-load("//python/pip_install:requirements_parser.bzl", "parse")
+load("//python/private/pypi:parse_requirements_txt.bzl", "parse_requirements_txt")  # buildifier: disable=bzl-visibility
 
 def _parse_basic_test_impl(ctx):
     env = unittest.begin(ctx)
 
     # Base cases
-    asserts.equals(env, [], parse("").requirements)
-    asserts.equals(env, [], parse("\n").requirements)
+    asserts.equals(env, [], parse_requirements_txt("").requirements)
+    asserts.equals(env, [], parse_requirements_txt("\n").requirements)
 
     # Various requirement specifiers (https://pip.pypa.io/en/stable/reference/requirement-specifiers/#requirement-specifiers)
-    asserts.equals(env, [("SomeProject", "SomeProject")], parse("SomeProject\n").requirements)
-    asserts.equals(env, [("SomeProject", "SomeProject == 1.3")], parse("SomeProject == 1.3\n").requirements)
-    asserts.equals(env, [("SomeProject", "SomeProject >= 1.2, < 2.0")], parse("SomeProject >= 1.2, < 2.0\n").requirements)
-    asserts.equals(env, [("SomeProject", "SomeProject[foo, bar]")], parse("SomeProject[foo, bar]\n").requirements)
-    asserts.equals(env, [("SomeProject", "SomeProject ~= 1.4.2")], parse("SomeProject ~= 1.4.2\n").requirements)
-    asserts.equals(env, [("SomeProject", "SomeProject == 5.4 ; python_version < '3.8'")], parse("SomeProject == 5.4 ; python_version < '3.8'\n").requirements)
-    asserts.equals(env, [("SomeProject", "SomeProject ; sys_platform == 'win32'")], parse("SomeProject ; sys_platform == 'win32'\n").requirements)
-    asserts.equals(env, [("requests", "requests [security] >= 2.8.1, == 2.8.* ; python_version < 2.7")], parse("requests [security] >= 2.8.1, == 2.8.* ; python_version < 2.7\n").requirements)
+    asserts.equals(env, [("SomeProject", "SomeProject")], parse_requirements_txt("SomeProject\n").requirements)
+    asserts.equals(env, [("SomeProject", "SomeProject == 1.3")], parse_requirements_txt("SomeProject == 1.3\n").requirements)
+    asserts.equals(env, [("SomeProject", "SomeProject >= 1.2, < 2.0")], parse_requirements_txt("SomeProject >= 1.2, < 2.0\n").requirements)
+    asserts.equals(env, [("SomeProject", "SomeProject[foo, bar]")], parse_requirements_txt("SomeProject[foo, bar]\n").requirements)
+    asserts.equals(env, [("SomeProject", "SomeProject ~= 1.4.2")], parse_requirements_txt("SomeProject ~= 1.4.2\n").requirements)
+    asserts.equals(env, [("SomeProject", "SomeProject == 5.4 ; python_version < '3.8'")], parse_requirements_txt("SomeProject == 5.4 ; python_version < '3.8'\n").requirements)
+    asserts.equals(env, [("SomeProject", "SomeProject ; sys_platform == 'win32'")], parse_requirements_txt("SomeProject ; sys_platform == 'win32'\n").requirements)
+    asserts.equals(env, [("requests", "requests [security] >= 2.8.1, == 2.8.* ; python_version < 2.7")], parse_requirements_txt("requests [security] >= 2.8.1, == 2.8.* ; python_version < 2.7\n").requirements)
 
     # Multiple requirements
-    asserts.equals(env, [("FooProject", "FooProject==1.0.0"), ("BarProject", "BarProject==2.0.0")], parse("""\
+    asserts.equals(env, [("FooProject", "FooProject==1.0.0"), ("BarProject", "BarProject==2.0.0")], parse_requirements_txt("""\
 FooProject==1.0.0
 BarProject==2.0.0
 """).requirements)
 
-    asserts.equals(env, [("FooProject", "FooProject==1.0.0"), ("BarProject", "BarProject==2.0.0")], parse("""\
+    asserts.equals(env, [("FooProject", "FooProject==1.0.0"), ("BarProject", "BarProject==2.0.0")], parse_requirements_txt("""\
 FooProject==1.0.0
 
 BarProject==2.0.0
 """).requirements)
 
     # Comments
-    asserts.equals(env, [("SomeProject", "SomeProject")], parse("""\
+    asserts.equals(env, [("SomeProject", "SomeProject")], parse_requirements_txt("""\
 # This is a comment
 SomeProject
 """).requirements)
-    asserts.equals(env, [("SomeProject", "SomeProject")], parse("""\
+    asserts.equals(env, [("SomeProject", "SomeProject")], parse_requirements_txt("""\
 SomeProject
 # This is a comment
 """).requirements)
-    asserts.equals(env, [("SomeProject", "SomeProject == 1.3")], parse("""\
+    asserts.equals(env, [("SomeProject", "SomeProject == 1.3")], parse_requirements_txt("""\
 SomeProject == 1.3 # This is a comment
 """).requirements)
-    asserts.equals(env, [("FooProject", "FooProject==1.0.0"), ("BarProject", "BarProject==2.0.0")], parse("""\
+    asserts.equals(env, [("FooProject", "FooProject==1.0.0"), ("BarProject", "BarProject==2.0.0")], parse_requirements_txt("""\
 FooProject==1.0.0
 # Comment
 BarProject==2.0.0 #Comment
 """).requirements)
-    asserts.equals(env, [("requests", "requests @ https://github.com/psf/requests/releases/download/v2.29.0/requests-2.29.0.tar.gz#sha1=3897c249b51a1a405d615a8c9cb92e5fdbf0dd49")], parse("""\
+    asserts.equals(env, [("requests", "requests @ https://github.com/psf/requests/releases/download/v2.29.0/requests-2.29.0.tar.gz#sha1=3897c249b51a1a405d615a8c9cb92e5fdbf0dd49")], parse_requirements_txt("""\
 requests @ https://github.com/psf/requests/releases/download/v2.29.0/requests-2.29.0.tar.gz#sha1=3897c249b51a1a405d615a8c9cb92e5fdbf0dd49
 """).requirements)
 
     # Multiline
-    asserts.equals(env, [("certifi", "certifi==2021.10.8     --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872     --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569")], parse("""\
+    asserts.equals(env, [("certifi", "certifi==2021.10.8     --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872     --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569")], parse_requirements_txt("""\
 certifi==2021.10.8 \
     --hash=sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872 \
     --hash=sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569
     # via requests
 """).requirements)
-    asserts.equals(env, [("requests", "requests @ https://github.com/psf/requests/releases/download/v2.29.0/requests-2.29.0.tar.gz#sha1=3897c249b51a1a405d615a8c9cb92e5fdbf0dd49     --hash=sha256:eca58eb564b134e4ff521a02aa6f566c653835753e1fc8a50a20cb6bee4673cd")], parse("""\
+    asserts.equals(env, [("requests", "requests @ https://github.com/psf/requests/releases/download/v2.29.0/requests-2.29.0.tar.gz#sha1=3897c249b51a1a405d615a8c9cb92e5fdbf0dd49     --hash=sha256:eca58eb564b134e4ff521a02aa6f566c653835753e1fc8a50a20cb6bee4673cd")], parse_requirements_txt("""\
 requests @ https://github.com/psf/requests/releases/download/v2.29.0/requests-2.29.0.tar.gz#sha1=3897c249b51a1a405d615a8c9cb92e5fdbf0dd49 \
     --hash=sha256:eca58eb564b134e4ff521a02aa6f566c653835753e1fc8a50a20cb6bee4673cd
     # via requirements.txt
 """).requirements)
 
     # Options
-    asserts.equals(env, ["--pre"], parse("--pre\n").options)
-    asserts.equals(env, ["--find-links", "/my/local/archives"], parse("--find-links /my/local/archives\n").options)
-    asserts.equals(env, ["--pre", "--find-links", "/my/local/archives"], parse("""\
+    asserts.equals(env, ["--pre"], parse_requirements_txt("--pre\n").options)
+    asserts.equals(env, ["--find-links", "/my/local/archives"], parse_requirements_txt("--find-links /my/local/archives\n").options)
+    asserts.equals(env, ["--pre", "--find-links", "/my/local/archives"], parse_requirements_txt("""\
 --pre
 --find-links /my/local/archives
 """).options)
-    asserts.equals(env, ["--pre", "--find-links", "/my/local/archives"], parse("""\
+    asserts.equals(env, ["--pre", "--find-links", "/my/local/archives"], parse_requirements_txt("""\
 --pre # Comment
 --find-links /my/local/archives
 """).options)
-    asserts.equals(env, struct(requirements = [("FooProject", "FooProject==1.0.0")], options = ["--pre", "--find-links", "/my/local/archives"]), parse("""\
+    asserts.equals(env, struct(requirements = [("FooProject", "FooProject==1.0.0")], options = ["--pre", "--find-links", "/my/local/archives"]), parse_requirements_txt("""\
 --pre # Comment
 FooProject==1.0.0
 --find-links /my/local/archives
@@ -116,7 +116,7 @@
         ("urllib3", "urllib3==1.26.7     --hash=sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece     --hash=sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"),
         ("yamllint", "yamllint==1.26.3     --hash=sha256:3934dcde484374596d6b52d8db412929a169f6d9e52e20f9ade5bf3523d9b96e"),
         ("setuptools", "setuptools==59.6.0     --hash=sha256:22c7348c6d2976a52632c67f7ab0cdf40147db7789f9aed18734643fe9cf3373     --hash=sha256:4ce92f1e1f8f01233ee9952c04f6b81d1e02939d6e1b488428154974a4d0783e"),
-    ], parse("""\
+    ], parse_requirements_txt("""\
 #
 # This file is autogenerated by pip-compile with python 3.9
 # To update, run:
@@ -220,5 +220,5 @@
     attrs = {},
 )
 
-def parse_tests(name):
+def parse_requirements_txt_test_suite(name):
     unittest.suite(name, parse_basic_test, parse_requirements_lockfile_test)
diff --git a/tests/pypi/parse_simpleapi_html/BUILD.bazel b/tests/pypi/parse_simpleapi_html/BUILD.bazel
new file mode 100644
index 0000000..e63ef0d
--- /dev/null
+++ b/tests/pypi/parse_simpleapi_html/BUILD.bazel
@@ -0,0 +1,3 @@
+load(":parse_simpleapi_html_tests.bzl", "parse_simpleapi_html_test_suite")
+
+parse_simpleapi_html_test_suite(name = "parse_simpleapi_html_tests")
diff --git a/tests/pypi/parse_simpleapi_html/parse_simpleapi_html_tests.bzl b/tests/pypi/parse_simpleapi_html/parse_simpleapi_html_tests.bzl
new file mode 100644
index 0000000..aa735b8
--- /dev/null
+++ b/tests/pypi/parse_simpleapi_html/parse_simpleapi_html_tests.bzl
@@ -0,0 +1,294 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("@rules_testing//lib:truth.bzl", "subjects")
+load("//python/private/pypi:parse_simpleapi_html.bzl", "parse_simpleapi_html")  # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def _generate_html(*items):
+    return """\
+<html>
+  <head>
+    <meta name="pypi:repository-version" content="1.1">
+    <title>Links for foo</title>
+  </head>
+  <body>
+    <h1>Links for cengal</h1>
+{}
+</body>
+</html>
+""".format(
+        "\n".join([
+            "<a {}>{}</a><br />".format(
+                " ".join(item.attrs),
+                item.filename,
+            )
+            for item in items
+        ]),
+    )
+
+def _test_sdist(env):
+    # buildifier: disable=unsorted-dict-items
+    tests = [
+        (
+            struct(
+                attrs = [
+                    'href="https://example.org/full-url/foo-0.0.1.tar.gz#sha256=deadbeefasource"',
+                    'data-requires-python="&gt;=3.7"',
+                ],
+                filename = "foo-0.0.1.tar.gz",
+                url = "ignored",
+            ),
+            struct(
+                filename = "foo-0.0.1.tar.gz",
+                sha256 = "deadbeefasource",
+                url = "https://example.org/full-url/foo-0.0.1.tar.gz",
+                yanked = False,
+            ),
+        ),
+        (
+            struct(
+                attrs = [
+                    'href="https://example.org/full-url/foo-0.0.1.tar.gz#sha256=deadbeefasource"',
+                    'data-requires-python=">=3.7"',
+                ],
+                filename = "foo-0.0.1.tar.gz",
+                url = "ignored",
+            ),
+            struct(
+                filename = "foo-0.0.1.tar.gz",
+                sha256 = "deadbeefasource",
+                url = "https://example.org/full-url/foo-0.0.1.tar.gz",
+                yanked = False,
+            ),
+        ),
+    ]
+
+    for (input, want) in tests:
+        html = _generate_html(input)
+        got = parse_simpleapi_html(url = input.url, content = html)
+        env.expect.that_collection(got.sdists).has_size(1)
+        env.expect.that_collection(got.whls).has_size(0)
+        if not got:
+            fail("expected at least one element, but did not get anything from:\n{}".format(html))
+
+        actual = env.expect.that_struct(
+            got.sdists[want.sha256],
+            attrs = dict(
+                filename = subjects.str,
+                sha256 = subjects.str,
+                url = subjects.str,
+                yanked = subjects.bool,
+            ),
+        )
+        actual.filename().equals(want.filename)
+        actual.sha256().equals(want.sha256)
+        actual.url().equals(want.url)
+        actual.yanked().equals(want.yanked)
+
+_tests.append(_test_sdist)
+
+def _test_whls(env):
+    # buildifier: disable=unsorted-dict-items
+    tests = [
+        (
+            struct(
+                attrs = [
+                    'href="https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=deadbeef"',
+                    'data-requires-python="&gt;=3.7"',
+                    'data-dist-info-metadata="sha256=deadb00f"',
+                    'data-core-metadata="sha256=deadb00f"',
+                ],
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                url = "ignored",
+            ),
+            struct(
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                metadata_sha256 = "deadb00f",
+                metadata_url = "https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata",
+                sha256 = "deadbeef",
+                url = "https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                yanked = False,
+            ),
+        ),
+        (
+            struct(
+                attrs = [
+                    'href="https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=deadbeef"',
+                    'data-requires-python=">=3.7"',
+                    'data-dist-info-metadata="sha256=deadb00f"',
+                    'data-core-metadata="sha256=deadb00f"',
+                ],
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                url = "ignored",
+            ),
+            struct(
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                metadata_sha256 = "deadb00f",
+                metadata_url = "https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata",
+                sha256 = "deadbeef",
+                url = "https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                yanked = False,
+            ),
+        ),
+        (
+            struct(
+                attrs = [
+                    'href="https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=deadbeef"',
+                    'data-requires-python="&gt;=3.7"',
+                    'data-core-metadata="sha256=deadb00f"',
+                ],
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                url = "ignored",
+            ),
+            struct(
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                metadata_sha256 = "deadb00f",
+                metadata_url = "https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata",
+                sha256 = "deadbeef",
+                url = "https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                yanked = False,
+            ),
+        ),
+        (
+            struct(
+                attrs = [
+                    'href="https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=deadbeef"',
+                    'data-requires-python="&gt;=3.7"',
+                    'data-dist-info-metadata="sha256=deadb00f"',
+                ],
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                url = "ignored",
+            ),
+            struct(
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                metadata_sha256 = "deadb00f",
+                metadata_url = "https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata",
+                sha256 = "deadbeef",
+                url = "https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                yanked = False,
+            ),
+        ),
+        (
+            struct(
+                attrs = [
+                    'href="https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=deadbeef"',
+                    'data-requires-python="&gt;=3.7"',
+                ],
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                url = "ignored",
+            ),
+            struct(
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                metadata_sha256 = "",
+                metadata_url = "",
+                sha256 = "deadbeef",
+                url = "https://example.org/full-url/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                yanked = False,
+            ),
+        ),
+        (
+            struct(
+                attrs = [
+                    'href="../../foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl#sha256=deadbeef"',
+                    'data-requires-python="&gt;=3.7"',
+                    'data-dist-info-metadata="sha256=deadb00f"',
+                ],
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                url = "https://example.org/python-wheels/bar/foo/",
+            ),
+            struct(
+                filename = "foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                metadata_sha256 = "deadb00f",
+                metadata_url = "https://example.org/python-wheels/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata",
+                sha256 = "deadbeef",
+                url = "https://example.org/python-wheels/foo-0.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+                yanked = False,
+            ),
+        ),
+        (
+            struct(
+                attrs = [
+                    'href="/whl/torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl#sha256=deadbeef"',
+                ],
+                filename = "torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl",
+                url = "https://download.pytorch.org/whl/cpu/torch",
+            ),
+            struct(
+                filename = "torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl",
+                metadata_sha256 = "",
+                metadata_url = "",
+                sha256 = "deadbeef",
+                url = "https://download.pytorch.org/whl/torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl",
+                yanked = False,
+            ),
+        ),
+        (
+            struct(
+                attrs = [
+                    'href="/whl/torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl#sha256=notdeadbeef"',
+                ],
+                filename = "torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl",
+                url = "http://download.pytorch.org/whl/cpu/torch",
+            ),
+            struct(
+                filename = "torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl",
+                metadata_sha256 = "",
+                metadata_url = "",
+                sha256 = "notdeadbeef",
+                url = "http://download.pytorch.org/whl/torch-2.0.0-cp38-cp38-manylinux2014_aarch64.whl",
+                yanked = False,
+            ),
+        ),
+    ]
+
+    for (input, want) in tests:
+        html = _generate_html(input)
+        got = parse_simpleapi_html(url = input.url, content = html)
+        env.expect.that_collection(got.sdists).has_size(0)
+        env.expect.that_collection(got.whls).has_size(1)
+        if not got:
+            fail("expected at least one element, but did not get anything from:\n{}".format(html))
+
+        actual = env.expect.that_struct(
+            got.whls[want.sha256],
+            attrs = dict(
+                filename = subjects.str,
+                metadata_sha256 = subjects.str,
+                metadata_url = subjects.str,
+                sha256 = subjects.str,
+                url = subjects.str,
+                yanked = subjects.bool,
+            ),
+        )
+        actual.filename().equals(want.filename)
+        actual.metadata_sha256().equals(want.metadata_sha256)
+        actual.metadata_url().equals(want.metadata_url)
+        actual.sha256().equals(want.sha256)
+        actual.url().equals(want.url)
+        actual.yanked().equals(want.yanked)
+
+_tests.append(_test_whls)
+
+def parse_simpleapi_html_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/private/parse_whl_name/BUILD.bazel b/tests/pypi/parse_whl_name/BUILD.bazel
similarity index 100%
rename from tests/private/parse_whl_name/BUILD.bazel
rename to tests/pypi/parse_whl_name/BUILD.bazel
diff --git a/tests/private/parse_whl_name/parse_whl_name_tests.bzl b/tests/pypi/parse_whl_name/parse_whl_name_tests.bzl
similarity index 96%
rename from tests/private/parse_whl_name/parse_whl_name_tests.bzl
rename to tests/pypi/parse_whl_name/parse_whl_name_tests.bzl
index c249f9f..4a88a6e 100644
--- a/tests/private/parse_whl_name/parse_whl_name_tests.bzl
+++ b/tests/pypi/parse_whl_name/parse_whl_name_tests.bzl
@@ -15,7 +15,7 @@
 ""
 
 load("@rules_testing//lib:test_suite.bzl", "test_suite")
-load("//python/private:parse_whl_name.bzl", "parse_whl_name")  # buildifier: disable=bzl-visibility
+load("//python/private/pypi:parse_whl_name.bzl", "parse_whl_name")  # buildifier: disable=bzl-visibility
 
 _tests = []
 
diff --git a/tests/pip_hub_repository/render_pkg_aliases/BUILD.bazel b/tests/pypi/render_pkg_aliases/BUILD.bazel
similarity index 100%
rename from tests/pip_hub_repository/render_pkg_aliases/BUILD.bazel
rename to tests/pypi/render_pkg_aliases/BUILD.bazel
diff --git a/tests/pypi/render_pkg_aliases/render_pkg_aliases_test.bzl b/tests/pypi/render_pkg_aliases/render_pkg_aliases_test.bzl
new file mode 100644
index 0000000..09a0631
--- /dev/null
+++ b/tests/pypi/render_pkg_aliases/render_pkg_aliases_test.bzl
@@ -0,0 +1,951 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""render_pkg_aliases tests"""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")  # buildifier: disable=bzl-visibility
+load("//python/private/pypi:config_settings.bzl", "config_settings")  # buildifier: disable=bzl-visibility
+load(
+    "//python/private/pypi:render_pkg_aliases.bzl",
+    "get_filename_config_settings",
+    "get_whl_flag_versions",
+    "multiplatform_whl_aliases",
+    "render_multiplatform_pkg_aliases",
+    "render_pkg_aliases",
+    "whl_alias",
+)  # buildifier: disable=bzl-visibility
+
+def _normalize_label_strings(want):
+    """normalize expected strings.
+
+    This function ensures that the desired `render_pkg_aliases` outputs are
+    normalized from `bzlmod` to `WORKSPACE` values so that we don't have to
+    have to sets of expected strings. The main difference is that under
+    `bzlmod` the `str(Label("//my_label"))` results in `"@@//my_label"` whereas
+    under `non-bzlmod` we have `"@//my_label"`. This function does
+    `string.replace("@@", "@")` to normalize the strings.
+
+    NOTE: in tests, we should only keep `@@` usage in expectation values
+    for the test cases where the whl_alias has the `config_setting` constructed
+    from a `Label` instance.
+    """
+    if "@@" not in want:
+        fail("The expected string does not have '@@' labels, consider not using the function")
+
+    if BZLMOD_ENABLED:
+        # our expectations are already with double @
+        return want
+
+    return want.replace("@@", "@")
+
+_tests = []
+
+def _test_empty(env):
+    actual = render_pkg_aliases(
+        aliases = None,
+    )
+
+    want = {}
+
+    env.expect.that_dict(actual).contains_exactly(want)
+
+_tests.append(_test_empty)
+
+def _test_legacy_aliases(env):
+    actual = render_pkg_aliases(
+        aliases = {
+            "foo": [
+                whl_alias(repo = "pypi_foo"),
+            ],
+        },
+    )
+
+    want_key = "foo/BUILD.bazel"
+    want_content = """\
+load("@bazel_skylib//lib:selects.bzl", "selects")
+
+package(default_visibility = ["//visibility:public"])
+
+alias(
+    name = "foo",
+    actual = ":pkg",
+)
+
+alias(
+    name = "pkg",
+    actual = "@pypi_foo//:pkg",
+)
+
+alias(
+    name = "whl",
+    actual = "@pypi_foo//:whl",
+)
+
+alias(
+    name = "data",
+    actual = "@pypi_foo//:data",
+)
+
+alias(
+    name = "dist_info",
+    actual = "@pypi_foo//:dist_info",
+)"""
+
+    env.expect.that_dict(actual).contains_exactly({want_key: want_content})
+
+_tests.append(_test_legacy_aliases)
+
+def _test_bzlmod_aliases(env):
+    # Use this function as it is used in pip_repository
+    actual = render_multiplatform_pkg_aliases(
+        default_config_setting = "//:my_config_setting",
+        aliases = {
+            "bar-baz": [
+                whl_alias(version = "3.2", repo = "pypi_32_bar_baz", config_setting = "//:my_config_setting"),
+            ],
+        },
+    )
+
+    want_key = "bar_baz/BUILD.bazel"
+    want_content = """\
+load("@bazel_skylib//lib:selects.bzl", "selects")
+
+package(default_visibility = ["//visibility:public"])
+
+alias(
+    name = "bar_baz",
+    actual = ":pkg",
+)
+
+alias(
+    name = "pkg",
+    actual = selects.with_or(
+        {
+            (
+                "//:my_config_setting",
+                "//conditions:default",
+            ): "@pypi_32_bar_baz//:pkg",
+        },
+    ),
+)
+
+alias(
+    name = "whl",
+    actual = selects.with_or(
+        {
+            (
+                "//:my_config_setting",
+                "//conditions:default",
+            ): "@pypi_32_bar_baz//:whl",
+        },
+    ),
+)
+
+alias(
+    name = "data",
+    actual = selects.with_or(
+        {
+            (
+                "//:my_config_setting",
+                "//conditions:default",
+            ): "@pypi_32_bar_baz//:data",
+        },
+    ),
+)
+
+alias(
+    name = "dist_info",
+    actual = selects.with_or(
+        {
+            (
+                "//:my_config_setting",
+                "//conditions:default",
+            ): "@pypi_32_bar_baz//:dist_info",
+        },
+    ),
+)"""
+
+    env.expect.that_str(actual.pop("_config/BUILD.bazel")).equals(
+        """\
+load("@rules_python//python/private/pypi:config_settings.bzl", "config_settings")
+
+config_settings(
+    name = "config_settings",
+    glibc_versions = [],
+    muslc_versions = [],
+    osx_versions = [],
+    python_versions = ["3.2"],
+    target_platforms = [],
+    visibility = ["//:__subpackages__"],
+)""",
+    )
+    env.expect.that_collection(actual.keys()).contains_exactly([want_key])
+    env.expect.that_str(actual[want_key]).equals(want_content)
+
+_tests.append(_test_bzlmod_aliases)
+
+def _test_bzlmod_aliases_with_no_default_version(env):
+    actual = render_multiplatform_pkg_aliases(
+        default_config_setting = None,
+        aliases = {
+            "bar-baz": [
+                whl_alias(
+                    version = "3.2",
+                    repo = "pypi_32_bar_baz",
+                    # pass the label to ensure that it gets converted to string
+                    config_setting = Label("//python/config_settings:is_python_3.2"),
+                ),
+                whl_alias(version = "3.1", repo = "pypi_31_bar_baz"),
+            ],
+        },
+    )
+
+    want_key = "bar_baz/BUILD.bazel"
+    want_content = """\
+load("@bazel_skylib//lib:selects.bzl", "selects")
+
+package(default_visibility = ["//visibility:public"])
+
+_NO_MATCH_ERROR = \"\"\"\\
+No matching wheel for current configuration's Python version.
+
+The current build configuration's Python version doesn't match any of the Python
+wheels available for this wheel. This wheel supports the following Python
+configuration settings:
+    //_config:is_python_3.1
+    @@//python/config_settings:is_python_3.2
+
+To determine the current configuration's Python version, run:
+    `bazel config <config id>` (shown further below)
+and look for
+    rules_python//python/config_settings:python_version
+
+If the value is missing, then the "default" Python version is being used,
+which has a "null" version value and will not match version constraints.
+\"\"\"
+
+alias(
+    name = "bar_baz",
+    actual = ":pkg",
+)
+
+alias(
+    name = "pkg",
+    actual = selects.with_or(
+        {
+            "//_config:is_python_3.1": "@pypi_31_bar_baz//:pkg",
+            "@@//python/config_settings:is_python_3.2": "@pypi_32_bar_baz//:pkg",
+        },
+        no_match_error = _NO_MATCH_ERROR,
+    ),
+)
+
+alias(
+    name = "whl",
+    actual = selects.with_or(
+        {
+            "//_config:is_python_3.1": "@pypi_31_bar_baz//:whl",
+            "@@//python/config_settings:is_python_3.2": "@pypi_32_bar_baz//:whl",
+        },
+        no_match_error = _NO_MATCH_ERROR,
+    ),
+)
+
+alias(
+    name = "data",
+    actual = selects.with_or(
+        {
+            "//_config:is_python_3.1": "@pypi_31_bar_baz//:data",
+            "@@//python/config_settings:is_python_3.2": "@pypi_32_bar_baz//:data",
+        },
+        no_match_error = _NO_MATCH_ERROR,
+    ),
+)
+
+alias(
+    name = "dist_info",
+    actual = selects.with_or(
+        {
+            "//_config:is_python_3.1": "@pypi_31_bar_baz//:dist_info",
+            "@@//python/config_settings:is_python_3.2": "@pypi_32_bar_baz//:dist_info",
+        },
+        no_match_error = _NO_MATCH_ERROR,
+    ),
+)"""
+
+    actual.pop("_config/BUILD.bazel")
+    env.expect.that_collection(actual.keys()).contains_exactly([want_key])
+    env.expect.that_str(actual[want_key]).equals(_normalize_label_strings(want_content))
+
+_tests.append(_test_bzlmod_aliases_with_no_default_version)
+
+def _test_bzlmod_aliases_for_non_root_modules(env):
+    actual = render_pkg_aliases(
+        # NOTE @aignas 2024-01-17: if the default X.Y version coincides with the
+        # versions that are used in the root module, then this would be the same as
+        # as _test_bzlmod_aliases.
+        #
+        # However, if the root module uses a different default version than the
+        # non-root module, then we will have a no-match-error because the
+        # default_config_setting is not in the list of the versions in the
+        # whl_map.
+        default_config_setting = "//_config:is_python_3.3",
+        aliases = {
+            "bar-baz": [
+                whl_alias(version = "3.2", repo = "pypi_32_bar_baz"),
+                whl_alias(version = "3.1", repo = "pypi_31_bar_baz"),
+            ],
+        },
+    )
+
+    want_key = "bar_baz/BUILD.bazel"
+    want_content = """\
+load("@bazel_skylib//lib:selects.bzl", "selects")
+
+package(default_visibility = ["//visibility:public"])
+
+_NO_MATCH_ERROR = \"\"\"\\
+No matching wheel for current configuration's Python version.
+
+The current build configuration's Python version doesn't match any of the Python
+wheels available for this wheel. This wheel supports the following Python
+configuration settings:
+    //_config:is_python_3.1
+    //_config:is_python_3.2
+
+To determine the current configuration's Python version, run:
+    `bazel config <config id>` (shown further below)
+and look for
+    rules_python//python/config_settings:python_version
+
+If the value is missing, then the "default" Python version is being used,
+which has a "null" version value and will not match version constraints.
+\"\"\"
+
+alias(
+    name = "bar_baz",
+    actual = ":pkg",
+)
+
+alias(
+    name = "pkg",
+    actual = selects.with_or(
+        {
+            "//_config:is_python_3.1": "@pypi_31_bar_baz//:pkg",
+            "//_config:is_python_3.2": "@pypi_32_bar_baz//:pkg",
+        },
+        no_match_error = _NO_MATCH_ERROR,
+    ),
+)
+
+alias(
+    name = "whl",
+    actual = selects.with_or(
+        {
+            "//_config:is_python_3.1": "@pypi_31_bar_baz//:whl",
+            "//_config:is_python_3.2": "@pypi_32_bar_baz//:whl",
+        },
+        no_match_error = _NO_MATCH_ERROR,
+    ),
+)
+
+alias(
+    name = "data",
+    actual = selects.with_or(
+        {
+            "//_config:is_python_3.1": "@pypi_31_bar_baz//:data",
+            "//_config:is_python_3.2": "@pypi_32_bar_baz//:data",
+        },
+        no_match_error = _NO_MATCH_ERROR,
+    ),
+)
+
+alias(
+    name = "dist_info",
+    actual = selects.with_or(
+        {
+            "//_config:is_python_3.1": "@pypi_31_bar_baz//:dist_info",
+            "//_config:is_python_3.2": "@pypi_32_bar_baz//:dist_info",
+        },
+        no_match_error = _NO_MATCH_ERROR,
+    ),
+)"""
+
+    env.expect.that_collection(actual.keys()).contains_exactly([want_key])
+    env.expect.that_str(actual[want_key]).equals(want_content)
+
+_tests.append(_test_bzlmod_aliases_for_non_root_modules)
+
+def _test_aliases_are_created_for_all_wheels(env):
+    actual = render_pkg_aliases(
+        default_config_setting = "//_config:is_python_3.2",
+        aliases = {
+            "bar": [
+                whl_alias(version = "3.1", repo = "pypi_31_bar"),
+                whl_alias(version = "3.2", repo = "pypi_32_bar"),
+            ],
+            "foo": [
+                whl_alias(version = "3.1", repo = "pypi_32_foo"),
+                whl_alias(version = "3.2", repo = "pypi_31_foo"),
+            ],
+        },
+    )
+
+    want_files = [
+        "bar/BUILD.bazel",
+        "foo/BUILD.bazel",
+    ]
+
+    env.expect.that_dict(actual).keys().contains_exactly(want_files)
+
+_tests.append(_test_aliases_are_created_for_all_wheels)
+
+def _test_aliases_with_groups(env):
+    actual = render_pkg_aliases(
+        default_config_setting = "//_config:is_python_3.2",
+        aliases = {
+            "bar": [
+                whl_alias(version = "3.1", repo = "pypi_31_bar"),
+                whl_alias(version = "3.2", repo = "pypi_32_bar"),
+            ],
+            "baz": [
+                whl_alias(version = "3.1", repo = "pypi_31_baz"),
+                whl_alias(version = "3.2", repo = "pypi_32_baz"),
+            ],
+            "foo": [
+                whl_alias(version = "3.1", repo = "pypi_32_foo"),
+                whl_alias(version = "3.2", repo = "pypi_31_foo"),
+            ],
+        },
+        requirement_cycles = {
+            "group": ["bar", "baz"],
+        },
+    )
+
+    want_files = [
+        "bar/BUILD.bazel",
+        "foo/BUILD.bazel",
+        "baz/BUILD.bazel",
+        "_groups/BUILD.bazel",
+    ]
+    env.expect.that_dict(actual).keys().contains_exactly(want_files)
+
+    want_key = "_groups/BUILD.bazel"
+
+    # Just check that it contains a private whl
+    env.expect.that_str(actual[want_key]).contains("//bar:_whl")
+
+    want_key = "bar/BUILD.bazel"
+
+    # Just check that it contains a private whl
+    env.expect.that_str(actual[want_key]).contains("name = \"_whl\"")
+    env.expect.that_str(actual[want_key]).contains("name = \"whl\"")
+    env.expect.that_str(actual[want_key]).contains("\"//_groups:group_whl\"")
+
+_tests.append(_test_aliases_with_groups)
+
+def _test_empty_flag_versions(env):
+    got = get_whl_flag_versions(
+        aliases = [],
+    )
+    want = {}
+    env.expect.that_dict(got).contains_exactly(want)
+
+_tests.append(_test_empty_flag_versions)
+
+def _test_get_python_versions(env):
+    got = get_whl_flag_versions(
+        aliases = [
+            whl_alias(repo = "foo", version = "3.3"),
+            whl_alias(repo = "foo", version = "3.2"),
+        ],
+    )
+    want = {
+        "python_versions": ["3.2", "3.3"],
+    }
+    env.expect.that_dict(got).contains_exactly(want)
+
+_tests.append(_test_get_python_versions)
+
+def _test_get_python_versions_from_filenames(env):
+    got = get_whl_flag_versions(
+        aliases = [
+            whl_alias(
+                repo = "foo",
+                version = "3.3",
+                filename = "foo-0.0.0-py3-none-" + plat + ".whl",
+            )
+            for plat in [
+                "linux_x86_64",
+                "manylinux_2_17_x86_64",
+                "manylinux_2_14_aarch64.musllinux_1_1_aarch64",
+                "musllinux_1_0_x86_64",
+                "manylinux2014_x86_64.manylinux_2_17_x86_64",
+                "macosx_11_0_arm64",
+                "macosx_10_9_x86_64",
+                "macosx_10_9_universal2",
+                "windows_x86_64",
+            ]
+        ],
+    )
+    want = {
+        "glibc_versions": [(2, 14), (2, 17)],
+        "muslc_versions": [(1, 0), (1, 1)],
+        "osx_versions": [(10, 9), (11, 0)],
+        "python_versions": ["3.3"],
+        "target_platforms": [
+            "linux_aarch64",
+            "linux_x86_64",
+            "osx_aarch64",
+            "osx_x86_64",
+            "windows_x86_64",
+        ],
+    }
+    env.expect.that_dict(got).contains_exactly(want)
+
+_tests.append(_test_get_python_versions_from_filenames)
+
+def _test_get_flag_versions_from_alias_target_platforms(env):
+    got = get_whl_flag_versions(
+        aliases = [
+            whl_alias(
+                repo = "foo",
+                version = "3.3",
+                filename = "foo-0.0.0-py3-none-" + plat + ".whl",
+            )
+            for plat in [
+                "windows_x86_64",
+            ]
+        ] + [
+            whl_alias(
+                repo = "foo",
+                version = "3.3",
+                filename = "foo-0.0.0-py3-none-any.whl",
+                target_platforms = [
+                    "cp33_linux_x86_64",
+                ],
+            ),
+        ],
+    )
+    want = {
+        "python_versions": ["3.3"],
+        "target_platforms": [
+            "linux_x86_64",
+            "windows_x86_64",
+        ],
+    }
+    env.expect.that_dict(got).contains_exactly(want)
+
+_tests.append(_test_get_flag_versions_from_alias_target_platforms)
+
+def _test_config_settings(
+        env,
+        *,
+        filename,
+        want,
+        want_versions = {},
+        target_platforms = [],
+        glibc_versions = [],
+        muslc_versions = [],
+        osx_versions = [],
+        python_version = "",
+        python_default = True):
+    got, got_default_version_settings = get_filename_config_settings(
+        filename = filename,
+        target_platforms = target_platforms,
+        glibc_versions = glibc_versions,
+        muslc_versions = muslc_versions,
+        osx_versions = osx_versions,
+        python_version = python_version,
+        python_default = python_default,
+    )
+    env.expect.that_collection(got).contains_exactly(want)
+    env.expect.that_dict(got_default_version_settings).contains_exactly(want_versions)
+
+def _test_sdist(env):
+    # Do the first test for multiple extensions
+    for ext in [".tar.gz", ".zip"]:
+        _test_config_settings(
+            env,
+            filename = "foo-0.0.1" + ext,
+            want = [":is_sdist"],
+        )
+
+    ext = ".zip"
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1" + ext,
+        target_platforms = [
+            "linux_aarch64",
+        ],
+        want = [":is_sdist_linux_aarch64"],
+    )
+
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1" + ext,
+        python_version = "3.2",
+        want = [
+            ":is_sdist",
+            ":is_cp3.2_sdist",
+        ],
+    )
+
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1" + ext,
+        python_version = "3.2",
+        python_default = True,
+        target_platforms = [
+            "linux_aarch64",
+            "linux_x86_64",
+        ],
+        want = [
+            ":is_sdist_linux_aarch64",
+            ":is_cp3.2_sdist_linux_aarch64",
+            ":is_sdist_linux_x86_64",
+            ":is_cp3.2_sdist_linux_x86_64",
+        ],
+    )
+
+_tests.append(_test_sdist)
+
+def _test_py2_py3_none_any(env):
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-py2.py3-none-any.whl",
+        want = [":is_py_none_any"],
+    )
+
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-py2.py3-none-any.whl",
+        target_platforms = [
+            "linux_aarch64",
+        ],
+        want = [":is_py_none_any_linux_aarch64"],
+    )
+
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-py2.py3-none-any.whl",
+        python_version = "3.2",
+        python_default = True,
+        want = [
+            ":is_py_none_any",
+            ":is_cp3.2_py_none_any",
+        ],
+    )
+
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-py2.py3-none-any.whl",
+        python_version = "3.2",
+        python_default = False,
+        target_platforms = [
+            "osx_x86_64",
+        ],
+        want = [
+            ":is_cp3.2_py_none_any_osx_x86_64",
+        ],
+    )
+
+_tests.append(_test_py2_py3_none_any)
+
+def _test_py3_none_any(env):
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-py3-none-any.whl",
+        want = [":is_py3_none_any"],
+    )
+
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-py3-none-any.whl",
+        target_platforms = ["linux_x86_64"],
+        want = [":is_py3_none_any_linux_x86_64"],
+    )
+
+_tests.append(_test_py3_none_any)
+
+def _test_py3_none_macosx_10_9_universal2(env):
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-py3-none-macosx_10_9_universal2.whl",
+        osx_versions = [
+            (10, 9),
+            (11, 0),
+        ],
+        want = [],
+        want_versions = {
+            ":is_py3_none_osx_aarch64_universal2": {
+                (10, 9): ":is_py3_none_osx_10_9_aarch64_universal2",
+                (11, 0): ":is_py3_none_osx_11_0_aarch64_universal2",
+            },
+            ":is_py3_none_osx_x86_64_universal2": {
+                (10, 9): ":is_py3_none_osx_10_9_x86_64_universal2",
+                (11, 0): ":is_py3_none_osx_11_0_x86_64_universal2",
+            },
+        },
+    )
+
+_tests.append(_test_py3_none_macosx_10_9_universal2)
+
+def _test_cp37_abi3_linux_x86_64(env):
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-cp37-abi3-linux_x86_64.whl",
+        want = [
+            ":is_cp3x_abi3_linux_x86_64",
+        ],
+    )
+
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-cp37-abi3-linux_x86_64.whl",
+        python_version = "3.2",
+        python_default = True,
+        want = [
+            ":is_cp3x_abi3_linux_x86_64",
+            ":is_cp3.2_cp3x_abi3_linux_x86_64",
+        ],
+    )
+
+_tests.append(_test_cp37_abi3_linux_x86_64)
+
+def _test_cp37_abi3_windows_x86_64(env):
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-cp37-abi3-windows_x86_64.whl",
+        want = [
+            ":is_cp3x_abi3_windows_x86_64",
+        ],
+    )
+
+_tests.append(_test_cp37_abi3_windows_x86_64)
+
+def _test_cp37_abi3_manylinux_2_17_x86_64(env):
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl",
+        glibc_versions = [
+            (2, 16),
+            (2, 17),
+            (2, 18),
+        ],
+        want = [],
+        want_versions = {
+            ":is_cp3x_abi3_manylinux_x86_64": {
+                (2, 17): ":is_cp3x_abi3_manylinux_2_17_x86_64",
+                (2, 18): ":is_cp3x_abi3_manylinux_2_18_x86_64",
+            },
+        },
+    )
+
+_tests.append(_test_cp37_abi3_manylinux_2_17_x86_64)
+
+def _test_cp37_abi3_manylinux_2_17_musllinux_1_1_aarch64(env):
+    # I've seen such a wheel being built for `uv`
+    _test_config_settings(
+        env,
+        filename = "foo-0.0.1-cp37-cp37-manylinux_2_17_arm64.musllinux_1_1_arm64.whl",
+        glibc_versions = [
+            (2, 16),
+            (2, 17),
+            (2, 18),
+        ],
+        muslc_versions = [
+            (1, 1),
+        ],
+        want = [],
+        want_versions = {
+            ":is_cp3x_cp_manylinux_aarch64": {
+                (2, 17): ":is_cp3x_cp_manylinux_2_17_aarch64",
+                (2, 18): ":is_cp3x_cp_manylinux_2_18_aarch64",
+            },
+            ":is_cp3x_cp_musllinux_aarch64": {
+                (1, 1): ":is_cp3x_cp_musllinux_1_1_aarch64",
+            },
+        },
+    )
+
+_tests.append(_test_cp37_abi3_manylinux_2_17_musllinux_1_1_aarch64)
+
+def _test_multiplatform_whl_aliases_empty(env):
+    # Check that we still work with an empty requirements.txt
+    got = multiplatform_whl_aliases(aliases = [], default_version = None)
+    env.expect.that_collection(got).contains_exactly([])
+
+_tests.append(_test_multiplatform_whl_aliases_empty)
+
+def _test_multiplatform_whl_aliases_nofilename(env):
+    aliases = [
+        whl_alias(
+            repo = "foo",
+            config_setting = "//:label",
+            version = "3.1",
+        ),
+    ]
+    got = multiplatform_whl_aliases(aliases = aliases, default_version = None)
+    env.expect.that_collection(got).contains_exactly(aliases)
+
+_tests.append(_test_multiplatform_whl_aliases_nofilename)
+
+def _test_multiplatform_whl_aliases_filename(env):
+    aliases = [
+        whl_alias(
+            repo = "foo-py3-0.0.3",
+            filename = "foo-0.0.3-py3-none-any.whl",
+            version = "3.2",
+        ),
+        whl_alias(
+            repo = "foo-py3-0.0.1",
+            filename = "foo-0.0.1-py3-none-any.whl",
+            version = "3.1",
+        ),
+        whl_alias(
+            repo = "foo-0.0.2",
+            filename = "foo-0.0.2-py3-none-any.whl",
+            version = "3.1",
+            target_platforms = [
+                "cp31_linux_x86_64",
+                "cp31_linux_aarch64",
+            ],
+        ),
+    ]
+    got = multiplatform_whl_aliases(
+        aliases = aliases,
+        default_version = "3.1",
+        glibc_versions = [],
+        muslc_versions = [],
+        osx_versions = [],
+    )
+    want = [
+        whl_alias(config_setting = "//_config:is_cp3.1_py3_none_any", repo = "foo-py3-0.0.1", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_cp3.1_py3_none_any_linux_aarch64", repo = "foo-0.0.2", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_cp3.1_py3_none_any_linux_x86_64", repo = "foo-0.0.2", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_cp3.2_py3_none_any", repo = "foo-py3-0.0.3", version = "3.2"),
+        whl_alias(config_setting = "//_config:is_py3_none_any", repo = "foo-py3-0.0.1", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_py3_none_any_linux_aarch64", repo = "foo-0.0.2", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_py3_none_any_linux_x86_64", repo = "foo-0.0.2", version = "3.1"),
+    ]
+    env.expect.that_collection(got).contains_exactly(want)
+
+_tests.append(_test_multiplatform_whl_aliases_filename)
+
+def _test_multiplatform_whl_aliases_filename_versioned(env):
+    aliases = [
+        whl_alias(
+            repo = "glibc-2.17",
+            filename = "foo-0.0.1-py3-none-manylinux_2_17_x86_64.whl",
+            version = "3.1",
+        ),
+        whl_alias(
+            repo = "glibc-2.18",
+            filename = "foo-0.0.1-py3-none-manylinux_2_18_x86_64.whl",
+            version = "3.1",
+        ),
+        whl_alias(
+            repo = "musl",
+            filename = "foo-0.0.1-py3-none-musllinux_1_1_x86_64.whl",
+            version = "3.1",
+        ),
+    ]
+    got = multiplatform_whl_aliases(
+        aliases = aliases,
+        default_version = None,
+        glibc_versions = [(2, 17), (2, 18)],
+        muslc_versions = [(1, 1), (1, 2)],
+        osx_versions = [],
+    )
+    want = [
+        whl_alias(config_setting = "//_config:is_cp3.1_py3_none_manylinux_2_17_x86_64", repo = "glibc-2.17", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_cp3.1_py3_none_manylinux_2_18_x86_64", repo = "glibc-2.18", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_cp3.1_py3_none_manylinux_x86_64", repo = "glibc-2.17", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_cp3.1_py3_none_musllinux_1_1_x86_64", repo = "musl", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_cp3.1_py3_none_musllinux_1_2_x86_64", repo = "musl", version = "3.1"),
+        whl_alias(config_setting = "//_config:is_cp3.1_py3_none_musllinux_x86_64", repo = "musl", version = "3.1"),
+    ]
+    env.expect.that_collection(got).contains_exactly(want)
+
+_tests.append(_test_multiplatform_whl_aliases_filename_versioned)
+
+def _test_config_settings_exist(env):
+    for py_tag in ["py2.py3", "py3", "py311", "cp311"]:
+        if py_tag == "py2.py3":
+            abis = ["none"]
+        elif py_tag.startswith("py"):
+            abis = ["none", "abi3"]
+        else:
+            abis = ["none", "abi3", "cp311"]
+
+        for abi_tag in abis:
+            for platform_tag, kwargs in {
+                "any": {},
+                "macosx_11_0_arm64": {
+                    "osx_versions": [(11, 0)],
+                    "target_platforms": ["osx_aarch64"],
+                },
+                "manylinux_2_17_x86_64": {
+                    "glibc_versions": [(2, 17), (2, 18)],
+                    "target_platforms": ["linux_x86_64"],
+                },
+                "manylinux_2_18_x86_64": {
+                    "glibc_versions": [(2, 17), (2, 18)],
+                    "target_platforms": ["linux_x86_64"],
+                },
+                "musllinux_1_1_aarch64": {
+                    "muslc_versions": [(1, 2), (1, 1), (1, 0)],
+                    "target_platforms": ["linux_aarch64"],
+                },
+            }.items():
+                aliases = [
+                    whl_alias(
+                        repo = "repo",
+                        filename = "foo-0.0.1-{}-{}-{}.whl".format(py_tag, abi_tag, platform_tag),
+                        version = "3.11",
+                    ),
+                ]
+                available_config_settings = []
+                mock_rule = lambda name, **kwargs: available_config_settings.append(name)
+                config_settings(
+                    python_versions = ["3.11"],
+                    native = struct(
+                        alias = mock_rule,
+                        config_setting = mock_rule,
+                    ),
+                    **kwargs
+                )
+
+                got_aliases = multiplatform_whl_aliases(
+                    aliases = aliases,
+                    default_version = None,
+                    glibc_versions = kwargs.get("glibc_versions", []),
+                    muslc_versions = kwargs.get("muslc_versions", []),
+                    osx_versions = kwargs.get("osx_versions", []),
+                )
+                got = [a.config_setting.partition(":")[-1] for a in got_aliases]
+
+                env.expect.that_collection(available_config_settings).contains_at_least(got)
+
+_tests.append(_test_config_settings_exist)
+
+def render_pkg_aliases_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/pypi/requirements_files_by_platform/BUILD.bazel b/tests/pypi/requirements_files_by_platform/BUILD.bazel
new file mode 100644
index 0000000..d78d459
--- /dev/null
+++ b/tests/pypi/requirements_files_by_platform/BUILD.bazel
@@ -0,0 +1,3 @@
+load(":requirements_files_by_platform_tests.bzl", "requirements_files_by_platform_test_suite")
+
+requirements_files_by_platform_test_suite(name = "requirements_files_by_platform_tests")
diff --git a/tests/pypi/requirements_files_by_platform/requirements_files_by_platform_tests.bzl b/tests/pypi/requirements_files_by_platform/requirements_files_by_platform_tests.bzl
new file mode 100644
index 0000000..b729b0e
--- /dev/null
+++ b/tests/pypi/requirements_files_by_platform/requirements_files_by_platform_tests.bzl
@@ -0,0 +1,205 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private/pypi:requirements_files_by_platform.bzl", "requirements_files_by_platform")  # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def _test_fail_no_requirements(env):
+    errors = []
+    requirements_files_by_platform(
+        fail_fn = errors.append,
+    )
+    env.expect.that_str(errors[0]).equals("""\
+A 'requirements_lock' attribute must be specified, a platform-specific lockfiles via 'requirements_by_platform' or an os-specific lockfiles must be specified via 'requirements_*' attributes""")
+
+_tests.append(_test_fail_no_requirements)
+
+def _test_fail_duplicate_platforms(env):
+    errors = []
+    requirements_files_by_platform(
+        requirements_by_platform = {
+            "requirements_linux": "linux_x86_64",
+            "requirements_lock": "*",
+        },
+        fail_fn = errors.append,
+    )
+    env.expect.that_collection(errors).has_size(1)
+    env.expect.that_str(",".join(errors)).equals("Expected the platform 'linux_x86_64' to be map only to a single requirements file, but got multiple: 'requirements_linux', 'requirements_lock'")
+
+_tests.append(_test_fail_duplicate_platforms)
+
+def _test_fail_download_only_bad_attr(env):
+    errors = []
+    requirements_files_by_platform(
+        requirements_linux = "requirements_linux",
+        requirements_osx = "requirements_osx",
+        extra_pip_args = [
+            "--platform",
+            "manylinux_2_27_x86_64",
+            "--platform=manylinux_2_12_x86_64",
+            "--platform manylinux_2_5_x86_64",
+        ],
+        fail_fn = errors.append,
+    )
+    env.expect.that_str(errors[0]).equals("only a single 'requirements_lock' file can be used when using '--platform' pip argument, consider specifying it via 'requirements_lock' attribute")
+
+_tests.append(_test_fail_download_only_bad_attr)
+
+def _test_simple(env):
+    for got in [
+        requirements_files_by_platform(
+            requirements_lock = "requirements_lock",
+        ),
+        requirements_files_by_platform(
+            requirements_by_platform = {
+                "requirements_lock": "*",
+            },
+        ),
+    ]:
+        env.expect.that_dict(got).contains_exactly({
+            "requirements_lock": [
+                "linux_aarch64",
+                "linux_arm",
+                "linux_ppc",
+                "linux_s390x",
+                "linux_x86_64",
+                "osx_aarch64",
+                "osx_x86_64",
+                "windows_x86_64",
+            ],
+        })
+
+_tests.append(_test_simple)
+
+def _test_simple_with_python_version(env):
+    for got in [
+        requirements_files_by_platform(
+            requirements_lock = "requirements_lock",
+            python_version = "3.11",
+        ),
+        requirements_files_by_platform(
+            requirements_by_platform = {
+                "requirements_lock": "*",
+            },
+            python_version = "3.11",
+        ),
+        # TODO @aignas 2024-07-15: consider supporting this way of specifying
+        # the requirements without the need of the `python_version` attribute
+        # setting. However, this might need more tweaks, hence only leaving a
+        # comment in the test.
+        # requirements_files_by_platform(
+        #     requirements_by_platform = {
+        #         "requirements_lock": "cp311_*",
+        #     },
+        # ),
+    ]:
+        env.expect.that_dict(got).contains_exactly({
+            "requirements_lock": [
+                "cp311_linux_aarch64",
+                "cp311_linux_arm",
+                "cp311_linux_ppc",
+                "cp311_linux_s390x",
+                "cp311_linux_x86_64",
+                "cp311_osx_aarch64",
+                "cp311_osx_x86_64",
+                "cp311_windows_x86_64",
+            ],
+        })
+
+_tests.append(_test_simple_with_python_version)
+
+def _test_multi_os(env):
+    for got in [
+        requirements_files_by_platform(
+            requirements_linux = "requirements_linux",
+            requirements_osx = "requirements_osx",
+            requirements_windows = "requirements_windows",
+        ),
+        requirements_files_by_platform(
+            requirements_by_platform = {
+                "requirements_linux": "linux_*",
+                "requirements_osx": "osx_*",
+                "requirements_windows": "windows_*",
+            },
+        ),
+    ]:
+        env.expect.that_dict(got).contains_exactly({
+            "requirements_linux": [
+                "linux_aarch64",
+                "linux_arm",
+                "linux_ppc",
+                "linux_s390x",
+                "linux_x86_64",
+            ],
+            "requirements_osx": [
+                "osx_aarch64",
+                "osx_x86_64",
+            ],
+            "requirements_windows": [
+                "windows_x86_64",
+            ],
+        })
+
+_tests.append(_test_multi_os)
+
+def _test_multi_os_download_only_platform(env):
+    got = requirements_files_by_platform(
+        requirements_lock = "requirements_linux",
+        extra_pip_args = [
+            "--platform",
+            "manylinux_2_27_x86_64",
+            "--platform=manylinux_2_12_x86_64",
+            "--platform manylinux_2_5_x86_64",
+        ],
+    )
+    env.expect.that_dict(got).contains_exactly({
+        "requirements_linux": ["linux_x86_64"],
+    })
+
+_tests.append(_test_multi_os_download_only_platform)
+
+def _test_os_arch_requirements_with_default(env):
+    got = requirements_files_by_platform(
+        requirements_by_platform = {
+            "requirements_exotic": "linux_super_exotic",
+            "requirements_linux": "linux_x86_64,linux_aarch64",
+        },
+        requirements_lock = "requirements_lock",
+    )
+    env.expect.that_dict(got).contains_exactly({
+        "requirements_exotic": ["linux_super_exotic"],
+        "requirements_linux": ["linux_x86_64", "linux_aarch64"],
+        "requirements_lock": [
+            "linux_arm",
+            "linux_ppc",
+            "linux_s390x",
+            "osx_aarch64",
+            "osx_x86_64",
+            "windows_x86_64",
+        ],
+    })
+
+_tests.append(_test_os_arch_requirements_with_default)
+
+def requirements_files_by_platform_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/pypi/whl_installer/BUILD.bazel b/tests/pypi/whl_installer/BUILD.bazel
new file mode 100644
index 0000000..e25c4a0
--- /dev/null
+++ b/tests/pypi/whl_installer/BUILD.bazel
@@ -0,0 +1,64 @@
+load("//python:defs.bzl", "py_test")
+
+alias(
+    name = "lib",
+    actual = "//python/private/pypi/whl_installer:lib",
+)
+
+py_test(
+    name = "arguments_test",
+    size = "small",
+    srcs = [
+        "arguments_test.py",
+    ],
+    deps = [
+        ":lib",
+    ],
+)
+
+py_test(
+    name = "namespace_pkgs_test",
+    size = "small",
+    srcs = [
+        "namespace_pkgs_test.py",
+    ],
+    deps = [
+        ":lib",
+    ],
+)
+
+py_test(
+    name = "platform_test",
+    size = "small",
+    srcs = [
+        "platform_test.py",
+    ],
+    data = ["//examples/wheel:minimal_with_py_package"],
+    deps = [
+        ":lib",
+    ],
+)
+
+py_test(
+    name = "wheel_installer_test",
+    size = "small",
+    srcs = [
+        "wheel_installer_test.py",
+    ],
+    data = ["//examples/wheel:minimal_with_py_package"],
+    deps = [
+        ":lib",
+    ],
+)
+
+py_test(
+    name = "wheel_test",
+    size = "small",
+    srcs = [
+        "wheel_test.py",
+    ],
+    data = ["//examples/wheel:minimal_with_py_package"],
+    deps = [
+        ":lib",
+    ],
+)
diff --git a/python/pip_install/tools/wheel_installer/arguments_test.py b/tests/pypi/whl_installer/arguments_test.py
similarity index 91%
rename from python/pip_install/tools/wheel_installer/arguments_test.py
rename to tests/pypi/whl_installer/arguments_test.py
index 840c2fa..5538054 100644
--- a/python/pip_install/tools/wheel_installer/arguments_test.py
+++ b/tests/pypi/whl_installer/arguments_test.py
@@ -12,18 +12,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import argparse
 import json
 import unittest
 
-from python.pip_install.tools.wheel_installer import arguments, wheel
+from python.private.pypi.whl_installer import arguments, wheel
 
 
 class ArgumentsTestCase(unittest.TestCase):
     def test_arguments(self) -> None:
         parser = arguments.parser()
-        repo_name = "foo"
-        repo_prefix = "pypi_"
         index_url = "--index_url=pypi.org/simple"
         extra_pip_args = [index_url]
         requirement = "foo==1.0.0 --hash=sha256:deadbeef"
@@ -56,9 +53,9 @@
         parser = arguments.parser()
         args = parser.parse_args(
             args=[
-                "--platform=host",
                 "--platform=linux_*",
-                "--platform=all",
+                "--platform=osx_*",
+                "--platform=windows_*",
                 "--requirement=foo",
             ]
         )
diff --git a/python/pip_install/tools/wheel_installer/namespace_pkgs_test.py b/tests/pypi/whl_installer/namespace_pkgs_test.py
similarity index 98%
rename from python/pip_install/tools/wheel_installer/namespace_pkgs_test.py
rename to tests/pypi/whl_installer/namespace_pkgs_test.py
index 4aa0fea..fbbd509 100644
--- a/python/pip_install/tools/wheel_installer/namespace_pkgs_test.py
+++ b/tests/pypi/whl_installer/namespace_pkgs_test.py
@@ -19,7 +19,7 @@
 import unittest
 from typing import Optional, Set
 
-from python.pip_install.tools.wheel_installer import namespace_pkgs
+from python.private.pypi.whl_installer import namespace_pkgs
 
 
 class TempDir:
diff --git a/tests/pypi/whl_installer/platform_test.py b/tests/pypi/whl_installer/platform_test.py
new file mode 100644
index 0000000..7ced1e9
--- /dev/null
+++ b/tests/pypi/whl_installer/platform_test.py
@@ -0,0 +1,152 @@
+import unittest
+from random import shuffle
+
+from python.private.pypi.whl_installer.platform import (
+    OS,
+    Arch,
+    Platform,
+    host_interpreter_minor_version,
+)
+
+
+class MinorVersionTest(unittest.TestCase):
+    def test_host(self):
+        host = host_interpreter_minor_version()
+        self.assertIsNotNone(host)
+
+
+class PlatformTest(unittest.TestCase):
+    def test_can_get_host(self):
+        host = Platform.host()
+        self.assertIsNotNone(host)
+        self.assertEqual(1, len(Platform.from_string("host")))
+        self.assertEqual(host, Platform.from_string("host"))
+
+    def test_can_get_linux_x86_64_without_py_version(self):
+        got = Platform.from_string("linux_x86_64")
+        want = Platform(os=OS.linux, arch=Arch.x86_64)
+        self.assertEqual(want, got[0])
+
+    def test_can_get_specific_from_string(self):
+        got = Platform.from_string("cp33_linux_x86_64")
+        want = Platform(os=OS.linux, arch=Arch.x86_64, minor_version=3)
+        self.assertEqual(want, got[0])
+
+    def test_can_get_all_for_py_version(self):
+        cp39 = Platform.all(minor_version=9)
+        self.assertEqual(18, len(cp39), f"Got {cp39}")
+        self.assertEqual(cp39, Platform.from_string("cp39_*"))
+
+    def test_can_get_all_for_os(self):
+        linuxes = Platform.all(OS.linux, minor_version=9)
+        self.assertEqual(6, len(linuxes))
+        self.assertEqual(linuxes, Platform.from_string("cp39_linux_*"))
+
+    def test_can_get_all_for_os_for_host_python(self):
+        linuxes = Platform.all(OS.linux)
+        self.assertEqual(6, len(linuxes))
+        self.assertEqual(linuxes, Platform.from_string("linux_*"))
+
+    def test_specific_version_specializations(self):
+        any_py33 = Platform(minor_version=3)
+
+        # When
+        all_specializations = list(any_py33.all_specializations())
+
+        want = (
+            [any_py33]
+            + [
+                Platform(arch=arch, minor_version=any_py33.minor_version)
+                for arch in Arch
+            ]
+            + [Platform(os=os, minor_version=any_py33.minor_version) for os in OS]
+            + Platform.all(minor_version=any_py33.minor_version)
+        )
+        self.assertEqual(want, all_specializations)
+
+    def test_aarch64_specializations(self):
+        any_aarch64 = Platform(arch=Arch.aarch64)
+        all_specializations = list(any_aarch64.all_specializations())
+        want = [
+            Platform(os=None, arch=Arch.aarch64),
+            Platform(os=OS.linux, arch=Arch.aarch64),
+            Platform(os=OS.osx, arch=Arch.aarch64),
+            Platform(os=OS.windows, arch=Arch.aarch64),
+        ]
+        self.assertEqual(want, all_specializations)
+
+    def test_linux_specializations(self):
+        any_linux = Platform(os=OS.linux)
+        all_specializations = list(any_linux.all_specializations())
+        want = [
+            Platform(os=OS.linux, arch=None),
+            Platform(os=OS.linux, arch=Arch.x86_64),
+            Platform(os=OS.linux, arch=Arch.x86_32),
+            Platform(os=OS.linux, arch=Arch.aarch64),
+            Platform(os=OS.linux, arch=Arch.ppc),
+            Platform(os=OS.linux, arch=Arch.s390x),
+            Platform(os=OS.linux, arch=Arch.arm),
+        ]
+        self.assertEqual(want, all_specializations)
+
+    def test_osx_specializations(self):
+        any_osx = Platform(os=OS.osx)
+        all_specializations = list(any_osx.all_specializations())
+        # NOTE @aignas 2024-01-14: even though in practice we would only have
+        # Python on osx aarch64 and osx x86_64, we return all arch possibilities
+        # to make the code simpler.
+        want = [
+            Platform(os=OS.osx, arch=None),
+            Platform(os=OS.osx, arch=Arch.x86_64),
+            Platform(os=OS.osx, arch=Arch.x86_32),
+            Platform(os=OS.osx, arch=Arch.aarch64),
+            Platform(os=OS.osx, arch=Arch.ppc),
+            Platform(os=OS.osx, arch=Arch.s390x),
+            Platform(os=OS.osx, arch=Arch.arm),
+        ]
+        self.assertEqual(want, all_specializations)
+
+    def test_platform_sort(self):
+        platforms = [
+            Platform(os=OS.linux, arch=None),
+            Platform(os=OS.linux, arch=Arch.x86_64),
+            Platform(os=OS.osx, arch=None),
+            Platform(os=OS.osx, arch=Arch.x86_64),
+            Platform(os=OS.osx, arch=Arch.aarch64),
+        ]
+        shuffle(platforms)
+        platforms.sort()
+        want = [
+            Platform(os=OS.linux, arch=None),
+            Platform(os=OS.linux, arch=Arch.x86_64),
+            Platform(os=OS.osx, arch=None),
+            Platform(os=OS.osx, arch=Arch.x86_64),
+            Platform(os=OS.osx, arch=Arch.aarch64),
+        ]
+
+        self.assertEqual(want, platforms)
+
+    def test_wheel_os_alias(self):
+        self.assertEqual("osx", str(OS.osx))
+        self.assertEqual(str(OS.darwin), str(OS.osx))
+
+    def test_wheel_arch_alias(self):
+        self.assertEqual("x86_64", str(Arch.x86_64))
+        self.assertEqual(str(Arch.amd64), str(Arch.x86_64))
+
+    def test_wheel_platform_alias(self):
+        give = Platform(
+            os=OS.darwin,
+            arch=Arch.amd64,
+        )
+        alias = Platform(
+            os=OS.osx,
+            arch=Arch.x86_64,
+        )
+
+        self.assertEqual("osx_x86_64", str(give))
+        self.assertEqual(str(alias), str(give))
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/python/pip_install/tools/wheel_installer/wheel_installer_test.py b/tests/pypi/whl_installer/wheel_installer_test.py
similarity index 80%
rename from python/pip_install/tools/wheel_installer/wheel_installer_test.py
rename to tests/pypi/whl_installer/wheel_installer_test.py
index 6eacd1f..7139779 100644
--- a/python/pip_install/tools/wheel_installer/wheel_installer_test.py
+++ b/tests/pypi/whl_installer/wheel_installer_test.py
@@ -19,7 +19,7 @@
 import unittest
 from pathlib import Path
 
-from python.pip_install.tools.wheel_installer import wheel, wheel_installer
+from python.private.pypi.whl_installer import wheel_installer
 
 
 class TestRequirementExtrasParsing(unittest.TestCase):
@@ -101,28 +101,5 @@
         self.assertEqual(want, metadata_file_content)
 
 
-class TestWheelPlatform(unittest.TestCase):
-    def test_wheel_os_alias(self):
-        self.assertEqual("OS.osx", str(wheel.OS.osx))
-        self.assertEqual(str(wheel.OS.darwin), str(wheel.OS.osx))
-
-    def test_wheel_arch_alias(self):
-        self.assertEqual("Arch.x86_64", str(wheel.Arch.x86_64))
-        self.assertEqual(str(wheel.Arch.amd64), str(wheel.Arch.x86_64))
-
-    def test_wheel_platform_alias(self):
-        give = wheel.Platform(
-            os=wheel.OS.darwin,
-            arch=wheel.Arch.amd64,
-        )
-        alias = wheel.Platform(
-            os=wheel.OS.osx,
-            arch=wheel.Arch.x86_64,
-        )
-
-        self.assertEqual("osx_x86_64", str(give))
-        self.assertEqual(str(alias), str(give))
-
-
 if __name__ == "__main__":
     unittest.main()
diff --git a/tests/pypi/whl_installer/wheel_test.py b/tests/pypi/whl_installer/wheel_test.py
new file mode 100644
index 0000000..404218e
--- /dev/null
+++ b/tests/pypi/whl_installer/wheel_test.py
@@ -0,0 +1,371 @@
+import unittest
+from unittest import mock
+
+from python.private.pypi.whl_installer import wheel
+from python.private.pypi.whl_installer.platform import OS, Arch, Platform
+
+_HOST_INTERPRETER_FN = (
+    "python.private.pypi.whl_installer.wheel.host_interpreter_minor_version"
+)
+
+
+class DepsTest(unittest.TestCase):
+    def test_simple(self):
+        deps = wheel.Deps("foo", requires_dist=["bar"])
+
+        got = deps.build()
+
+        self.assertIsInstance(got, wheel.FrozenDeps)
+        self.assertEqual(["bar"], got.deps)
+        self.assertEqual({}, got.deps_select)
+
+    def test_can_add_os_specific_deps(self):
+        deps = wheel.Deps(
+            "foo",
+            requires_dist=[
+                "bar",
+                "an_osx_dep; sys_platform=='darwin'",
+                "posix_dep; os_name=='posix'",
+                "win_dep; os_name=='nt'",
+            ],
+            platforms={
+                Platform(os=OS.linux, arch=Arch.x86_64),
+                Platform(os=OS.osx, arch=Arch.x86_64),
+                Platform(os=OS.osx, arch=Arch.aarch64),
+                Platform(os=OS.windows, arch=Arch.x86_64),
+            },
+        )
+
+        got = deps.build()
+
+        self.assertEqual(["bar"], got.deps)
+        self.assertEqual(
+            {
+                "@platforms//os:linux": ["posix_dep"],
+                "@platforms//os:osx": ["an_osx_dep", "posix_dep"],
+                "@platforms//os:windows": ["win_dep"],
+            },
+            got.deps_select,
+        )
+
+    def test_can_add_os_specific_deps_with_specific_python_version(self):
+        deps = wheel.Deps(
+            "foo",
+            requires_dist=[
+                "bar",
+                "an_osx_dep; sys_platform=='darwin'",
+                "posix_dep; os_name=='posix'",
+                "win_dep; os_name=='nt'",
+            ],
+            platforms={
+                Platform(os=OS.linux, arch=Arch.x86_64, minor_version=8),
+                Platform(os=OS.osx, arch=Arch.x86_64, minor_version=8),
+                Platform(os=OS.osx, arch=Arch.aarch64, minor_version=8),
+                Platform(os=OS.windows, arch=Arch.x86_64, minor_version=8),
+            },
+        )
+
+        got = deps.build()
+
+        self.assertEqual(["bar"], got.deps)
+        self.assertEqual(
+            {
+                "@platforms//os:linux": ["posix_dep"],
+                "@platforms//os:osx": ["an_osx_dep", "posix_dep"],
+                "@platforms//os:windows": ["win_dep"],
+            },
+            got.deps_select,
+        )
+
+    def test_deps_are_added_to_more_specialized_platforms(self):
+        got = wheel.Deps(
+            "foo",
+            requires_dist=[
+                "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
+                "mac_dep; sys_platform=='darwin'",
+            ],
+            platforms={
+                Platform(os=OS.osx, arch=Arch.x86_64),
+                Platform(os=OS.osx, arch=Arch.aarch64),
+            },
+        ).build()
+
+        self.assertEqual(
+            wheel.FrozenDeps(
+                deps=[],
+                deps_select={
+                    "osx_aarch64": ["m1_dep", "mac_dep"],
+                    "@platforms//os:osx": ["mac_dep"],
+                },
+            ),
+            got,
+        )
+
+    def test_deps_from_more_specialized_platforms_are_propagated(self):
+        got = wheel.Deps(
+            "foo",
+            requires_dist=[
+                "a_mac_dep; sys_platform=='darwin'",
+                "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
+            ],
+            platforms={
+                Platform(os=OS.osx, arch=Arch.x86_64),
+                Platform(os=OS.osx, arch=Arch.aarch64),
+            },
+        ).build()
+
+        self.assertEqual([], got.deps)
+        self.assertEqual(
+            {
+                "osx_aarch64": ["a_mac_dep", "m1_dep"],
+                "@platforms//os:osx": ["a_mac_dep"],
+            },
+            got.deps_select,
+        )
+
+    def test_non_platform_markers_are_added_to_common_deps(self):
+        got = wheel.Deps(
+            "foo",
+            requires_dist=[
+                "bar",
+                "baz; implementation_name=='cpython'",
+                "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
+            ],
+            platforms={
+                Platform(os=OS.linux, arch=Arch.x86_64),
+                Platform(os=OS.osx, arch=Arch.x86_64),
+                Platform(os=OS.osx, arch=Arch.aarch64),
+                Platform(os=OS.windows, arch=Arch.x86_64),
+            },
+        ).build()
+
+        self.assertEqual(["bar", "baz"], got.deps)
+        self.assertEqual(
+            {
+                "osx_aarch64": ["m1_dep"],
+            },
+            got.deps_select,
+        )
+
+    def test_self_is_ignored(self):
+        deps = wheel.Deps(
+            "foo",
+            requires_dist=[
+                "bar",
+                "req_dep; extra == 'requests'",
+                "foo[requests]; extra == 'ssl'",
+                "ssl_lib; extra == 'ssl'",
+            ],
+            extras={"ssl"},
+        )
+
+        got = deps.build()
+
+        self.assertEqual(["bar", "req_dep", "ssl_lib"], got.deps)
+        self.assertEqual({}, got.deps_select)
+
+    def test_self_dependencies_can_come_in_any_order(self):
+        deps = wheel.Deps(
+            "foo",
+            requires_dist=[
+                "bar",
+                "baz; extra == 'feat'",
+                "foo[feat2]; extra == 'all'",
+                "foo[feat]; extra == 'feat2'",
+                "zdep; extra == 'all'",
+            ],
+            extras={"all"},
+        )
+
+        got = deps.build()
+
+        self.assertEqual(["bar", "baz", "zdep"], got.deps)
+        self.assertEqual({}, got.deps_select)
+
+    def test_can_get_deps_based_on_specific_python_version(self):
+        requires_dist = [
+            "bar",
+            "baz; python_version < '3.8'",
+            "posix_dep; os_name=='posix' and python_version >= '3.8'",
+        ]
+
+        py38_deps = wheel.Deps(
+            "foo",
+            requires_dist=requires_dist,
+            platforms=[
+                Platform(os=OS.linux, arch=Arch.x86_64, minor_version=8),
+            ],
+        ).build()
+        py37_deps = wheel.Deps(
+            "foo",
+            requires_dist=requires_dist,
+            platforms=[
+                Platform(os=OS.linux, arch=Arch.x86_64, minor_version=7),
+            ],
+        ).build()
+
+        self.assertEqual(["bar", "baz"], py37_deps.deps)
+        self.assertEqual({}, py37_deps.deps_select)
+        self.assertEqual(["bar"], py38_deps.deps)
+        self.assertEqual({"@platforms//os:linux": ["posix_dep"]}, py38_deps.deps_select)
+
+    @mock.patch(_HOST_INTERPRETER_FN)
+    def test_no_version_select_when_single_version(self, mock_host_interpreter_version):
+        requires_dist = [
+            "bar",
+            "baz; python_version >= '3.8'",
+            "posix_dep; os_name=='posix'",
+            "posix_dep_with_version; os_name=='posix' and python_version >= '3.8'",
+            "arch_dep; platform_machine=='x86_64' and python_version >= '3.8'",
+        ]
+        mock_host_interpreter_version.return_value = 7
+
+        self.maxDiff = None
+
+        deps = wheel.Deps(
+            "foo",
+            requires_dist=requires_dist,
+            platforms=[
+                Platform(os=os, arch=Arch.x86_64, minor_version=minor)
+                for minor in [8]
+                for os in [OS.linux, OS.windows]
+            ],
+        )
+        got = deps.build()
+
+        self.assertEqual(["bar", "baz"], got.deps)
+        self.assertEqual(
+            {
+                "@platforms//os:linux": ["posix_dep", "posix_dep_with_version"],
+                "linux_x86_64": ["arch_dep", "posix_dep", "posix_dep_with_version"],
+                "windows_x86_64": ["arch_dep"],
+            },
+            got.deps_select,
+        )
+
+    @mock.patch(_HOST_INTERPRETER_FN)
+    def test_can_get_version_select(self, mock_host_interpreter_version):
+        requires_dist = [
+            "bar",
+            "baz; python_version < '3.8'",
+            "baz_new; python_version >= '3.8'",
+            "posix_dep; os_name=='posix'",
+            "posix_dep_with_version; os_name=='posix' and python_version >= '3.8'",
+            "arch_dep; platform_machine=='x86_64' and python_version < '3.8'",
+        ]
+        mock_host_interpreter_version.return_value = 7
+
+        self.maxDiff = None
+
+        deps = wheel.Deps(
+            "foo",
+            requires_dist=requires_dist,
+            platforms=[
+                Platform(os=os, arch=Arch.x86_64, minor_version=minor)
+                for minor in [7, 8, 9]
+                for os in [OS.linux, OS.windows]
+            ],
+        )
+        got = deps.build()
+
+        self.assertEqual(["bar"], got.deps)
+        self.assertEqual(
+            {
+                "//conditions:default": ["baz"],
+                "@//python/config_settings:is_python_3.7": ["baz"],
+                "@//python/config_settings:is_python_3.8": ["baz_new"],
+                "@//python/config_settings:is_python_3.9": ["baz_new"],
+                "@platforms//os:linux": ["baz", "posix_dep"],
+                "cp37_linux_x86_64": ["arch_dep", "baz", "posix_dep"],
+                "cp37_windows_x86_64": ["arch_dep", "baz"],
+                "cp37_linux_anyarch": ["baz", "posix_dep"],
+                "cp38_linux_anyarch": [
+                    "baz_new",
+                    "posix_dep",
+                    "posix_dep_with_version",
+                ],
+                "cp39_linux_anyarch": [
+                    "baz_new",
+                    "posix_dep",
+                    "posix_dep_with_version",
+                ],
+                "linux_x86_64": ["arch_dep", "baz", "posix_dep"],
+                "windows_x86_64": ["arch_dep", "baz"],
+            },
+            got.deps_select,
+        )
+
+    @mock.patch(_HOST_INTERPRETER_FN)
+    def test_deps_spanning_all_target_py_versions_are_added_to_common(
+        self, mock_host_version
+    ):
+        requires_dist = [
+            "bar",
+            "baz (<2,>=1.11) ; python_version < '3.8'",
+            "baz (<2,>=1.14) ; python_version >= '3.8'",
+        ]
+        mock_host_version.return_value = 8
+
+        deps = wheel.Deps(
+            "foo",
+            requires_dist=requires_dist,
+            platforms=Platform.from_string(["cp37_*", "cp38_*", "cp39_*"]),
+        )
+        got = deps.build()
+
+        self.assertEqual(["bar", "baz"], got.deps)
+        self.assertEqual({}, got.deps_select)
+
+    @mock.patch(_HOST_INTERPRETER_FN)
+    def test_deps_are_not_duplicated(self, mock_host_version):
+        mock_host_version.return_value = 7
+
+        # See an example in
+        # https://files.pythonhosted.org/packages/76/9e/db1c2d56c04b97981c06663384f45f28950a73d9acf840c4006d60d0a1ff/opencv_python-4.9.0.80-cp37-abi3-win32.whl.metadata
+        requires_dist = [
+            "bar >=0.1.0 ; python_version < '3.7'",
+            "bar >=0.2.0 ; python_version >= '3.7'",
+            "bar >=0.4.0 ; python_version >= '3.6' and platform_system == 'Linux' and platform_machine == 'aarch64'",
+            "bar >=0.4.0 ; python_version >= '3.9'",
+            "bar >=0.5.0 ; python_version <= '3.9' and platform_system == 'Darwin' and platform_machine == 'arm64'",
+            "bar >=0.5.0 ; python_version >= '3.10' and platform_system == 'Darwin'",
+            "bar >=0.5.0 ; python_version >= '3.10'",
+            "bar >=0.6.0 ; python_version >= '3.11'",
+        ]
+
+        deps = wheel.Deps(
+            "foo",
+            requires_dist=requires_dist,
+            platforms=Platform.from_string(["cp37_*", "cp310_*"]),
+        )
+        got = deps.build()
+
+        self.assertEqual(["bar"], got.deps)
+        self.assertEqual({}, got.deps_select)
+
+    @mock.patch(_HOST_INTERPRETER_FN)
+    def test_deps_are_not_duplicated_when_encountering_platform_dep_first(
+        self, mock_host_version
+    ):
+        mock_host_version.return_value = 7
+
+        # Note that we sort the incoming `requires_dist`, so we must ensure that
+        # no issues arise even when the platform-specific line comes first.
+        requires_dist = [
+            "bar >=0.4.0 ; python_version >= '3.6' and platform_system == 'Linux' and platform_machine == 'aarch64'",
+            "bar >=0.5.0 ; python_version >= '3.9'",
+        ]
+
+        deps = wheel.Deps(
+            "foo",
+            requires_dist=requires_dist,
+            platforms=Platform.from_string(["cp37_*", "cp310_*"]),
+        )
+        got = deps.build()
+
+        self.assertEqual(["bar"], got.deps)
+        self.assertEqual({}, got.deps_select)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/pypi/whl_repo_name/BUILD.bazel b/tests/pypi/whl_repo_name/BUILD.bazel
new file mode 100644
index 0000000..8671dd7
--- /dev/null
+++ b/tests/pypi/whl_repo_name/BUILD.bazel
@@ -0,0 +1,3 @@
+load(":whl_repo_name_tests.bzl", "whl_repo_name_test_suite")
+
+whl_repo_name_test_suite(name = "whl_repo_name_tests")
diff --git a/tests/pypi/whl_repo_name/whl_repo_name_tests.bzl b/tests/pypi/whl_repo_name/whl_repo_name_tests.bzl
new file mode 100644
index 0000000..8b7df83
--- /dev/null
+++ b/tests/pypi/whl_repo_name/whl_repo_name_tests.bzl
@@ -0,0 +1,52 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"Tests for whl_repo_name."
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private/pypi:whl_repo_name.bzl", "whl_repo_name")  # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def _test_simple(env):
+    got = whl_repo_name("prefix", "foo-1.2.3-py3-none-any.whl", "deadbeef")
+    env.expect.that_str(got).equals("prefix_foo_py3_none_any_deadbeef")
+
+_tests.append(_test_simple)
+
+def _test_sdist(env):
+    got = whl_repo_name("prefix", "foo-1.2.3.tar.gz", "deadbeef000deadbeef")
+    env.expect.that_str(got).equals("prefix_foo_sdist_deadbeef")
+
+_tests.append(_test_sdist)
+
+def _test_platform_whl(env):
+    got = whl_repo_name(
+        "prefix",
+        "foo-1.2.3-cp39.cp310-abi3-manylinux1_x86_64.manylinux_2_17_x86_64.whl",
+        "deadbeef000deadbeef",
+    )
+
+    # We only need the first segment of each
+    env.expect.that_str(got).equals("prefix_foo_cp39_abi3_manylinux_2_5_x86_64_deadbeef")
+
+_tests.append(_test_platform_whl)
+
+def whl_repo_name_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/private/whl_target_platforms/BUILD.bazel b/tests/pypi/whl_target_platforms/BUILD.bazel
similarity index 87%
rename from tests/private/whl_target_platforms/BUILD.bazel
rename to tests/pypi/whl_target_platforms/BUILD.bazel
index fec25af..6c35b08 100644
--- a/tests/private/whl_target_platforms/BUILD.bazel
+++ b/tests/pypi/whl_target_platforms/BUILD.bazel
@@ -12,6 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+load(":select_whl_tests.bzl", "select_whl_test_suite")
 load(":whl_target_platforms_tests.bzl", "whl_target_platforms_test_suite")
 
+select_whl_test_suite(name = "select_whl_tests")
+
 whl_target_platforms_test_suite(name = "whl_target_platforms_tests")
diff --git a/tests/pypi/whl_target_platforms/select_whl_tests.bzl b/tests/pypi/whl_target_platforms/select_whl_tests.bzl
new file mode 100644
index 0000000..2994bd5
--- /dev/null
+++ b/tests/pypi/whl_target_platforms/select_whl_tests.bzl
@@ -0,0 +1,278 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"Tests for select_whls."
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "REPO_VERBOSITY_ENV_VAR", "repo_utils")  # buildifier: disable=bzl-visibility
+load("//python/private/pypi:whl_target_platforms.bzl", "select_whls")  # buildifier: disable=bzl-visibility
+
+WHL_LIST = [
+    "pkg-0.0.1-cp311-cp311-macosx_10_9_universal2.whl",
+    "pkg-0.0.1-cp311-cp311-macosx_10_9_x86_64.whl",
+    "pkg-0.0.1-cp311-cp311-macosx_11_0_arm64.whl",
+    "pkg-0.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+    "pkg-0.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",
+    "pkg-0.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl",
+    "pkg-0.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+    "pkg-0.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
+    "pkg-0.0.1-cp311-cp311-musllinux_1_1_aarch64.whl",
+    "pkg-0.0.1-cp311-cp311-musllinux_1_1_i686.whl",
+    "pkg-0.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl",
+    "pkg-0.0.1-cp311-cp311-musllinux_1_1_s390x.whl",
+    "pkg-0.0.1-cp311-cp311-musllinux_1_1_x86_64.whl",
+    "pkg-0.0.1-cp311-cp311-win32.whl",
+    "pkg-0.0.1-cp311-cp311-win_amd64.whl",
+    "pkg-0.0.1-cp37-cp37m-macosx_10_9_x86_64.whl",
+    "pkg-0.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+    "pkg-0.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",
+    "pkg-0.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl",
+    "pkg-0.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+    "pkg-0.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
+    "pkg-0.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl",
+    "pkg-0.0.1-cp37-cp37m-musllinux_1_1_i686.whl",
+    "pkg-0.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl",
+    "pkg-0.0.1-cp37-cp37m-musllinux_1_1_s390x.whl",
+    "pkg-0.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl",
+    "pkg-0.0.1-cp37-cp37m-win32.whl",
+    "pkg-0.0.1-cp37-cp37m-win_amd64.whl",
+    "pkg-0.0.1-cp39-cp39-macosx_10_9_universal2.whl",
+    "pkg-0.0.1-cp39-cp39-macosx_10_9_x86_64.whl",
+    "pkg-0.0.1-cp39-cp39-macosx_11_0_arm64.whl",
+    "pkg-0.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
+    "pkg-0.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl",
+    "pkg-0.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl",
+    "pkg-0.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+    "pkg-0.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
+    "pkg-0.0.1-cp39-cp39-musllinux_1_1_aarch64.whl",
+    "pkg-0.0.1-cp39-cp39-musllinux_1_1_i686.whl",
+    "pkg-0.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl",
+    "pkg-0.0.1-cp39-cp39-musllinux_1_1_s390x.whl",
+    "pkg-0.0.1-cp39-cp39-musllinux_1_1_x86_64.whl",
+    "pkg-0.0.1-cp39-cp39-win32.whl",
+    "pkg-0.0.1-cp39-cp39-win_amd64.whl",
+    "pkg-0.0.1-cp39-abi3-any.whl",
+    "pkg-0.0.1-py310-abi3-any.whl",
+    "pkg-0.0.1-py3-abi3-any.whl",
+    "pkg-0.0.1-py3-none-any.whl",
+]
+
+def _match(env, got, *want_filenames):
+    if not want_filenames:
+        env.expect.that_collection(got).has_size(len(want_filenames))
+        return
+
+    got_filenames = [g.filename for g in got]
+    env.expect.that_collection(got_filenames).contains_exactly(want_filenames)
+
+    if got:
+        # Check that we pass the original structs
+        env.expect.that_str(got[0].other).equals("dummy")
+
+def _select_whls(whls, debug = False, **kwargs):
+    return select_whls(
+        whls = [
+            struct(
+                filename = f,
+                other = "dummy",
+            )
+            for f in whls
+        ],
+        logger = repo_utils.logger(struct(
+            os = struct(
+                environ = {
+                    REPO_DEBUG_ENV_VAR: "1",
+                    REPO_VERBOSITY_ENV_VAR: "TRACE" if debug else "INFO",
+                },
+            ),
+        ), "unit-test"),
+        **kwargs
+    )
+
+_tests = []
+
+def _test_simplest(env):
+    got = _select_whls(
+        whls = [
+            "pkg-0.0.1-py2.py3-abi3-any.whl",
+            "pkg-0.0.1-py3-abi3-any.whl",
+            "pkg-0.0.1-py3-none-any.whl",
+        ],
+        want_platforms = ["cp30_ignored"],
+    )
+    _match(
+        env,
+        got,
+        "pkg-0.0.1-py3-abi3-any.whl",
+        "pkg-0.0.1-py3-none-any.whl",
+    )
+
+_tests.append(_test_simplest)
+
+def _test_select_by_supported_py_version(env):
+    for minor_version, match in {
+        8: "pkg-0.0.1-py3-abi3-any.whl",
+        11: "pkg-0.0.1-py311-abi3-any.whl",
+    }.items():
+        got = _select_whls(
+            whls = [
+                "pkg-0.0.1-py2.py3-abi3-any.whl",
+                "pkg-0.0.1-py3-abi3-any.whl",
+                "pkg-0.0.1-py311-abi3-any.whl",
+            ],
+            want_platforms = ["cp3{}_ignored".format(minor_version)],
+        )
+        _match(env, got, match)
+
+_tests.append(_test_select_by_supported_py_version)
+
+def _test_select_by_supported_cp_version(env):
+    for minor_version, match in {
+        11: "pkg-0.0.1-cp311-abi3-any.whl",
+        8: "pkg-0.0.1-py3-abi3-any.whl",
+    }.items():
+        got = _select_whls(
+            whls = [
+                "pkg-0.0.1-py2.py3-abi3-any.whl",
+                "pkg-0.0.1-py3-abi3-any.whl",
+                "pkg-0.0.1-py311-abi3-any.whl",
+                "pkg-0.0.1-cp311-abi3-any.whl",
+            ],
+            want_platforms = ["cp3{}_ignored".format(minor_version)],
+        )
+        _match(env, got, match)
+
+_tests.append(_test_select_by_supported_cp_version)
+
+def _test_supported_cp_version_manylinux(env):
+    for minor_version, match in {
+        8: "pkg-0.0.1-py3-none-manylinux_x86_64.whl",
+        11: "pkg-0.0.1-cp311-none-manylinux_x86_64.whl",
+    }.items():
+        got = _select_whls(
+            whls = [
+                "pkg-0.0.1-py2.py3-none-manylinux_x86_64.whl",
+                "pkg-0.0.1-py3-none-manylinux_x86_64.whl",
+                "pkg-0.0.1-py311-none-manylinux_x86_64.whl",
+                "pkg-0.0.1-cp311-none-manylinux_x86_64.whl",
+            ],
+            want_platforms = ["cp3{}_linux_x86_64".format(minor_version)],
+        )
+        _match(env, got, match)
+
+_tests.append(_test_supported_cp_version_manylinux)
+
+def _test_ignore_unsupported(env):
+    got = _select_whls(
+        whls = [
+            "pkg-0.0.1-xx3-abi3-any.whl",
+        ],
+        want_platforms = ["cp30_ignored"],
+    )
+    _match(env, got)
+
+_tests.append(_test_ignore_unsupported)
+
+def _test_match_abi_and_not_py_version(env):
+    # Check we match the ABI and not the py version
+    got = _select_whls(whls = WHL_LIST, want_platforms = ["cp37_linux_x86_64"])
+    _match(
+        env,
+        got,
+        "pkg-0.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+        "pkg-0.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl",
+        "pkg-0.0.1-py3-abi3-any.whl",
+        "pkg-0.0.1-py3-none-any.whl",
+    )
+
+_tests.append(_test_match_abi_and_not_py_version)
+
+def _test_select_filename_with_many_tags(env):
+    # Check we can select a filename with many platform tags
+    got = _select_whls(whls = WHL_LIST, want_platforms = ["cp39_linux_x86_32"])
+    _match(
+        env,
+        got,
+        "pkg-0.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl",
+        "pkg-0.0.1-cp39-cp39-musllinux_1_1_i686.whl",
+        "pkg-0.0.1-cp39-abi3-any.whl",
+        "pkg-0.0.1-py3-none-any.whl",
+    )
+
+_tests.append(_test_select_filename_with_many_tags)
+
+def _test_osx_prefer_arch_specific(env):
+    # Check that we prefer the specific wheel
+    got = _select_whls(
+        whls = WHL_LIST,
+        want_platforms = ["cp311_osx_x86_64", "cp311_osx_x86_32"],
+    )
+    _match(
+        env,
+        got,
+        "pkg-0.0.1-cp311-cp311-macosx_10_9_universal2.whl",
+        "pkg-0.0.1-cp311-cp311-macosx_10_9_x86_64.whl",
+        "pkg-0.0.1-cp39-abi3-any.whl",
+        "pkg-0.0.1-py3-none-any.whl",
+    )
+
+    got = _select_whls(whls = WHL_LIST, want_platforms = ["cp311_osx_aarch64"])
+    _match(
+        env,
+        got,
+        "pkg-0.0.1-cp311-cp311-macosx_10_9_universal2.whl",
+        "pkg-0.0.1-cp311-cp311-macosx_11_0_arm64.whl",
+        "pkg-0.0.1-cp39-abi3-any.whl",
+        "pkg-0.0.1-py3-none-any.whl",
+    )
+
+_tests.append(_test_osx_prefer_arch_specific)
+
+def _test_osx_fallback_to_universal2(env):
+    # Check that we can use the universal2 if the arm wheel is not available
+    got = _select_whls(
+        whls = [w for w in WHL_LIST if "arm64" not in w],
+        want_platforms = ["cp311_osx_aarch64"],
+    )
+    _match(
+        env,
+        got,
+        "pkg-0.0.1-cp311-cp311-macosx_10_9_universal2.whl",
+        "pkg-0.0.1-cp39-abi3-any.whl",
+        "pkg-0.0.1-py3-none-any.whl",
+    )
+
+_tests.append(_test_osx_fallback_to_universal2)
+
+def _test_prefer_manylinux_wheels(env):
+    # Check we prefer platform specific wheels
+    got = _select_whls(whls = WHL_LIST, want_platforms = ["cp39_linux_x86_64"])
+    _match(
+        env,
+        got,
+        "pkg-0.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
+        "pkg-0.0.1-cp39-cp39-musllinux_1_1_x86_64.whl",
+        "pkg-0.0.1-cp39-abi3-any.whl",
+        "pkg-0.0.1-py3-none-any.whl",
+    )
+
+_tests.append(_test_prefer_manylinux_wheels)
+
+def select_whl_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/pypi/whl_target_platforms/whl_target_platforms_tests.bzl b/tests/pypi/whl_target_platforms/whl_target_platforms_tests.bzl
new file mode 100644
index 0000000..a72bdc2
--- /dev/null
+++ b/tests/pypi/whl_target_platforms/whl_target_platforms_tests.bzl
@@ -0,0 +1,135 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"Tests for whl_target_platforms."
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private/pypi:whl_target_platforms.bzl", "whl_target_platforms")  # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def _test_simple(env):
+    tests = {
+        "macosx_10_9_arm64": [
+            struct(os = "osx", cpu = "aarch64", abi = None, target_platform = "osx_aarch64", version = (10, 9)),
+        ],
+        "macosx_10_9_universal2": [
+            struct(os = "osx", cpu = "x86_64", abi = None, target_platform = "osx_x86_64", version = (10, 9)),
+            struct(os = "osx", cpu = "aarch64", abi = None, target_platform = "osx_aarch64", version = (10, 9)),
+        ],
+        "manylinux_2_17_i686": [
+            struct(os = "linux", cpu = "x86_32", abi = None, target_platform = "linux_x86_32", version = (2, 17)),
+        ],
+        "musllinux_1_1_ppc64le": [
+            struct(os = "linux", cpu = "ppc", abi = None, target_platform = "linux_ppc", version = (1, 1)),
+        ],
+        "win_amd64": [
+            struct(os = "windows", cpu = "x86_64", abi = None, target_platform = "windows_x86_64", version = (0, 0)),
+        ],
+    }
+
+    for give, want in tests.items():
+        for abi in ["", "abi3", "none"]:
+            got = whl_target_platforms(give, abi)
+            env.expect.that_collection(got).contains_exactly(want)
+
+_tests.append(_test_simple)
+
+def _test_with_abi(env):
+    tests = {
+        "macosx_10_9_arm64": [
+            struct(os = "osx", cpu = "aarch64", abi = "cp39", target_platform = "cp39_osx_aarch64", version = (10, 9)),
+        ],
+        "macosx_10_9_universal2": [
+            struct(os = "osx", cpu = "x86_64", abi = "cp310", target_platform = "cp310_osx_x86_64", version = (10, 9)),
+            struct(os = "osx", cpu = "aarch64", abi = "cp310", target_platform = "cp310_osx_aarch64", version = (10, 9)),
+        ],
+        # This should use version 0 because there are two platform_tags. This is
+        # just to ensure that the code is robust.
+        "manylinux1_i686.manylinux_2_17_i686": [
+            struct(os = "linux", cpu = "x86_32", abi = "cp38", target_platform = "cp38_linux_x86_32", version = (0, 0)),
+        ],
+        "musllinux_1_1_ppc64le": [
+            struct(os = "linux", cpu = "ppc", abi = "cp311", target_platform = "cp311_linux_ppc", version = (1, 1)),
+        ],
+        "win_amd64": [
+            struct(os = "windows", cpu = "x86_64", abi = "cp311", target_platform = "cp311_windows_x86_64", version = (0, 0)),
+        ],
+    }
+
+    for give, want in tests.items():
+        got = whl_target_platforms(give, want[0].abi)
+        env.expect.that_collection(got).contains_exactly(want)
+
+_tests.append(_test_with_abi)
+
+def _can_parse_existing_tags(env):
+    examples = {
+        "linux_armv6l": 1,
+        "linux_armv7l": 1,
+        "macosx_11_12_arm64": 1,
+        "macosx_11_12_i386": 1,
+        "macosx_11_12_intel": 1,
+        "macosx_11_12_universal": 2,
+        "macosx_11_12_universal2": 2,
+        "macosx_11_12_x86_64": 1,
+        "manylinux1_i686": 1,
+        "manylinux1_x86_64": 1,
+        "manylinux2010_i686": 1,
+        "manylinux2010_x86_64": 1,
+        "manylinux2014_aarch64": 1,
+        "manylinux2014_armv7l": 1,
+        "manylinux2014_i686": 1,
+        "manylinux2014_ppc64": 1,
+        "manylinux2014_ppc64le": 1,
+        "manylinux2014_s390x": 1,
+        "manylinux2014_x86_64": 1,
+        "manylinux_11_12_aarch64": 1,
+        "manylinux_11_12_armv7l": 1,
+        "manylinux_11_12_i686": 1,
+        "manylinux_11_12_ppc64": 1,
+        "manylinux_11_12_ppc64le": 1,
+        "manylinux_11_12_s390x": 1,
+        "manylinux_11_12_x86_64": 1,
+        "manylinux_1_2_aarch64": 1,
+        "manylinux_1_2_x86_64": 1,
+        "musllinux_11_12_aarch64": 1,
+        "musllinux_11_12_armv7l": 1,
+        "musllinux_11_12_i686": 1,
+        "musllinux_11_12_ppc64le": 1,
+        "musllinux_11_12_s390x": 1,
+        "musllinux_11_12_x86_64": 1,
+        "win32": 1,
+        "win_amd64": 1,
+        "win_arm64": 1,
+        "win_ia64": 0,
+    }
+
+    for major_version in [2, 10, 13]:
+        for minor_version in [0, 1, 2, 10, 45]:
+            for give, want_size in examples.items():
+                give = give.replace("_11_", "_{}_".format(major_version))
+                give = give.replace("_12_", "_{}_".format(minor_version))
+                got = whl_target_platforms(give)
+                env.expect.that_str("{}: {}".format(give, len(got))).equals("{}: {}".format(give, want_size))
+
+_tests.append(_can_parse_existing_tags)
+
+def whl_target_platforms_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, basic_tests = _tests)
diff --git a/tests/runfiles/BUILD.bazel b/tests/runfiles/BUILD.bazel
index 6193ee9..5c92026 100644
--- a/tests/runfiles/BUILD.bazel
+++ b/tests/runfiles/BUILD.bazel
@@ -1,3 +1,4 @@
+load("@bazel_skylib//rules:build_test.bzl", "build_test")
 load("@rules_python//python:py_test.bzl", "py_test")
 load("@rules_python//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")  # buildifier: disable=bzl-visibility
 
@@ -9,3 +10,10 @@
     },
     deps = ["//python/runfiles"],
 )
+
+build_test(
+    name = "publishing",
+    targets = [
+        "//python/runfiles:wheel.publish",
+    ],
+)
diff --git a/tests/runtime_env_toolchain/BUILD.bazel b/tests/runtime_env_toolchain/BUILD.bazel
new file mode 100644
index 0000000..99bdbab
--- /dev/null
+++ b/tests/runtime_env_toolchain/BUILD.bazel
@@ -0,0 +1,33 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//tests/support:sh_py_run_test.bzl", "py_reconfig_test")
+load(":runtime_env_toolchain_tests.bzl", "runtime_env_toolchain_test_suite")
+
+runtime_env_toolchain_test_suite(name = "runtime_env_toolchain_tests")
+
+py_reconfig_test(
+    name = "toolchain_runs_test",
+    srcs = ["toolchain_runs_test.py"],
+    data = [
+        "//tests/support:current_build_settings",
+    ],
+    extra_toolchains = [
+        "//python/runtime_env_toolchains:all",
+        # Necessary for RBE CI
+        "//tests/cc:all",
+    ],
+    main = "toolchain_runs_test.py",
+    deps = ["//python/runfiles"],
+)
diff --git a/tests/runtime_env_toolchain/runtime_env_toolchain_tests.bzl b/tests/runtime_env_toolchain/runtime_env_toolchain_tests.bzl
new file mode 100644
index 0000000..9885a1e
--- /dev/null
+++ b/tests/runtime_env_toolchain/runtime_env_toolchain_tests.bzl
@@ -0,0 +1,101 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Starlark tests for py_runtime rule."""
+
+load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("@rules_testing//lib:util.bzl", rt_util = "util")
+load(
+    "//python/private:toolchain_types.bzl",
+    "EXEC_TOOLS_TOOLCHAIN_TYPE",
+    "PY_CC_TOOLCHAIN_TYPE",
+    "TARGET_TOOLCHAIN_TYPE",
+)  # buildifier: disable=bzl-visibility
+load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER")  # buildifier: disable=bzl-visibility
+load("//tests/support:support.bzl", "CC_TOOLCHAIN", "EXEC_TOOLS_TOOLCHAIN", "VISIBLE_FOR_TESTING")
+
+_LookupInfo = provider()  # buildifier: disable=provider-params
+
+def _use_toolchains_impl(ctx):
+    return [
+        _LookupInfo(
+            target = ctx.toolchains[TARGET_TOOLCHAIN_TYPE],
+            exec = ctx.toolchains[EXEC_TOOLS_TOOLCHAIN_TYPE],
+            cc = ctx.toolchains[PY_CC_TOOLCHAIN_TYPE],
+        ),
+    ]
+
+_use_toolchains = rule(
+    implementation = _use_toolchains_impl,
+    toolchains = [
+        TARGET_TOOLCHAIN_TYPE,
+        EXEC_TOOLS_TOOLCHAIN_TYPE,
+        PY_CC_TOOLCHAIN_TYPE,
+    ],
+)
+
+_tests = []
+
+def _test_runtime_env_toolchain_matches(name):
+    rt_util.helper_target(
+        _use_toolchains,
+        name = name + "_subject",
+    )
+    extra_toolchains = [
+        str(Label("//python/runtime_env_toolchains:all")),
+    ]
+
+    # We have to add a cc toolchain because py_cc toolchain depends on it.
+    # However, that package also defines a different fake py_cc toolchain we
+    # don't want to use, so we need to ensure the runtime_env toolchain has
+    # higher precedence.
+    # However, Bazel 6 and Bazel 7 process --extra_toolchains in different
+    # orders:
+    #  * Bazel 6 goes left to right
+    #  * Bazel 7 goes right to left
+    # We could just put our preferred toolchain before *and* after
+    # the undesired toolchain...
+    # However, Bazel 7 has a bug where *duplicate* entries are ignored,
+    # and only the *first* entry is respected.
+    if IS_BAZEL_7_OR_HIGHER:
+        extra_toolchains.insert(0, CC_TOOLCHAIN)
+    else:
+        extra_toolchains.append(CC_TOOLCHAIN)
+    analysis_test(
+        name = name,
+        impl = _test_runtime_env_toolchain_matches_impl,
+        target = name + "_subject",
+        config_settings = {
+            "//command_line_option:extra_toolchains": extra_toolchains,
+            EXEC_TOOLS_TOOLCHAIN: "enabled",
+            VISIBLE_FOR_TESTING: True,
+        },
+    )
+
+def _test_runtime_env_toolchain_matches_impl(env, target):
+    env.expect.that_str(
+        str(target[_LookupInfo].target.toolchain_label),
+    ).contains("runtime_env_py_runtime_pair")
+    env.expect.that_str(
+        str(target[_LookupInfo].exec.toolchain_label),
+    ).contains("runtime_env_py_exec_tools")
+    env.expect.that_str(
+        str(target[_LookupInfo].cc.toolchain_label),
+    ).contains("runtime_env_py_cc")
+
+_tests.append(_test_runtime_env_toolchain_matches)
+
+def runtime_env_toolchain_test_suite(name):
+    test_suite(name = name, tests = _tests)
diff --git a/tests/runtime_env_toolchain/toolchain_runs_test.py b/tests/runtime_env_toolchain/toolchain_runs_test.py
new file mode 100644
index 0000000..7be2472
--- /dev/null
+++ b/tests/runtime_env_toolchain/toolchain_runs_test.py
@@ -0,0 +1,28 @@
+import json
+import pathlib
+import platform
+import unittest
+
+from python.runfiles import runfiles
+
+
+class RunTest(unittest.TestCase):
+    def test_ran(self):
+        rf = runfiles.Create()
+        settings_path = rf.Rlocation(
+            "rules_python/tests/support/current_build_settings.json"
+        )
+        settings = json.loads(pathlib.Path(settings_path).read_text())
+        if platform.system() == "Windows":
+            self.assertEqual(
+                "/_magic_pyruntime_sentinel_do_not_use", settings["interpreter_path"]
+            )
+        else:
+            self.assertIn(
+                "runtime_env_toolchain_interpreter.sh",
+                settings["interpreter"]["short_path"],
+            )
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/support/BUILD.bazel b/tests/support/BUILD.bazel
index 316e9ab..58c74d6 100644
--- a/tests/support/BUILD.bazel
+++ b/tests/support/BUILD.bazel
@@ -17,6 +17,15 @@
 # Otherwise, you'll probably have to manually call Label() on these targets
 # to force them to resolve in the proper context.
 # ====================
+
+load("//python:py_runtime.bzl", "py_runtime")
+load("//python:py_runtime_pair.bzl", "py_runtime_pair")
+load(":sh_py_run_test.bzl", "current_build_settings")
+
+package(
+    default_visibility = ["//:__subpackages__"],
+)
+
 platform(
     name = "mac",
     constraint_values = [
@@ -37,3 +46,70 @@
         "@platforms//os:windows",
     ],
 )
+
+# Used when testing downloading of toolchains for a different platform
+
+platform(
+    name = "linux_x86_64",
+    constraint_values = [
+        "@platforms//cpu:x86_64",
+        "@platforms//os:linux",
+    ],
+)
+
+platform(
+    name = "linux_aarch64",
+    constraint_values = [
+        "@platforms//cpu:aarch64",
+        "@platforms//os:linux",
+    ],
+)
+
+platform(
+    name = "mac_x86_64",
+    constraint_values = [
+        "@platforms//cpu:x86_64",
+        "@platforms//os:macos",
+    ],
+)
+
+platform(
+    name = "windows_x86_64",
+    constraint_values = [
+        "@platforms//cpu:x86_64",
+        "@platforms//os:windows",
+    ],
+)
+
+platform(
+    name = "win_aarch64",
+    constraint_values = [
+        "@platforms//os:windows",
+        "@platforms//cpu:aarch64",
+    ],
+)
+
+py_runtime(
+    name = "platform_runtime",
+    implementation_name = "fakepy",
+    interpreter_path = "/fake/python3.9",
+    interpreter_version_info = {
+        "major": "4",
+        "minor": "5",
+    },
+)
+
+py_runtime_pair(
+    name = "platform_runtime_pair",
+    py3_runtime = ":platform_runtime",
+)
+
+toolchain(
+    name = "platform_toolchain",
+    toolchain = ":platform_runtime_pair",
+    toolchain_type = "//python:toolchain_type",
+)
+
+current_build_settings(
+    name = "current_build_settings",
+)
diff --git a/tests/support/sh_py_run_test.bzl b/tests/support/sh_py_run_test.bzl
new file mode 100644
index 0000000..455f64e
--- /dev/null
+++ b/tests/support/sh_py_run_test.bzl
@@ -0,0 +1,207 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Run a py_binary with altered config settings in an sh_test.
+
+This facilitates verifying the behavior of binaries run under different
+configuration settings, without the overhead of a bazel-in-bazel integration test.
+"""
+
+load("//python:py_binary.bzl", "py_binary")
+load("//python:py_test.bzl", "py_test")
+load("//python/private:toolchain_types.bzl", "TARGET_TOOLCHAIN_TYPE")  # buildifier: disable=bzl-visibility
+load("//tests/support:support.bzl", "VISIBLE_FOR_TESTING")
+
+def _perform_transition_impl(input_settings, attr):
+    settings = dict(input_settings)
+    settings[VISIBLE_FOR_TESTING] = True
+    settings["//command_line_option:build_python_zip"] = attr.build_python_zip
+    if attr.bootstrap_impl:
+        settings["//python/config_settings:bootstrap_impl"] = attr.bootstrap_impl
+    if attr.extra_toolchains:
+        settings["//command_line_option:extra_toolchains"] = attr.extra_toolchains
+    if attr.python_version:
+        settings["//python/config_settings:python_version"] = attr.python_version
+    return settings
+
+_perform_transition = transition(
+    implementation = _perform_transition_impl,
+    inputs = [
+        "//python/config_settings:bootstrap_impl",
+        "//command_line_option:extra_toolchains",
+        "//python/config_settings:python_version",
+    ],
+    outputs = [
+        "//command_line_option:build_python_zip",
+        "//command_line_option:extra_toolchains",
+        "//python/config_settings:bootstrap_impl",
+        "//python/config_settings:python_version",
+        VISIBLE_FOR_TESTING,
+    ],
+)
+
+def _py_reconfig_impl(ctx):
+    default_info = ctx.attr.target[DefaultInfo]
+    exe_ext = default_info.files_to_run.executable.extension
+    if exe_ext:
+        exe_ext = "." + exe_ext
+    exe_name = ctx.label.name + exe_ext
+
+    executable = ctx.actions.declare_file(exe_name)
+    ctx.actions.symlink(output = executable, target_file = default_info.files_to_run.executable)
+
+    default_outputs = [executable]
+
+    # TODO: Could probably check target.owner vs src.owner to decide whether it
+    # should be symlinked or included as-is.
+    # For simplicity of implementation, we're assuming the target being run is
+    # py_binary-like. In order for Windows to work, we need to make sure the
+    # file that the .exe launcher runs (the .zip or underlying non-exe
+    # executable) is a sibling of the .exe file with the same base name.
+    for src in default_info.files.to_list():
+        if src.extension in ("", "zip"):
+            ext = ("." if src.extension else "") + src.extension
+            output = ctx.actions.declare_file(ctx.label.name + ext)
+            ctx.actions.symlink(output = output, target_file = src)
+            default_outputs.append(output)
+
+    return [
+        DefaultInfo(
+            executable = executable,
+            files = depset(default_outputs),
+            # On windows, the other default outputs must also be included
+            # in runfiles so the exe launcher can find the backing file.
+            runfiles = ctx.runfiles(default_outputs).merge(
+                default_info.default_runfiles,
+            ),
+        ),
+        testing.TestEnvironment(
+            environment = ctx.attr.env,
+        ),
+    ]
+
+def _make_reconfig_rule(**kwargs):
+    attrs = {
+        "bootstrap_impl": attr.string(),
+        "build_python_zip": attr.string(default = "auto"),
+        "env": attr.string_dict(),
+        "extra_toolchains": attr.string_list(
+            doc = """
+Value for the --extra_toolchains flag.
+
+NOTE: You'll likely have to also specify //tests/cc:all (or some CC toolchain)
+to make the RBE presubmits happy, which disable auto-detection of a CC
+toolchain.
+""",
+        ),
+        "python_version": attr.string(),
+        "target": attr.label(executable = True, cfg = "target"),
+        "_allowlist_function_transition": attr.label(
+            default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+        ),
+    }
+    return rule(
+        implementation = _py_reconfig_impl,
+        attrs = attrs,
+        cfg = _perform_transition,
+        **kwargs
+    )
+
+_py_reconfig_binary = _make_reconfig_rule(executable = True)
+
+_py_reconfig_test = _make_reconfig_rule(test = True)
+
+def py_reconfig_test(*, name, **kwargs):
+    """Create a py_test with customized build settings for testing.
+
+    Args:
+        name: str, name of the test target.
+        **kwargs: kwargs to pass along to _py_reconfig_test and py_test.
+    """
+    reconfig_kwargs = {}
+    reconfig_kwargs["bootstrap_impl"] = kwargs.pop("bootstrap_impl", None)
+    reconfig_kwargs["extra_toolchains"] = kwargs.pop("extra_toolchains", None)
+    reconfig_kwargs["python_version"] = kwargs.pop("python_version", None)
+    reconfig_kwargs["env"] = kwargs.get("env")
+    reconfig_kwargs["target_compatible_with"] = kwargs.get("target_compatible_with")
+
+    inner_name = "_{}_inner".format(name)
+    _py_reconfig_test(
+        name = name,
+        target = inner_name,
+        **reconfig_kwargs
+    )
+    py_test(
+        name = inner_name,
+        tags = ["manual"],
+        **kwargs
+    )
+
+def sh_py_run_test(*, name, sh_src, py_src, **kwargs):
+    bin_name = "_{}_bin".format(name)
+    native.sh_test(
+        name = name,
+        srcs = [sh_src],
+        data = [bin_name],
+        deps = [
+            "@bazel_tools//tools/bash/runfiles",
+        ],
+        env = {
+            "BIN_RLOCATION": "$(rlocationpath {})".format(bin_name),
+        },
+    )
+
+    _py_reconfig_binary(
+        name = bin_name,
+        tags = ["manual"],
+        target = "_{}_plain_bin".format(name),
+        **kwargs
+    )
+
+    py_binary(
+        name = "_{}_plain_bin".format(name),
+        srcs = [py_src],
+        main = py_src,
+        tags = ["manual"],
+    )
+
+def _current_build_settings_impl(ctx):
+    info = ctx.actions.declare_file(ctx.label.name + ".json")
+    toolchain = ctx.toolchains[TARGET_TOOLCHAIN_TYPE]
+    runtime = toolchain.py3_runtime
+    files = [info]
+    ctx.actions.write(
+        output = info,
+        content = json.encode({
+            "interpreter": {
+                "short_path": runtime.interpreter.short_path if runtime.interpreter else None,
+            },
+            "interpreter_path": runtime.interpreter_path,
+            "toolchain_label": str(getattr(toolchain, "toolchain_label", None)),
+        }),
+    )
+    return [DefaultInfo(
+        files = depset(files),
+    )]
+
+current_build_settings = rule(
+    doc = """
+Writes information about the current build config to JSON for testing.
+
+This is so tests can verify information about the build config used for them.
+""",
+    implementation = _current_build_settings_impl,
+    toolchains = [
+        TARGET_TOOLCHAIN_TYPE,
+    ],
+)
diff --git a/tests/support/support.bzl b/tests/support/support.bzl
new file mode 100644
index 0000000..a74346d
--- /dev/null
+++ b/tests/support/support.bzl
@@ -0,0 +1,40 @@
+# Copyright 2023 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Code that support testing of rules_python code."""
+
+# NOTE: Explicit Label() calls are required so that it resolves in @rules_python
+# context instead of e.g. the @rules_testing context.
+# NOTE: Some labels require str() around Label() because they are passed onto
+# rules_testing or as config_setting values, which don't support Label in some
+# places.
+
+MAC = Label("//tests/support:mac")
+MAC_X86_64 = Label("//tests/support:mac_x86_64")
+LINUX = Label("//tests/support:linux")
+LINUX_X86_64 = Label("//tests/support:linux_x86_64")
+WINDOWS = Label("//tests/support:windows")
+WINDOWS_X86_64 = Label("//tests/support:windows_x86_64")
+
+PLATFORM_TOOLCHAIN = str(Label("//tests/support:platform_toolchain"))
+CC_TOOLCHAIN = str(Label("//tests/cc:all"))
+
+# str() around Label() is necessary because rules_testing's config_settings
+# doesn't accept yet Label objects.
+EXEC_TOOLS_TOOLCHAIN = str(Label("//python/config_settings:exec_tools_toolchain"))
+PRECOMPILE = str(Label("//python/config_settings:precompile"))
+PRECOMPILE_ADD_TO_RUNFILES = str(Label("//python/config_settings:precompile_add_to_runfiles"))
+PRECOMPILE_SOURCE_RETENTION = str(Label("//python/config_settings:precompile_source_retention"))
+PYC_COLLECTION = str(Label("//python/config_settings:pyc_collection"))
+PYTHON_VERSION = str(Label("//python/config_settings:python_version"))
+VISIBLE_FOR_TESTING = str(Label("//python/private:visible_for_testing"))
diff --git a/tests/support/test_platforms.bzl b/tests/support/test_platforms.bzl
deleted file mode 100644
index 3ff3c50..0000000
--- a/tests/support/test_platforms.bzl
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright 2023 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Constants for referring to platforms."""
-
-# Explicit Label() calls are required so that it resolves in @rules_python
-# context instead of e.g. the @rules_testing context.
-MAC = Label("//tests/support:mac")
-LINUX = Label("//tests/support:linux")
-WINDOWS = Label("//tests/support:windows")
diff --git a/tests/private/text_util/BUILD.bazel b/tests/text_util/BUILD.bazel
similarity index 100%
rename from tests/private/text_util/BUILD.bazel
rename to tests/text_util/BUILD.bazel
diff --git a/tests/private/text_util/render_tests.bzl b/tests/text_util/render_tests.bzl
similarity index 84%
rename from tests/private/text_util/render_tests.bzl
rename to tests/text_util/render_tests.bzl
index 7c3dddf..14967a9 100644
--- a/tests/private/text_util/render_tests.bzl
+++ b/tests/text_util/render_tests.bzl
@@ -54,6 +54,25 @@
 
 _tests.append(_test_render_alias)
 
+def _test_render_tuple_dict(env):
+    got = render.dict(
+        {
+            ("foo", "bar"): "baz",
+            ("foo",): "bar",
+        },
+        key_repr = render.tuple,
+    )
+    env.expect.that_str(got).equals("""\
+{
+    (
+        "foo",
+        "bar",
+    ): "baz",
+    ("foo",): "bar",
+}""")
+
+_tests.append(_test_render_tuple_dict)
+
 def render_test_suite(name):
     """Create the test suite.
 
diff --git a/tests/toolchains/BUILD.bazel b/tests/toolchains/BUILD.bazel
index 2f804a4..c55dc92 100644
--- a/tests/toolchains/BUILD.bazel
+++ b/tests/toolchains/BUILD.bazel
@@ -12,9 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-load(":defs.bzl", "acceptance_tests")
-load(":versions_test.bzl", "versions_test_suite")
+load(":defs.bzl", "define_toolchain_tests")
 
-versions_test_suite(name = "versions_test")
-
-acceptance_tests()
+define_toolchain_tests(
+    name = "toolchain_tests",
+)
diff --git a/tests/toolchains/defs.bzl b/tests/toolchains/defs.bzl
index 8776eba..fbb7082 100644
--- a/tests/toolchains/defs.bzl
+++ b/tests/toolchains/defs.bzl
@@ -12,186 +12,41 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""This module contains the definition for the toolchains testing rules.
-"""
+""
 
 load("//python:versions.bzl", "PLATFORMS", "TOOL_VERSIONS")
-load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")  # buildifier: disable=bzl-visibility
+load("//tests/support:sh_py_run_test.bzl", "py_reconfig_test")
 
-_WINDOWS_RUNNER_TEMPLATE = """\
-@ECHO OFF
-set PATHEXT=.COM;.EXE;.BAT
-powershell.exe -c "& ./{interpreter_path} {run_acceptance_test_py}"
-"""
+def define_toolchain_tests(name):
+    """Define the toolchain tests.
 
-def _acceptance_test_impl(ctx):
-    files = []
-
-    if BZLMOD_ENABLED:
-        module_bazel = ctx.actions.declare_file("/".join([ctx.attr.python_version, "MODULE.bazel"]))
-        ctx.actions.expand_template(
-            template = ctx.file._module_bazel_tmpl,
-            output = module_bazel,
-            substitutions = {"%python_version%": ctx.attr.python_version},
-        )
-        files.append(module_bazel)
-
-        workspace = ctx.actions.declare_file("/".join([ctx.attr.python_version, "WORKSPACE"]))
-        ctx.actions.write(workspace, "")
-        files.append(workspace)
-    else:
-        workspace = ctx.actions.declare_file("/".join([ctx.attr.python_version, "WORKSPACE"]))
-        ctx.actions.expand_template(
-            template = ctx.file._workspace_tmpl,
-            output = workspace,
-            substitutions = {"%python_version%": ctx.attr.python_version},
-        )
-        files.append(workspace)
-
-    build_bazel = ctx.actions.declare_file("/".join([ctx.attr.python_version, "BUILD.bazel"]))
-    ctx.actions.expand_template(
-        template = ctx.file._build_bazel_tmpl,
-        output = build_bazel,
-        substitutions = {"%python_version%": ctx.attr.python_version},
-    )
-    files.append(build_bazel)
-
-    python_version_test = ctx.actions.declare_file("/".join([ctx.attr.python_version, "python_version_test.py"]))
-    ctx.actions.symlink(
-        target_file = ctx.file._python_version_test,
-        output = python_version_test,
-    )
-    files.append(python_version_test)
-
-    run_acceptance_test_py = ctx.actions.declare_file("/".join([ctx.attr.python_version, "run_acceptance_test.py"]))
-    ctx.actions.expand_template(
-        template = ctx.file._run_acceptance_test_tmpl,
-        output = run_acceptance_test_py,
-        substitutions = {
-            "%is_bzlmod%": str(BZLMOD_ENABLED),
-            "%is_windows%": str(ctx.attr.is_windows),
-            "%python_version%": ctx.attr.python_version,
-            "%test_location%": "/".join([ctx.attr.test_location, ctx.attr.python_version]),
-        },
-    )
-    files.append(run_acceptance_test_py)
-
-    toolchain = ctx.toolchains["@bazel_tools//tools/python:toolchain_type"]
-    py3_runtime = toolchain.py3_runtime
-    interpreter_path = py3_runtime.interpreter_path
-    if not interpreter_path:
-        interpreter_path = py3_runtime.interpreter.short_path
-
-    if ctx.attr.is_windows:
-        executable = ctx.actions.declare_file("run_test_{}.bat".format(ctx.attr.python_version))
-        ctx.actions.write(
-            output = executable,
-            content = _WINDOWS_RUNNER_TEMPLATE.format(
-                interpreter_path = interpreter_path.replace("../", "external/"),
-                run_acceptance_test_py = run_acceptance_test_py.short_path,
-            ),
-            is_executable = True,
-        )
-    else:
-        executable = ctx.actions.declare_file("run_test_{}.sh".format(ctx.attr.python_version))
-        ctx.actions.write(
-            output = executable,
-            content = "exec '{interpreter_path}' '{run_acceptance_test_py}'".format(
-                interpreter_path = interpreter_path,
-                run_acceptance_test_py = run_acceptance_test_py.short_path,
-            ),
-            is_executable = True,
-        )
-    files.append(executable)
-    files.extend(ctx.files._distribution)
-
-    return [DefaultInfo(
-        executable = executable,
-        files = depset(
-            direct = files,
-            transitive = [py3_runtime.files],
-        ),
-        runfiles = ctx.runfiles(
-            files = files,
-            transitive_files = py3_runtime.files,
-        ),
-    )]
-
-_acceptance_test = rule(
-    implementation = _acceptance_test_impl,
-    doc = "A rule for the toolchain acceptance tests.",
-    attrs = {
-        "is_windows": attr.bool(
-            doc = "(Provided by the macro) Whether this is running under Windows or not.",
-            mandatory = True,
-        ),
-        "python_version": attr.string(
-            doc = "The Python version to be used when requesting the toolchain.",
-            mandatory = True,
-        ),
-        "test_location": attr.string(
-            doc = "(Provided by the macro) The value of native.package_name().",
-            mandatory = True,
-        ),
-        "_build_bazel_tmpl": attr.label(
-            doc = "The BUILD.bazel template.",
-            allow_single_file = True,
-            default = Label("//tests/toolchains/workspace_template:BUILD.bazel.tmpl"),
-        ),
-        "_distribution": attr.label(
-            doc = "The rules_python source distribution.",
-            default = Label("//:distribution"),
-        ),
-        "_module_bazel_tmpl": attr.label(
-            doc = "The MODULE.bazel template.",
-            allow_single_file = True,
-            default = Label("//tests/toolchains/workspace_template:MODULE.bazel.tmpl"),
-        ),
-        "_python_version_test": attr.label(
-            doc = "The python_version_test.py used to test the Python version.",
-            allow_single_file = True,
-            default = Label("//tests/toolchains/workspace_template:python_version_test.py"),
-        ),
-        "_run_acceptance_test_tmpl": attr.label(
-            doc = "The run_acceptance_test.py template.",
-            allow_single_file = True,
-            default = Label("//tests/toolchains:run_acceptance_test.py.tmpl"),
-        ),
-        "_workspace_tmpl": attr.label(
-            doc = "The WORKSPACE template.",
-            allow_single_file = True,
-            default = Label("//tests/toolchains/workspace_template:WORKSPACE.tmpl"),
-        ),
-    },
-    test = True,
-    toolchains = ["@bazel_tools//tools/python:toolchain_type"],
-)
-
-def acceptance_test(python_version, **kwargs):
-    _acceptance_test(
-        is_windows = select({
-            "@bazel_tools//src/conditions:host_windows": True,
-            "//conditions:default": False,
-        }),
-        python_version = python_version,
-        test_location = native.package_name(),
-        **kwargs
-    )
-
-# buildifier: disable=unnamed-macro
-def acceptance_tests():
-    """Creates a matrix of acceptance_test targets for all the toolchains.
+    Args:
+        name: Only present to satisfy tooling.
     """
-    for python_version in TOOL_VERSIONS.keys():
-        for platform, meta in PLATFORMS.items():
-            if platform not in TOOL_VERSIONS[python_version]["sha256"]:
-                continue
-            acceptance_test(
-                name = "python_{python_version}_{platform}_test".format(
-                    python_version = python_version.replace(".", "_"),
-                    platform = platform,
-                ),
-                python_version = python_version,
-                target_compatible_with = meta.compatible_with,
-                tags = ["acceptance-test"],
-            )
+    for platform_key, platform_info in PLATFORMS.items():
+        native.config_setting(
+            name = "_is_{}".format(platform_key),
+            flag_values = platform_info.flag_values,
+            constraint_values = platform_info.compatible_with,
+        )
+
+    for python_version, meta in TOOL_VERSIONS.items():
+        target_compatible_with = {
+            "//conditions:default": ["@platforms//:incompatible"],
+        }
+        for platform_key in meta["sha256"].keys():
+            is_platform = "_is_{}".format(platform_key)
+            target_compatible_with[is_platform] = []
+
+        py_reconfig_test(
+            name = "python_{}_test".format(python_version),
+            srcs = ["python_toolchain_test.py"],
+            main = "python_toolchain_test.py",
+            python_version = python_version,
+            env = {
+                "EXPECT_PYTHON_VERSION": python_version,
+            },
+            deps = ["//python/runfiles"],
+            data = ["//tests/support:current_build_settings"],
+            target_compatible_with = select(target_compatible_with),
+        )
diff --git a/tests/toolchains/python_toolchain_test.py b/tests/toolchains/python_toolchain_test.py
new file mode 100644
index 0000000..371b252
--- /dev/null
+++ b/tests/toolchains/python_toolchain_test.py
@@ -0,0 +1,28 @@
+import json
+import os
+import pathlib
+import sys
+import unittest
+
+from python.runfiles import runfiles
+
+
+class PythonToolchainTest(unittest.TestCase):
+    def test_expected_toolchain_matches(self):
+        expect_version = os.environ["EXPECT_PYTHON_VERSION"]
+
+        rf = runfiles.Create()
+        settings_path = rf.Rlocation(
+            "rules_python/tests/support/current_build_settings.json"
+        )
+        settings = json.loads(pathlib.Path(settings_path).read_text())
+
+        expected = "python_{}".format(expect_version.replace(".", "_"))
+        self.assertIn(expected, settings["toolchain_label"], str(settings))
+
+        actual = "{v.major}.{v.minor}.{v.micro}".format(v=sys.version_info)
+        self.assertEqual(actual, expect_version)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/toolchains/run_acceptance_test.py.tmpl b/tests/toolchains/run_acceptance_test.py.tmpl
deleted file mode 100644
index c52e078..0000000
--- a/tests/toolchains/run_acceptance_test.py.tmpl
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright 2022 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import subprocess
-import unittest
-import pathlib
-
-class TestPythonVersion(unittest.TestCase):
-    @classmethod
-    def setUpClass(cls):
-        os.chdir("%test_location%")
-        test_srcdir = os.environ["TEST_SRCDIR"]
-        # When bzlmod is enabled, the name of the directory in runfiles changes
-        # to _main instead of rules_python
-        if os.path.exists(os.path.join(test_srcdir, "_main")):
-          rules_python_path = os.path.join(test_srcdir, "_main")
-        else:
-          rules_python_path = os.path.join(test_srcdir, "rules_python")
-
-        test_tmpdir = os.environ["TEST_TMPDIR"]
-        if %is_windows%:
-            home = os.path.join(test_tmpdir, "HOME")
-            os.mkdir(home)
-            os.environ["HOME"] = home
-
-            local_app_data = os.path.join(test_tmpdir, "LocalAppData")
-            os.mkdir(local_app_data)
-            os.environ["LocalAppData"] = local_app_data
-
-        # Bazelisk requires a cache directory be set
-        os.environ["XDG_CACHE_HOME"] = os.path.join(test_tmpdir, "xdg-cache-home")
-
-        # Unset this so this works when called by Bazel's latest Bazel build
-        # pipeline. It sets the following combination, which interfere with each other:
-        # * --sandbox_tmpfs_path=/tmp
-        # * --test_env=USE_BAZEL_VERSION
-        # * USE_BAZEL_VERSION=/tmp/<something>
-        os.environ.pop("USE_BAZEL_VERSION", None)
-
-        bazelrc_lines = [
-            "build --test_output=errors",
-        ]
-
-        if %is_bzlmod%:
-            bazelrc_lines.extend(
-                [
-                    'build --override_module rules_python="{}"'.format(
-                        rules_python_path.replace("\\", "/")
-                    ),
-                    "common --enable_bzlmod",
-                ]
-            )
-        else:
-            bazelrc_lines.extend(
-                [
-                    'build --override_repository rules_python="{}"'.format(
-                        rules_python_path.replace("\\", "/")
-                    ),
-                    "common --noexperimental_enable_bzlmod",
-                ]
-            )
-
-        bazelrc = pathlib.Path(".bazelrc")
-        bazelrc.write_text(os.linesep.join(bazelrc_lines))
-
-    def test_match_toolchain(self):
-        output = subprocess.check_output(
-            f"bazel run --announce_rc @python//:python3 -- --version",
-            shell = True, # Shell needed to look up via PATH
-            text=True,
-        ).strip()
-        self.assertEqual(output, "Python %python_version%")
-
-        subprocess.run("bazel test --announce_rc //...", shell=True, check=True)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/tests/toolchains/versions_test.bzl b/tests/toolchains/versions_test.bzl
deleted file mode 100644
index b885d22..0000000
--- a/tests/toolchains/versions_test.bzl
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2022 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Unit tests for starlark helpers
-See https://docs.bazel.build/versions/main/skylark/testing.html#for-testing-starlark-utilities
-"""
-
-load("@bazel_skylib//lib:unittest.bzl", "asserts", "unittest")
-load("//python:versions.bzl", "MINOR_MAPPING", "TOOL_VERSIONS")
-
-required_platforms = [
-    "x86_64-apple-darwin",
-    "x86_64-unknown-linux-gnu",
-]
-
-def _smoke_test_impl(ctx):
-    env = unittest.begin(ctx)
-    for version in TOOL_VERSIONS.keys():
-        platforms = TOOL_VERSIONS[version]["sha256"]
-        for required_platform in required_platforms:
-            asserts.true(
-                env,
-                required_platform in platforms.keys(),
-                "Missing platform {} for version {}".format(required_platform, version),
-            )
-    for minor in MINOR_MAPPING:
-        version = MINOR_MAPPING[minor]
-        asserts.true(
-            env,
-            version in TOOL_VERSIONS.keys(),
-            "Missing version {} in TOOL_VERSIONS".format(version),
-        )
-    return unittest.end(env)
-
-# The unittest library requires that we export the test cases as named test rules,
-# but their names are arbitrary and don't appear anywhere.
-_t0_test = unittest.make(_smoke_test_impl)
-
-def versions_test_suite(name):
-    unittest.suite(name, _t0_test)
diff --git a/tests/toolchains/workspace_template/BUILD.bazel b/tests/toolchains/workspace_template/BUILD.bazel
deleted file mode 100644
index 7f3e7b0..0000000
--- a/tests/toolchains/workspace_template/BUILD.bazel
+++ /dev/null
@@ -1,6 +0,0 @@
-exports_files([
-    "BUILD.bazel.tmpl",
-    "MODULE.bazel.tmpl",
-    "WORKSPACE.tmpl",
-    "python_version_test.py",
-])
diff --git a/tests/toolchains/workspace_template/BUILD.bazel.tmpl b/tests/toolchains/workspace_template/BUILD.bazel.tmpl
deleted file mode 100644
index 4a45209..0000000
--- a/tests/toolchains/workspace_template/BUILD.bazel.tmpl
+++ /dev/null
@@ -1,9 +0,0 @@
-load("@rules_python//python:defs.bzl", "py_test")
-
-py_test(
-    name = "python_version_test",
-    srcs = ["python_version_test.py"],
-    env = {
-        "PYTHON_VERSION": "%python_version%",
-    },
-)
diff --git a/tests/toolchains/workspace_template/MODULE.bazel.tmpl b/tests/toolchains/workspace_template/MODULE.bazel.tmpl
deleted file mode 100644
index 9e3a844..0000000
--- a/tests/toolchains/workspace_template/MODULE.bazel.tmpl
+++ /dev/null
@@ -1,19 +0,0 @@
-module(
-    name = "module_test",
-    version = "0.0.0",
-    compatibility_level = 1,
-)
-
-bazel_dep(name = "bazel_skylib", version = "1.3.0")
-bazel_dep(name = "rules_python", version = "0.0.0")
-local_path_override(
-    module_name = "rules_python",
-    path = "",
-)
-
-python = use_extension("@rules_python//python/extensions:python.bzl", "python")
-python.toolchain(
-    is_default = True,
-    python_version = "%python_version%",
-)
-use_repo(python, "python_versions", python = "python_%python_version%".replace(".", "_"))
diff --git a/tests/toolchains/workspace_template/README.md b/tests/toolchains/workspace_template/README.md
deleted file mode 100644
index b4d6e6a..0000000
--- a/tests/toolchains/workspace_template/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# Toolchains testing WORKSPACE template
-
-This directory contains templates for generating acceptance tests for the
-toolchains.
diff --git a/tests/toolchains/workspace_template/WORKSPACE.tmpl b/tests/toolchains/workspace_template/WORKSPACE.tmpl
deleted file mode 100644
index 3335f4b..0000000
--- a/tests/toolchains/workspace_template/WORKSPACE.tmpl
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2022 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-workspace(name = "workspace_test")
-
-local_repository(
-    name = "rules_python",
-    path = "",
-)
-
-load("@rules_python//python:repositories.bzl", "python_register_toolchains", "py_repositories")
-
-py_repositories()
-
-python_register_toolchains(
-    name = "python",
-    python_version = "%python_version%",
-)
-
-load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
-http_archive(
-    name = "bazel_skylib",
-    urls = [
-        "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz",
-        "https://github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz",
-    ],
-    sha256 = "c6966ec828da198c5d9adbaa94c05e3a1c7f21bd012a0b29ba8ddbccb2c93b0d",
-)
-load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
-bazel_skylib_workspace()
diff --git a/tests/uv/toolchain/BUILD.bazel b/tests/uv/toolchain/BUILD.bazel
new file mode 100644
index 0000000..137b4e0
--- /dev/null
+++ b/tests/uv/toolchain/BUILD.bazel
@@ -0,0 +1,22 @@
+load("//python:py_test.bzl", "py_test")
+load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")  # buildifier: disable=bzl-visibility
+
+# We only test this feature when `bzlmod` is enabled.
+_TARGET_COMPATIBLE_WITH = [] if BZLMOD_ENABLED else ["@platforms//:incompatible"]
+
+genrule(
+    name = "uv_help",
+    outs = ["uv_help.txt"],
+    cmd = "$(UV_BIN) --python-fetch manual --help >$@",
+    target_compatible_with = _TARGET_COMPATIBLE_WITH,
+    toolchains = ["//python/uv:current_toolchain"],
+)
+
+py_test(
+    name = "uv_help_test",
+    srcs = ["uv_help_test.py"],
+    data = [":uv_help"],
+    env = {"DATA": "$(rlocationpath :uv_help)"},
+    target_compatible_with = _TARGET_COMPATIBLE_WITH,
+    deps = ["//python/runfiles"],
+)
diff --git a/tests/uv/toolchain/uv_help_test.py b/tests/uv/toolchain/uv_help_test.py
new file mode 100755
index 0000000..be5e755
--- /dev/null
+++ b/tests/uv/toolchain/uv_help_test.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+import os
+import unittest
+from pathlib import Path
+
+from python.runfiles import runfiles
+
+
+class TestUV(unittest.TestCase):
+    def test_uv_help(self):
+        rfiles = runfiles.Create()
+        assert rfiles is not None, "rfiles creation failed"
+
+        data_rpath = os.environ["DATA"]
+        uv_help_path = rfiles.Rlocation(data_rpath)
+        assert (
+            uv_help_path is not None
+        ), f"the rlocation path was not found: {data_rpath}"
+
+        uv_help = Path(uv_help_path).read_text()
+
+        self.assertIn("Usage: uv [OPTIONS] <COMMAND>", uv_help)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/whl_filegroup/BUILD.bazel b/tests/whl_filegroup/BUILD.bazel
new file mode 100644
index 0000000..d8b711d
--- /dev/null
+++ b/tests/whl_filegroup/BUILD.bazel
@@ -0,0 +1,69 @@
+load("@bazel_skylib//rules:write_file.bzl", "write_file")
+load("@rules_cc//cc:defs.bzl", "cc_library", "cc_test")
+load("//python:defs.bzl", "py_library", "py_test")
+load("//python:packaging.bzl", "py_package", "py_wheel")
+load("//python:pip.bzl", "whl_filegroup")
+load(":whl_filegroup_tests.bzl", "whl_filegroup_test_suite")
+
+whl_filegroup_test_suite(name = "whl_filegroup_tests")
+
+py_test(
+    name = "extract_wheel_files_test",
+    size = "small",
+    srcs = ["extract_wheel_files_test.py"],
+    data = ["//examples/wheel:minimal_with_py_package"],
+    deps = ["//python/private/whl_filegroup:extract_wheel_files"],
+)
+
+write_file(
+    name = "header",
+    out = "include/whl_headers/header.h",
+    content = [
+        "#pragma once",
+        "#include <Python.h>",
+        "#define CUSTOM_ZERO ((Py_ssize_t) 0)",
+    ],
+)
+
+write_file(
+    name = "lib_py",
+    out = "lib.py",
+)
+
+py_library(
+    name = "lib",
+    srcs = ["lib.py"],
+    data = [":header"],
+)
+
+py_package(
+    name = "pkg",
+    deps = [":lib"],
+)
+
+py_wheel(
+    name = "wheel",
+    distribution = "wheel",
+    python_tag = "py3",
+    version = "0.0.1",
+    deps = [":pkg"],
+)
+
+whl_filegroup(
+    name = "filegroup",
+    pattern = "tests/whl_filegroup/include/.*\\.h",
+    whl = ":wheel",
+)
+
+cc_library(
+    name = "whl_headers",
+    hdrs = [":filegroup"],
+    includes = ["filegroup/tests/whl_filegroup/include"],
+    deps = ["@rules_python//python/cc:current_py_cc_headers"],
+)
+
+cc_test(
+    name = "whl_headers_test",
+    srcs = ["whl_headers_test.c"],
+    deps = [":whl_headers"],
+)
diff --git a/tests/whl_filegroup/extract_wheel_files_test.py b/tests/whl_filegroup/extract_wheel_files_test.py
new file mode 100644
index 0000000..2ea175b
--- /dev/null
+++ b/tests/whl_filegroup/extract_wheel_files_test.py
@@ -0,0 +1,63 @@
+import tempfile
+import unittest
+from pathlib import Path
+
+from python.private.whl_filegroup import extract_wheel_files
+
+_WHEEL = Path("examples/wheel/example_minimal_package-0.0.1-py3-none-any.whl")
+
+
+class WheelRecordTest(unittest.TestCase):
+    def test_get_wheel_record(self) -> None:
+        record = extract_wheel_files.get_record(_WHEEL)
+        expected = {
+            "examples/wheel/lib/data.txt": (
+                "sha256=9vJKEdfLu8bZRArKLroPZJh1XKkK3qFMXiM79MBL2Sg",
+                12,
+            ),
+            "examples/wheel/lib/module_with_data.py": (
+                "sha256=8s0Khhcqz3yVsBKv2IB5u4l4TMKh7-c_V6p65WVHPms",
+                637,
+            ),
+            "examples/wheel/lib/simple_module.py": (
+                "sha256=z2hwciab_XPNIBNH8B1Q5fYgnJvQTeYf0ZQJpY8yLLY",
+                637,
+            ),
+            "examples/wheel/main.py": (
+                "sha256=sgg5iWN_9inYBjm6_Zw27hYdmo-l24fA-2rfphT-IlY",
+                909,
+            ),
+            "example_minimal_package-0.0.1.dist-info/WHEEL": (
+                "sha256=sobxWSyDDkdg_rinUth-jxhXHqoNqlmNMJY3aTZn2Us",
+                91,
+            ),
+            "example_minimal_package-0.0.1.dist-info/METADATA": (
+                "sha256=cfiQ2hFJhCKCUgbwtAwWG0fhW6NTzw4cr1uKOBcV_IM",
+                76,
+            ),
+        }
+        self.maxDiff = None
+        self.assertDictEqual(record, expected)
+
+    def test_get_files(self) -> None:
+        pattern = "(examples/wheel/lib/.*\.txt$|.*main)"
+        record = extract_wheel_files.get_record(_WHEEL)
+        files = extract_wheel_files.get_files(record, pattern)
+        expected = ["examples/wheel/lib/data.txt", "examples/wheel/main.py"]
+        self.assertEqual(files, expected)
+
+    def test_extract(self) -> None:
+        files = {"examples/wheel/lib/data.txt", "examples/wheel/main.py"}
+        with tempfile.TemporaryDirectory() as tmpdir:
+            outdir = Path(tmpdir)
+            extract_wheel_files.extract_files(_WHEEL, files, outdir)
+            extracted_files = {
+                f.relative_to(outdir).as_posix()
+                for f in outdir.glob("**/*")
+                if f.is_file()
+            }
+        self.assertEqual(extracted_files, files)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/whl_filegroup/whl_filegroup_tests.bzl b/tests/whl_filegroup/whl_filegroup_tests.bzl
new file mode 100644
index 0000000..acb9341
--- /dev/null
+++ b/tests/whl_filegroup/whl_filegroup_tests.bzl
@@ -0,0 +1,34 @@
+"""Test for py_wheel."""
+
+load("@rules_testing//lib:analysis_test.bzl", "analysis_test", "test_suite")
+load("@rules_testing//lib:util.bzl", "util")
+load("//python:pip.bzl", "whl_filegroup")
+
+def _test_runfiles(name):
+    for runfiles in [True, False]:
+        util.helper_target(
+            whl_filegroup,
+            name = name + "_subject_runfiles_{}".format(runfiles),
+            whl = ":wheel",
+            runfiles = runfiles,
+        )
+    analysis_test(
+        name = name,
+        impl = _test_runfiles_impl,
+        targets = {
+            "no_runfiles": name + "_subject_runfiles_False",
+            "with_runfiles": name + "_subject_runfiles_True",
+        },
+    )
+
+def _test_runfiles_impl(env, targets):
+    env.expect.that_target(targets.with_runfiles).runfiles().contains_exactly([env.ctx.workspace_name + "/{package}/{name}"])
+    env.expect.that_target(targets.no_runfiles).runfiles().contains_exactly([])
+
+def whl_filegroup_test_suite(name):
+    """Create the test suite.
+
+    Args:
+        name: the name of the test suite
+    """
+    test_suite(name = name, tests = [_test_runfiles])
diff --git a/tests/whl_filegroup/whl_headers_test.c b/tests/whl_filegroup/whl_headers_test.c
new file mode 100644
index 0000000..786395a
--- /dev/null
+++ b/tests/whl_filegroup/whl_headers_test.c
@@ -0,0 +1,5 @@
+#include <whl_headers/header.h>
+
+int main(int argc, char**argv) {
+    return CUSTOM_ZERO;
+}
diff --git a/third_party/rules_pycross/pycross/private/tools/BUILD.bazel b/third_party/rules_pycross/pycross/private/tools/BUILD.bazel
index a87e6aa..41485c1 100644
--- a/third_party/rules_pycross/pycross/private/tools/BUILD.bazel
+++ b/third_party/rules_pycross/pycross/private/tools/BUILD.bazel
@@ -20,7 +20,7 @@
     srcs = ["wheel_installer.py"],
     visibility = ["//visibility:public"],
     deps = [
-        "//python/pip_install/tools/wheel_installer:lib",
+        "//python/private/pypi/whl_installer:lib",
         "@pypi__installer//:lib",
     ],
 )
diff --git a/third_party/rules_pycross/pycross/private/tools/wheel_installer.py b/third_party/rules_pycross/pycross/private/tools/wheel_installer.py
index 0c352cf..c03c4c2 100644
--- a/third_party/rules_pycross/pycross/private/tools/wheel_installer.py
+++ b/third_party/rules_pycross/pycross/private/tools/wheel_installer.py
@@ -30,7 +30,7 @@
 from installer.destinations import SchemeDictionaryDestination
 from installer.sources import WheelFile
 
-from python.pip_install.tools.wheel_installer import namespace_pkgs
+from python.private.pypi.whl_installer import namespace_pkgs
 
 
 def setup_namespace_pkg_compatibility(wheel_dir: Path) -> None:
diff --git a/tools/BUILD.bazel b/tools/BUILD.bazel
index 51bd56d..4f42bcb 100644
--- a/tools/BUILD.bazel
+++ b/tools/BUILD.bazel
@@ -29,6 +29,9 @@
     srcs = [
         "BUILD.bazel",
         "wheelmaker.py",
+        "//tools/launcher:distribution",
+        "//tools/precompiler:distribution",
+        "//tools/publish:distribution",
     ],
     visibility = ["//:__pkg__"],
 )
diff --git a/tools/launcher/BUILD.bazel b/tools/launcher/BUILD.bazel
new file mode 100644
index 0000000..aa46106
--- /dev/null
+++ b/tools/launcher/BUILD.bazel
@@ -0,0 +1,33 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+filegroup(
+    name = "distribution",
+    srcs = glob(["**"]),
+    visibility = ["//:__subpackages__"],
+)
+
+alias(
+    name = "launcher",
+    actual = select({
+        "@platforms//os:windows": "@bazel_tools//tools/launcher:launcher",
+        # The alias.actual value must be non-None, so use an empty target.
+        "//conditions:default": ":_sentinel_no_launcher",
+    }),
+    visibility = ["//visibility:public"],
+)
+
+filegroup(
+    name = "_sentinel_no_launcher",
+)
diff --git a/tools/precompiler/BUILD.bazel b/tools/precompiler/BUILD.bazel
new file mode 100644
index 0000000..268f41b
--- /dev/null
+++ b/tools/precompiler/BUILD.bazel
@@ -0,0 +1,44 @@
+# Copyright 2017 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("@bazel_skylib//rules:common_settings.bzl", "string_list_flag")
+load("//python/private:py_interpreter_program.bzl", "py_interpreter_program")  # buildifier: disable=bzl-visibility
+
+filegroup(
+    name = "distribution",
+    srcs = glob(["**"]),
+    visibility = ["//:__subpackages__"],
+)
+
+py_interpreter_program(
+    name = "precompiler",
+    execution_requirements = ":execution_requirements",
+    main = "precompiler.py",
+    visibility = [
+        # Not actually public. Only public so rules_python-generated toolchains
+        # are able to reference it.
+        "//visibility:public",
+    ],
+)
+
+string_list_flag(
+    name = "execution_requirements",
+    build_setting_default = [
+        "supports-workers=1",
+        "requires-worker-protocol=json",
+        "supports-multiplex-sandboxing=1",
+        "supports-multiplex-workers=1",
+        "supports-worker-cancellation=1",
+    ],
+)
diff --git a/tools/precompiler/precompiler.py b/tools/precompiler/precompiler.py
new file mode 100644
index 0000000..d1b1713
--- /dev/null
+++ b/tools/precompiler/precompiler.py
@@ -0,0 +1,296 @@
+# Copyright 2024 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A simple precompiler to generate deterministic pyc files for Bazel."""
+
+# NOTE: Imports specific to the persistent worker should only be imported
+# when a persistent worker is used. Avoiding the unnecessary imports
+# saves significant startup time for non-worker invocations.
+import argparse
+import py_compile
+import sys
+
+
+def _create_parser() -> "argparse.Namespace":
+    parser = argparse.ArgumentParser(fromfile_prefix_chars="@")
+    parser.add_argument("--invalidation_mode")
+    parser.add_argument("--optimize", type=int)
+    parser.add_argument("--python_version")
+
+    parser.add_argument("--src", action="append", dest="srcs")
+    parser.add_argument("--src_name", action="append", dest="src_names")
+    parser.add_argument("--pyc", action="append", dest="pycs")
+
+    parser.add_argument("--persistent_worker", action="store_true")
+    parser.add_argument("--log_level", default="ERROR")
+    parser.add_argument("--worker_impl", default="async")
+    return parser
+
+
+def _compile(options: "argparse.Namespace") -> None:
+    try:
+        invalidation_mode = getattr(
+            py_compile.PycInvalidationMode, options.invalidation_mode.upper()
+        )
+    except AttributeError as e:
+        raise ValueError(
+            f"Unknown PycInvalidationMode: {options.invalidation_mode}"
+        ) from e
+
+    if len(options.srcs) != len(options.src_names) != len(options.pycs):
+        raise AssertionError(
+            "Mismatched number of --src, --src_name, and/or --pyc args"
+        )
+
+    for src, src_name, pyc in zip(options.srcs, options.src_names, options.pycs):
+        py_compile.compile(
+            src,
+            pyc,
+            doraise=True,
+            dfile=src_name,
+            optimize=options.optimize,
+            invalidation_mode=invalidation_mode,
+        )
+    return 0
+
+
+# A stub type alias for readability.
+# See the Bazel WorkRequest object definition:
+# https://github.com/bazelbuild/bazel/blob/master/src/main/protobuf/worker_protocol.proto
+JsonWorkerRequest = object
+
+# A stub type alias for readability.
+# See the Bazel WorkResponse object definition:
+# https://github.com/bazelbuild/bazel/blob/master/src/main/protobuf/worker_protocol.proto
+JsonWorkerResponse = object
+
+
+class _SerialPersistentWorker:
+    """Simple, synchronous, serial persistent worker."""
+
+    def __init__(self, instream: "typing.TextIO", outstream: "typing.TextIO"):
+        self._instream = instream
+        self._outstream = outstream
+        self._parser = _create_parser()
+
+    def run(self) -> None:
+        try:
+            while True:
+                request = None
+                try:
+                    request = self._get_next_request()
+                    if request is None:
+                        _logger.info("Empty request: exiting")
+                        break
+                    response = self._process_request(request)
+                    if response:  # May be None for a cancel request
+                        self._send_response(response)
+                except Exception:
+                    _logger.exception("Unhandled error: request=%s", request)
+                    output = (
+                        f"Unhandled error:\nRequest: {request}\n"
+                        + traceback.format_exc()
+                    )
+                    request_id = 0 if not request else request.get("requestId", 0)
+                    self._send_response(
+                        {
+                            "exitCode": 3,
+                            "output": output,
+                            "requestId": request_id,
+                        }
+                    )
+        finally:
+            _logger.info("Worker shutting down")
+
+    def _get_next_request(self) -> "object | None":
+        line = self._instream.readline()
+        if not line:
+            return None
+        return json.loads(line)
+
+    def _process_request(self, request: "JsonWorkRequest") -> "JsonWorkResponse | None":
+        if request.get("cancel"):
+            return None
+        options = self._options_from_request(request)
+        _compile(options)
+        response = {
+            "requestId": request.get("requestId", 0),
+            "exitCode": 0,
+        }
+        return response
+
+    def _options_from_request(
+        self, request: "JsonWorkResponse"
+    ) -> "argparse.Namespace":
+        options = self._parser.parse_args(request["arguments"])
+        if request.get("sandboxDir"):
+            prefix = request["sandboxDir"]
+            options.srcs = [os.path.join(prefix, v) for v in options.srcs]
+            options.pycs = [os.path.join(prefix, v) for v in options.pycs]
+        return options
+
+    def _send_response(self, response: "JsonWorkResponse") -> None:
+        self._outstream.write(json.dumps(response) + "\n")
+        self._outstream.flush()
+
+
+class _AsyncPersistentWorker:
+    """Asynchronous, concurrent, persistent worker."""
+
+    def __init__(self, reader: "typing.TextIO", writer: "typing.TextIO"):
+        self._reader = reader
+        self._writer = writer
+        self._parser = _create_parser()
+        self._request_id_to_task = {}
+        self._task_to_request_id = {}
+
+    @classmethod
+    async def main(cls, instream: "typing.TextIO", outstream: "typing.TextIO") -> None:
+        reader, writer = await cls._connect_streams(instream, outstream)
+        await cls(reader, writer).run()
+
+    @classmethod
+    async def _connect_streams(
+        cls, instream: "typing.TextIO", outstream: "typing.TextIO"
+    ) -> "tuple[asyncio.StreamReader, asyncio.StreamWriter]":
+        loop = asyncio.get_event_loop()
+        reader = asyncio.StreamReader()
+        protocol = asyncio.StreamReaderProtocol(reader)
+        await loop.connect_read_pipe(lambda: protocol, instream)
+
+        w_transport, w_protocol = await loop.connect_write_pipe(
+            asyncio.streams.FlowControlMixin, outstream
+        )
+        writer = asyncio.StreamWriter(w_transport, w_protocol, reader, loop)
+        return reader, writer
+
+    async def run(self) -> None:
+        while True:
+            _logger.info("pending requests: %s", len(self._request_id_to_task))
+            request = await self._get_next_request()
+            request_id = request.get("requestId", 0)
+            task = asyncio.create_task(
+                self._process_request(request), name=f"request_{request_id}"
+            )
+            self._request_id_to_task[request_id] = task
+            self._task_to_request_id[task] = request_id
+            task.add_done_callback(self._handle_task_done)
+
+    async def _get_next_request(self) -> "JsonWorkRequest":
+        _logger.debug("awaiting line")
+        line = await self._reader.readline()
+        _logger.debug("recv line: %s", line)
+        return json.loads(line)
+
+    def _handle_task_done(self, task: "asyncio.Task") -> None:
+        request_id = self._task_to_request_id[task]
+        _logger.info("task done: %s %s", request_id, task)
+        del self._task_to_request_id[task]
+        del self._request_id_to_task[request_id]
+
+    async def _process_request(self, request: "JsonWorkRequest") -> None:
+        _logger.info("request %s: start: %s", request.get("requestId"), request)
+        try:
+            if request.get("cancel", False):
+                await self._process_cancel_request(request)
+            else:
+                await self._process_compile_request(request)
+        except asyncio.CancelledError:
+            _logger.info(
+                "request %s: cancel received, stopping processing",
+                request.get("requestId"),
+            )
+            # We don't send a response because we assume the request that
+            # triggered cancelling sent the response
+            raise
+        except:
+            _logger.exception("Unhandled error: request=%s", request)
+            self._send_response(
+                {
+                    "exitCode": 3,
+                    "output": f"Unhandled error:\nRequest: {request}\n"
+                    + traceback.format_exc(),
+                    "requestId": 0 if not request else request.get("requestId", 0),
+                }
+            )
+
+    async def _process_cancel_request(self, request: "JsonWorkRequest") -> None:
+        request_id = request.get("requestId", 0)
+        task = self._request_id_to_task.get(request_id)
+        if not task:
+            # It must have already completed, so ignore the request, per spec
+            return
+
+        task.cancel()
+        self._send_response({"requestId": request_id, "wasCancelled": True})
+
+    async def _process_compile_request(self, request: "JsonWorkRequest") -> None:
+        options = self._options_from_request(request)
+        # _compile performs a variety of blocking IO calls, so run it separately
+        await asyncio.to_thread(_compile, options)
+        self._send_response(
+            {
+                "requestId": request.get("requestId", 0),
+                "exitCode": 0,
+            }
+        )
+
+    def _options_from_request(self, request: "JsonWorkRequest") -> "argparse.Namespace":
+        options = self._parser.parse_args(request["arguments"])
+        if request.get("sandboxDir"):
+            prefix = request["sandboxDir"]
+            options.srcs = [os.path.join(prefix, v) for v in options.srcs]
+            options.pycs = [os.path.join(prefix, v) for v in options.pycs]
+        return options
+
+    def _send_response(self, response: "JsonWorkResponse") -> None:
+        _logger.info("request %s: respond: %s", response.get("requestId"), response)
+        self._writer.write(json.dumps(response).encode("utf8") + b"\n")
+
+
+def main(args: "list[str]") -> int:
+    options = _create_parser().parse_args(args)
+
+    # Persistent workers are started with the `--persistent_worker` flag.
+    # See the following docs for details on persistent workers:
+    # https://bazel.build/remote/persistent
+    # https://bazel.build/remote/multiplex
+    # https://bazel.build/remote/creating
+    if options.persistent_worker:
+        global asyncio, itertools, json, logging, os, traceback, _logger
+        import asyncio
+        import itertools
+        import json
+        import logging
+        import os.path
+        import traceback
+
+        _logger = logging.getLogger("precompiler")
+        # Only configure logging for workers. This prevents non-worker
+        # invocations from spamming stderr with logging info
+        logging.basicConfig(level=getattr(logging, options.log_level))
+        _logger.info("persistent worker: impl=%s", options.worker_impl)
+        if options.worker_impl == "serial":
+            _SerialPersistentWorker(sys.stdin, sys.stdout).run()
+        elif options.worker_impl == "async":
+            asyncio.run(_AsyncPersistentWorker.main(sys.stdin, sys.stdout))
+        else:
+            raise ValueError(f"Unknown worker impl: {options.worker_impl}")
+    else:
+        _compile(options)
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv[1:]))
diff --git a/tools/private/update_deps/BUILD.bazel b/tools/private/update_deps/BUILD.bazel
index 2ab7cc7..c83deb0 100644
--- a/tools/private/update_deps/BUILD.bazel
+++ b/tools/private/update_deps/BUILD.bazel
@@ -50,14 +50,12 @@
     name = "update_pip_deps",
     srcs = ["update_pip_deps.py"],
     data = [
-        "//:MODULE.bazel",
-        "//python/pip_install:repositories",
-        "//python/pip_install:requirements_txt",
+        "//python/private/pypi:deps.bzl",
+        "//python/private/pypi:requirements_txt",
     ],
     env = {
-        "MODULE_BAZEL": "$(rlocationpath //:MODULE.bazel)",
-        "REPOSITORIES_BZL": "$(rlocationpath //python/pip_install:repositories)",
-        "REQUIREMENTS_TXT": "$(rlocationpath //python/pip_install:requirements_txt)",
+        "DEPS_BZL": "$(rlocationpath //python/private/pypi:deps.bzl)",
+        "REQUIREMENTS_TXT": "$(rlocationpath //python/private/pypi:requirements_txt)",
     },
     imports = ["../../.."],
     deps = [
diff --git a/tools/private/update_deps/update_coverage_deps.py b/tools/private/update_deps/update_coverage_deps.py
index 6152d70..6b837b9 100755
--- a/tools/private/update_deps/update_coverage_deps.py
+++ b/tools/private/update_deps/update_coverage_deps.py
@@ -131,13 +131,13 @@
         "--py",
         nargs="+",
         type=str,
-        default=["cp38", "cp39", "cp310", "cp311"],
+        default=["cp38", "cp39", "cp310", "cp311", "cp312"],
         help="Supported python versions",
     )
     parser.add_argument(
         "--dry-run",
         action="store_true",
-        help="Wether to write to files",
+        help="Whether to write to files",
     )
     parser.add_argument(
         "--update-file",
@@ -186,8 +186,8 @@
     update_file(
         path=args.update_file,
         snippet=f"_coverage_deps = {repr(Deps(urls))}\n",
-        start_marker="# START: maintained by 'bazel run //tools/private:update_coverage_deps'",
-        end_marker="# END: maintained by 'bazel run //tools/private:update_coverage_deps'",
+        start_marker="# START: maintained by 'bazel run //tools/private/update_deps:update_coverage_deps <version>'",
+        end_marker="# END: maintained by 'bazel run //tools/private/update_deps:update_coverage_deps <version>'",
         dry_run=args.dry_run,
     )
 
diff --git a/tools/private/update_deps/update_pip_deps.py b/tools/private/update_deps/update_pip_deps.py
index 8a2dd5f..1034382 100755
--- a/tools/private/update_deps/update_pip_deps.py
+++ b/tools/private/update_deps/update_pip_deps.py
@@ -111,13 +111,13 @@
     parser.add_argument(
         "--start",
         type=str,
-        default="# START: maintained by 'bazel run //tools/private:update_pip_deps'",
+        default="# START: maintained by 'bazel run //tools/private/update_deps:update_pip_deps'",
         help="The text to match in a file when updating them.",
     )
     parser.add_argument(
         "--end",
         type=str,
-        default="# END: maintained by 'bazel run //tools/private:update_pip_deps'",
+        default="# END: maintained by 'bazel run //tools/private/update_deps:update_pip_deps'",
         help="The text to match in a file when updating them.",
     )
     parser.add_argument(
@@ -129,19 +129,13 @@
         "--requirements-txt",
         type=path_from_runfiles,
         default=os.environ.get("REQUIREMENTS_TXT"),
-        help="The requirements.txt path for the pip_install tools, defaults to the value taken from REQUIREMENTS_TXT",
+        help="The requirements.txt path for the pypi tools, defaults to the value taken from REQUIREMENTS_TXT",
     )
     parser.add_argument(
-        "--module-bazel",
+        "--deps-bzl",
         type=path_from_runfiles,
-        default=os.environ.get("MODULE_BAZEL"),
-        help="The path for the file to be updated, defaults to the value taken from MODULE_BAZEL",
-    )
-    parser.add_argument(
-        "--repositories-bzl",
-        type=path_from_runfiles,
-        default=os.environ.get("REPOSITORIES_BZL"),
-        help="The path for the file to be updated, defaults to the value taken from REPOSITORIES_BZL",
+        default=os.environ.get("DEPS_BZL"),
+        help="The path for the file to be updated, defaults to the value taken from DEPS_BZL",
     )
     args = parser.parse_args()
 
@@ -149,21 +143,13 @@
     deps = _get_deps(report)
 
     update_file(
-        path=args.repositories_bzl,
+        path=args.deps_bzl,
         snippet=_dep_snippet(deps),
         start_marker=args.start,
         end_marker=args.end,
         dry_run=args.dry_run,
     )
 
-    update_file(
-        path=args.module_bazel,
-        snippet=_module_snippet(deps),
-        start_marker=args.start,
-        end_marker=args.end,
-        dry_run=args.dry_run,
-    )
-
 
 if __name__ == "__main__":
     main()
diff --git a/tools/publish/BUILD.bazel b/tools/publish/BUILD.bazel
index 4759a31..a51693b 100644
--- a/tools/publish/BUILD.bazel
+++ b/tools/publish/BUILD.bazel
@@ -1,4 +1,6 @@
 load("//python:pip.bzl", "compile_pip_requirements")
+load("//python/config_settings:transition.bzl", "py_binary")
+load("//python/entry_points:py_console_script_binary.bzl", "py_console_script_binary")
 
 compile_pip_requirements(
     name = "requirements",
@@ -6,3 +8,27 @@
     requirements_darwin = "requirements_darwin.txt",
     requirements_windows = "requirements_windows.txt",
 )
+
+py_console_script_binary(
+    name = "twine",
+    # We use a py_binary rule with version transitions to ensure that we do not
+    # rely on the default version of the registered python toolchain. What is more
+    # we are using this instead of `@python_versions//3.11:defs.bzl` because loading
+    # that file relies on bzlmod being enabled.
+    binary_rule = py_binary,
+    pkg = "@rules_python_publish_deps//twine",
+    python_version = "3.11",
+    script = "twine",
+    visibility = ["//visibility:public"],
+)
+
+filegroup(
+    name = "distribution",
+    srcs = [
+        "BUILD.bazel",
+        "requirements.txt",
+        "requirements_darwin.txt",
+        "requirements_windows.txt",
+    ],
+    visibility = ["//tools:__pkg__"],
+)
diff --git a/tools/publish/requirements.txt b/tools/publish/requirements.txt
index 858fc51..2a9721d 100644
--- a/tools/publish/requirements.txt
+++ b/tools/publish/requirements.txt
@@ -168,30 +168,39 @@
     --hash=sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7 \
     --hash=sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8
     # via requests
-cryptography==39.0.0 \
-    --hash=sha256:1a6915075c6d3a5e1215eab5d99bcec0da26036ff2102a1038401d6ef5bef25b \
-    --hash=sha256:1ee1fd0de9851ff32dbbb9362a4d833b579b4a6cc96883e8e6d2ff2a6bc7104f \
-    --hash=sha256:407cec680e811b4fc829de966f88a7c62a596faa250fc1a4b520a0355b9bc190 \
-    --hash=sha256:50386acb40fbabbceeb2986332f0287f50f29ccf1497bae31cf5c3e7b4f4b34f \
-    --hash=sha256:6f97109336df5c178ee7c9c711b264c502b905c2d2a29ace99ed761533a3460f \
-    --hash=sha256:754978da4d0457e7ca176f58c57b1f9de6556591c19b25b8bcce3c77d314f5eb \
-    --hash=sha256:76c24dd4fd196a80f9f2f5405a778a8ca132f16b10af113474005635fe7e066c \
-    --hash=sha256:7dacfdeee048814563eaaec7c4743c8aea529fe3dd53127313a792f0dadc1773 \
-    --hash=sha256:80ee674c08aaef194bc4627b7f2956e5ba7ef29c3cc3ca488cf15854838a8f72 \
-    --hash=sha256:844ad4d7c3850081dffba91cdd91950038ee4ac525c575509a42d3fc806b83c8 \
-    --hash=sha256:875aea1039d78557c7c6b4db2fe0e9d2413439f4676310a5f269dd342ca7a717 \
-    --hash=sha256:887cbc1ea60786e534b00ba8b04d1095f4272d380ebd5f7a7eb4cc274710fad9 \
-    --hash=sha256:ad04f413436b0781f20c52a661660f1e23bcd89a0e9bb1d6d20822d048cf2856 \
-    --hash=sha256:bae6c7f4a36a25291b619ad064a30a07110a805d08dc89984f4f441f6c1f3f96 \
-    --hash=sha256:c52a1a6f81e738d07f43dab57831c29e57d21c81a942f4602fac7ee21b27f288 \
-    --hash=sha256:e0a05aee6a82d944f9b4edd6a001178787d1546ec7c6223ee9a848a7ade92e39 \
-    --hash=sha256:e324de6972b151f99dc078defe8fb1b0a82c6498e37bff335f5bc6b1e3ab5a1e \
-    --hash=sha256:e5d71c5d5bd5b5c3eebcf7c5c2bb332d62ec68921a8c593bea8c394911a005ce \
-    --hash=sha256:f3ed2d864a2fa1666e749fe52fb8e23d8e06b8012e8bd8147c73797c506e86f1 \
-    --hash=sha256:f671c1bb0d6088e94d61d80c606d65baacc0d374e67bf895148883461cd848de \
-    --hash=sha256:f6c0db08d81ead9576c4d94bbb27aed8d7a430fa27890f39084c2d0e2ec6b0df \
-    --hash=sha256:f964c7dcf7802d133e8dbd1565914fa0194f9d683d82411989889ecd701e8adf \
-    --hash=sha256:fec8b932f51ae245121c4671b4bbc030880f363354b2f0e0bd1366017d891458
+cryptography==42.0.4 \
+    --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \
+    --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \
+    --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \
+    --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \
+    --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \
+    --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \
+    --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \
+    --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \
+    --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \
+    --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \
+    --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \
+    --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \
+    --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \
+    --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \
+    --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \
+    --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \
+    --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \
+    --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \
+    --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \
+    --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \
+    --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \
+    --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \
+    --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \
+    --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \
+    --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \
+    --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \
+    --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \
+    --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \
+    --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \
+    --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \
+    --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \
+    --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449
     # via secretstorage
 docutils==0.19 \
     --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \
diff --git a/tools/publish/requirements_darwin.txt b/tools/publish/requirements_darwin.txt
index 1203ba2..dd4ac40 100644
--- a/tools/publish/requirements_darwin.txt
+++ b/tools/publish/requirements_darwin.txt
@@ -8,107 +8,109 @@
     --hash=sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414 \
     --hash=sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4
     # via readme-renderer
-certifi==2022.12.7 \
-    --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
-    --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+certifi==2024.7.4 \
+    --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \
+    --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90
     # via requests
-charset-normalizer==3.0.1 \
-    --hash=sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b \
-    --hash=sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42 \
-    --hash=sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d \
-    --hash=sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b \
-    --hash=sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a \
-    --hash=sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59 \
-    --hash=sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154 \
-    --hash=sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1 \
-    --hash=sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c \
-    --hash=sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a \
-    --hash=sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d \
-    --hash=sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6 \
-    --hash=sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b \
-    --hash=sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b \
-    --hash=sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783 \
-    --hash=sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5 \
-    --hash=sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918 \
-    --hash=sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555 \
-    --hash=sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639 \
-    --hash=sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786 \
-    --hash=sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e \
-    --hash=sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed \
-    --hash=sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820 \
-    --hash=sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8 \
-    --hash=sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3 \
-    --hash=sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541 \
-    --hash=sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14 \
-    --hash=sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be \
-    --hash=sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e \
-    --hash=sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76 \
-    --hash=sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b \
-    --hash=sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c \
-    --hash=sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b \
-    --hash=sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3 \
-    --hash=sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc \
-    --hash=sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6 \
-    --hash=sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59 \
-    --hash=sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4 \
-    --hash=sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d \
-    --hash=sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d \
-    --hash=sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3 \
-    --hash=sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a \
-    --hash=sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea \
-    --hash=sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6 \
-    --hash=sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e \
-    --hash=sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603 \
-    --hash=sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24 \
-    --hash=sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a \
-    --hash=sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58 \
-    --hash=sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678 \
-    --hash=sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a \
-    --hash=sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c \
-    --hash=sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6 \
-    --hash=sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18 \
-    --hash=sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174 \
-    --hash=sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317 \
-    --hash=sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f \
-    --hash=sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc \
-    --hash=sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837 \
-    --hash=sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41 \
-    --hash=sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c \
-    --hash=sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579 \
-    --hash=sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753 \
-    --hash=sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8 \
-    --hash=sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291 \
-    --hash=sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087 \
-    --hash=sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866 \
-    --hash=sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3 \
-    --hash=sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d \
-    --hash=sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1 \
-    --hash=sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca \
-    --hash=sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e \
-    --hash=sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db \
-    --hash=sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72 \
-    --hash=sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d \
-    --hash=sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc \
-    --hash=sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539 \
-    --hash=sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d \
-    --hash=sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af \
-    --hash=sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b \
-    --hash=sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602 \
-    --hash=sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f \
-    --hash=sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478 \
-    --hash=sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c \
-    --hash=sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e \
-    --hash=sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479 \
-    --hash=sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7 \
-    --hash=sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8
+charset-normalizer==3.3.2 \
+    --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \
+    --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \
+    --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \
+    --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \
+    --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \
+    --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \
+    --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \
+    --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \
+    --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \
+    --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \
+    --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \
+    --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \
+    --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \
+    --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \
+    --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \
+    --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \
+    --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \
+    --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \
+    --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \
+    --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \
+    --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \
+    --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \
+    --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \
+    --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \
+    --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \
+    --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \
+    --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \
+    --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \
+    --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \
+    --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \
+    --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \
+    --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \
+    --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \
+    --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \
+    --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \
+    --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \
+    --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \
+    --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \
+    --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \
+    --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \
+    --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \
+    --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \
+    --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \
+    --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \
+    --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \
+    --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \
+    --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \
+    --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \
+    --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \
+    --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \
+    --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \
+    --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \
+    --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \
+    --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \
+    --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \
+    --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \
+    --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \
+    --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \
+    --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \
+    --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \
+    --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \
+    --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \
+    --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \
+    --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \
+    --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \
+    --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \
+    --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \
+    --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \
+    --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \
+    --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \
+    --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \
+    --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \
+    --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \
+    --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \
+    --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \
+    --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \
+    --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \
+    --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \
+    --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \
+    --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \
+    --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \
+    --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \
+    --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \
+    --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \
+    --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \
+    --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \
+    --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \
+    --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \
+    --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \
+    --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561
     # via requests
 docutils==0.19 \
     --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \
     --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc
     # via readme-renderer
-idna==3.4 \
-    --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
-    --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
+idna==3.7 \
+    --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \
+    --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0
     # via requests
 importlib-metadata==6.0.0 \
     --hash=sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad \
@@ -176,9 +178,9 @@
     --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \
     --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8
     # via -r tools/publish/requirements.in
-urllib3==1.26.18 \
-    --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
-    --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
+urllib3==1.26.19 \
+    --hash=sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3 \
+    --hash=sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429
     # via
     #   requests
     #   twine
@@ -186,7 +188,7 @@
     --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
     --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
     # via bleach
-zipp==3.11.0 \
-    --hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \
-    --hash=sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766
+zipp==3.19.2 \
+    --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \
+    --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c
     # via importlib-metadata
diff --git a/tools/publish/requirements_windows.txt b/tools/publish/requirements_windows.txt
index 25d7776..7e210c9 100644
--- a/tools/publish/requirements_windows.txt
+++ b/tools/publish/requirements_windows.txt
@@ -8,107 +8,109 @@
     --hash=sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414 \
     --hash=sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4
     # via readme-renderer
-certifi==2022.12.7 \
-    --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
-    --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
+certifi==2024.7.4 \
+    --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \
+    --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90
     # via requests
-charset-normalizer==3.0.1 \
-    --hash=sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b \
-    --hash=sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42 \
-    --hash=sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d \
-    --hash=sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b \
-    --hash=sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a \
-    --hash=sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59 \
-    --hash=sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154 \
-    --hash=sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1 \
-    --hash=sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c \
-    --hash=sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a \
-    --hash=sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d \
-    --hash=sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6 \
-    --hash=sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b \
-    --hash=sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b \
-    --hash=sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783 \
-    --hash=sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5 \
-    --hash=sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918 \
-    --hash=sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555 \
-    --hash=sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639 \
-    --hash=sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786 \
-    --hash=sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e \
-    --hash=sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed \
-    --hash=sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820 \
-    --hash=sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8 \
-    --hash=sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3 \
-    --hash=sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541 \
-    --hash=sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14 \
-    --hash=sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be \
-    --hash=sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e \
-    --hash=sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76 \
-    --hash=sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b \
-    --hash=sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c \
-    --hash=sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b \
-    --hash=sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3 \
-    --hash=sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc \
-    --hash=sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6 \
-    --hash=sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59 \
-    --hash=sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4 \
-    --hash=sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d \
-    --hash=sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d \
-    --hash=sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3 \
-    --hash=sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a \
-    --hash=sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea \
-    --hash=sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6 \
-    --hash=sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e \
-    --hash=sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603 \
-    --hash=sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24 \
-    --hash=sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a \
-    --hash=sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58 \
-    --hash=sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678 \
-    --hash=sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a \
-    --hash=sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c \
-    --hash=sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6 \
-    --hash=sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18 \
-    --hash=sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174 \
-    --hash=sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317 \
-    --hash=sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f \
-    --hash=sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc \
-    --hash=sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837 \
-    --hash=sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41 \
-    --hash=sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c \
-    --hash=sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579 \
-    --hash=sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753 \
-    --hash=sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8 \
-    --hash=sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291 \
-    --hash=sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087 \
-    --hash=sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866 \
-    --hash=sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3 \
-    --hash=sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d \
-    --hash=sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1 \
-    --hash=sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca \
-    --hash=sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e \
-    --hash=sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db \
-    --hash=sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72 \
-    --hash=sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d \
-    --hash=sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc \
-    --hash=sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539 \
-    --hash=sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d \
-    --hash=sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af \
-    --hash=sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b \
-    --hash=sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602 \
-    --hash=sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f \
-    --hash=sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478 \
-    --hash=sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c \
-    --hash=sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e \
-    --hash=sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479 \
-    --hash=sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7 \
-    --hash=sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8
+charset-normalizer==3.3.2 \
+    --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \
+    --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \
+    --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \
+    --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \
+    --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \
+    --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \
+    --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \
+    --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \
+    --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \
+    --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \
+    --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \
+    --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \
+    --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \
+    --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \
+    --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \
+    --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \
+    --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \
+    --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \
+    --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \
+    --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \
+    --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \
+    --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \
+    --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \
+    --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \
+    --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \
+    --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \
+    --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \
+    --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \
+    --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \
+    --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \
+    --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \
+    --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \
+    --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \
+    --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \
+    --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \
+    --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \
+    --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \
+    --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \
+    --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \
+    --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \
+    --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \
+    --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \
+    --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \
+    --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \
+    --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \
+    --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \
+    --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \
+    --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \
+    --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \
+    --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \
+    --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \
+    --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \
+    --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \
+    --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \
+    --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \
+    --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \
+    --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \
+    --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \
+    --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \
+    --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \
+    --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \
+    --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \
+    --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \
+    --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \
+    --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \
+    --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \
+    --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \
+    --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \
+    --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \
+    --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \
+    --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \
+    --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \
+    --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \
+    --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \
+    --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \
+    --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \
+    --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \
+    --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \
+    --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \
+    --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \
+    --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \
+    --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \
+    --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \
+    --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \
+    --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \
+    --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \
+    --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \
+    --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \
+    --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \
+    --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561
     # via requests
 docutils==0.19 \
     --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \
     --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc
     # via readme-renderer
-idna==3.4 \
-    --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
-    --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
+idna==3.7 \
+    --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \
+    --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0
     # via requests
 importlib-metadata==6.0.0 \
     --hash=sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad \
@@ -180,9 +182,9 @@
     --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \
     --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8
     # via -r tools/publish/requirements.in
-urllib3==1.26.18 \
-    --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \
-    --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0
+urllib3==1.26.19 \
+    --hash=sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3 \
+    --hash=sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429
     # via
     #   requests
     #   twine
@@ -190,7 +192,7 @@
     --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
     --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
     # via bleach
-zipp==3.11.0 \
-    --hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \
-    --hash=sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766
+zipp==3.19.2 \
+    --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \
+    --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c
     # via importlib-metadata
diff --git a/tools/wheelmaker.py b/tools/wheelmaker.py
index 3bfaba2..8fa3e02 100644
--- a/tools/wheelmaker.py
+++ b/tools/wheelmaker.py
@@ -19,6 +19,7 @@
 import hashlib
 import os
 import re
+import stat
 import sys
 import zipfile
 from pathlib import Path
@@ -101,12 +102,13 @@
         filename,
         *,
         mode,
-        distinfo_dir: str | Path,
+        distribution_prefix: str,
         strip_path_prefixes=None,
         compression=zipfile.ZIP_DEFLATED,
         **kwargs,
     ):
-        self._distinfo_dir: str = Path(distinfo_dir).name
+        self._distribution_prefix = distribution_prefix
+
         self._strip_path_prefixes = strip_path_prefixes or []
         # Entries for the RECORD file as (filename, hash, size) tuples.
         self._record = []
@@ -114,7 +116,10 @@
         super().__init__(filename, mode=mode, compression=compression, **kwargs)
 
     def distinfo_path(self, basename):
-        return f"{self._distinfo_dir}/{basename}"
+        return f"{self._distribution_prefix}.dist-info/{basename}"
+
+    def data_path(self, basename):
+        return f"{self._distribution_prefix}.data/{basename}"
 
     def add_file(self, package_filename, real_filename):
         """Add given file to the distribution."""
@@ -122,8 +127,8 @@
         def arcname_from(name):
             # Always use unix path separators.
             normalized_arcname = name.replace(os.path.sep, "/")
-            # Don't manipulate names filenames in the .distinfo directory.
-            if normalized_arcname.startswith(self._distinfo_dir):
+            # Don't manipulate filenames in the .distinfo or .data directories.
+            if normalized_arcname.startswith(self._distribution_prefix):
                 return normalized_arcname
             for prefix in self._strip_path_prefixes:
                 if normalized_arcname.startswith(prefix):
@@ -189,7 +194,13 @@
 
         zinfo = zipfile.ZipInfo(filename=arcname, date_time=_ZIP_EPOCH)
         zinfo.create_system = 3  # ZipInfo entry created on a unix-y system
-        zinfo.external_attr = 0o777 << 16  # permissions: rwxrwxrwx
+        # Both pip and installer expect the regular file bit to be set in order for the
+        # executable bit to be preserved after extraction
+        # https://github.com/pypa/pip/blob/23.3.2/src/pip/_internal/utils/unpacking.py#L96-L100
+        # https://github.com/pypa/installer/blob/0.7.0/src/installer/sources.py#L310-L313
+        zinfo.external_attr = (
+            stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO | stat.S_IFREG
+        ) << 16  # permissions: -rwxrwxrwx
         zinfo.compress_type = self.compression
         return zinfo
 
@@ -218,41 +229,22 @@
         platform,
         outfile=None,
         strip_path_prefixes=None,
-        incompatible_normalize_name=True,
-        incompatible_normalize_version=True,
     ):
         self._name = name
-        self._version = version
+        self._version = normalize_pep440(version)
         self._build_tag = build_tag
         self._python_tag = python_tag
         self._abi = abi
         self._platform = platform
         self._outfile = outfile
         self._strip_path_prefixes = strip_path_prefixes
+        self._wheelname_fragment_distribution_name = escape_filename_distribution_name(
+            self._name
+        )
 
-        if incompatible_normalize_version:
-            self._version = normalize_pep440(self._version)
-            self._escaped_version = self._version
-        else:
-            self._escaped_version = escape_filename_segment(self._version)
-
-        if incompatible_normalize_name:
-            escaped_name = escape_filename_distribution_name(self._name)
-            self._distinfo_dir = (
-                escaped_name + "-" + self._escaped_version + ".dist-info/"
-            )
-            self._wheelname_fragment_distribution_name = escaped_name
-        else:
-            # The legacy behavior escapes the distinfo dir but not the
-            # wheel name. Enable incompatible_normalize_name to fix it.
-            # https://github.com/bazelbuild/rules_python/issues/1132
-            self._distinfo_dir = (
-                escape_filename_segment(self._name)
-                + "-"
-                + self._escaped_version
-                + ".dist-info/"
-            )
-            self._wheelname_fragment_distribution_name = self._name
+        self._distribution_prefix = (
+            self._wheelname_fragment_distribution_name + "-" + self._version
+        )
 
         self._whlfile = None
 
@@ -260,7 +252,7 @@
         self._whlfile = _WhlFile(
             self.filename(),
             mode="w",
-            distinfo_dir=self._distinfo_dir,
+            distribution_prefix=self._distribution_prefix,
             strip_path_prefixes=self._strip_path_prefixes,
         )
         return self
@@ -290,6 +282,9 @@
     def distinfo_path(self, basename):
         return self._whlfile.distinfo_path(basename)
 
+    def data_path(self, basename):
+        return self._whlfile.data_path(basename)
+
     def add_file(self, package_filename, real_filename):
         """Add given file to the distribution."""
         self._whlfile.add_file(package_filename, real_filename)
@@ -308,12 +303,12 @@
             wheel_contents += "Tag: %s\n" % tag
         self._whlfile.add_string(self.distinfo_path("WHEEL"), wheel_contents)
 
-    def add_metadata(self, metadata, name, description, version):
+    def add_metadata(self, metadata, name, description):
         """Write METADATA file to the distribution."""
         # https://www.python.org/dev/peps/pep-0566/
         # https://packaging.python.org/specifications/core-metadata/
         metadata = re.sub("^Name: .*$", "Name: %s" % name, metadata, flags=re.MULTILINE)
-        metadata += "Version: %s\n\n" % version
+        metadata += "Version: %s\n\n" % self._version
         # setuptools seems to insert UNKNOWN as description when none is
         # provided.
         metadata += description if description else "UNKNOWN"
@@ -446,6 +441,12 @@
         help="'filename;real_path' pairs listing extra files to include in"
         "dist-info directory. Can be supplied multiple times.",
     )
+    contents_group.add_argument(
+        "--data_files",
+        action="append",
+        help="'filename;real_path' pairs listing data files to include in "
+        "data directory. Can be supplied multiple times.",
+    )
 
     build_group = parser.add_argument_group("Building requirements")
     build_group.add_argument(
@@ -459,34 +460,28 @@
         help="Pass in the stamp info file for stamping",
     )
 
-    feature_group = parser.add_argument_group("Feature flags")
-    feature_group.add_argument("--noincompatible_normalize_name", action="store_true")
-    feature_group.add_argument(
-        "--noincompatible_normalize_version", action="store_true"
-    )
-
     return parser.parse_args(sys.argv[1:])
 
 
+def _parse_file_pairs(content: List[str]) -> List[List[str]]:
+    """
+    Parse ; delimited lists of files into a 2D list.
+    """
+    return [i.split(";", maxsplit=1) for i in content or []]
+
+
 def main() -> None:
     arguments = parse_args()
 
-    if arguments.input_file:
-        input_files = [i.split(";") for i in arguments.input_file]
-    else:
-        input_files = []
+    input_files = _parse_file_pairs(arguments.input_file)
+    extra_distinfo_file = _parse_file_pairs(arguments.extra_distinfo_file)
+    data_files = _parse_file_pairs(arguments.data_files)
 
-    if arguments.extra_distinfo_file:
-        extra_distinfo_file = [i.split(";") for i in arguments.extra_distinfo_file]
-    else:
-        extra_distinfo_file = []
-
-    if arguments.input_file_list:
-        for input_file in arguments.input_file_list:
-            with open(input_file) as _file:
-                input_file_list = _file.read().splitlines()
-            for _input_file in input_file_list:
-                input_files.append(_input_file.split(";"))
+    for input_file in arguments.input_file_list:
+        with open(input_file) as _file:
+            input_file_list = _file.read().splitlines()
+        for _input_file in input_file_list:
+            input_files.append(_input_file.split(";"))
 
     all_files = get_files_to_package(input_files)
     # Sort the files for reproducible order in the archive.
@@ -521,8 +516,6 @@
         platform=arguments.platform,
         outfile=arguments.out,
         strip_path_prefixes=strip_prefixes,
-        incompatible_normalize_name=not arguments.noincompatible_normalize_name,
-        incompatible_normalize_version=not arguments.noincompatible_normalize_version,
     ) as maker:
         for package_filename, real_filename in all_files:
             maker.add_file(package_filename, real_filename)
@@ -535,19 +528,51 @@
             ) as description_file:
                 description = description_file.read()
 
-        metadata = None
-        with open(arguments.metadata_file, "rt", encoding="utf-8") as metadata_file:
-            metadata = metadata_file.read()
+        metadata = arguments.metadata_file.read_text(encoding="utf-8")
 
-        if arguments.noincompatible_normalize_version:
-            version_in_metadata = version
-        else:
-            version_in_metadata = normalize_pep440(version)
+        # This is not imported at the top of the file due to the reliance
+        # on this file in the `whl_library` repository rule which does not
+        # provide `packaging` but does import symbols defined here.
+        from packaging.requirements import Requirement
+
+        # Search for any `Requires-Dist` entries that refer to other files and
+        # expand them.
+        for meta_line in metadata.splitlines():
+            if not meta_line.startswith("Requires-Dist: @"):
+                continue
+            file, _, extra = meta_line[len("Requires-Dist: @") :].partition(";")
+            extra = extra.strip()
+
+            reqs = []
+            for reqs_line in Path(file).read_text(encoding="utf-8").splitlines():
+                reqs_text = reqs_line.strip()
+                if not reqs_text or reqs_text.startswith(("#", "-")):
+                    continue
+
+                # Strip any comments
+                reqs_text, _, _ = reqs_text.partition("#")
+
+                req = Requirement(reqs_text.strip())
+                if req.marker:
+                    if extra:
+                        reqs.append(
+                            f"Requires-Dist: {req.name}{req.specifier}; ({req.marker}) and {extra}"
+                        )
+                    else:
+                        reqs.append(
+                            f"Requires-Dist: {req.name}{req.specifier}; {req.marker}"
+                        )
+                else:
+                    reqs.append(
+                        f"Requires-Dist: {req.name}{req.specifier}; {extra}".strip(" ;")
+                    )
+
+            metadata = metadata.replace(meta_line, "\n".join(reqs))
+
         maker.add_metadata(
             metadata=metadata,
             name=name,
             description=description,
-            version=version_in_metadata,
         )
 
         if arguments.entry_points_file:
@@ -556,6 +581,8 @@
             )
 
         # Sort the files for reproducible order in the archive.
+        for filename, real_path in sorted(data_files):
+            maker.add_file(maker.data_path(filename), real_path)
         for filename, real_path in sorted(extra_distinfo_file):
             maker.add_file(maker.distinfo_path(filename), real_path)