Snap for 10447354 from a50717f41eb1c964e892206b3a01fd25c231005d to mainline-networking-release

Change-Id: If48a9381d60b0e00749f5ad70ef14cc70c184cbf
diff --git a/.gitignore b/.gitignore
index d03d717..6cf3369 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
 __pycache__
 *.pyc
 .mypy_cache
+/venv
diff --git a/Android.bp b/Android.bp
index 5c87c28..4f2e8b3 100644
--- a/Android.bp
+++ b/Android.bp
@@ -16,25 +16,8 @@
     default_applicable_licenses: ["Android-Apache-2.0"],
 }
 
-python_defaults {
-    name: "external_updater_defaults",
-    version: {
-        py2: {
-            enabled: false,
-            embedded_launcher: false,
-        },
-        py3: {
-            enabled: true,
-            // Error if enable:
-            // certificate verify failed: unable to get local issuer certificate
-            embedded_launcher: false,
-        },
-    },
-}
-
 python_binary_host {
     name: "external_updater",
-    defaults: ["external_updater_defaults"],
     main: "external_updater.py",
     srcs: ["external_updater.py"],
     libs: ["external_updater_lib"],
@@ -42,14 +25,12 @@
 
 python_binary_host {
     name: "external_updater_notifier",
-    defaults: ["external_updater_defaults"],
     main: "notifier.py",
     srcs: ["notifier.py"],
 }
 
 python_library_host {
     name: "external_updater_lib",
-    defaults: ["external_updater_defaults"],
     srcs: [
         "archive_utils.py",
         "base_updater.py",
@@ -64,7 +45,6 @@
         "updater_utils.py",
     ],
     libs: [
-        "python-symbol",
         "libprotobuf-python",
     ],
     proto: {
@@ -78,7 +58,6 @@
 
 python_defaults {
     name: "external_updater_test_defaults",
-    defaults: ["external_updater_defaults"],
     version: {
         py3: {
             embedded_launcher: true,
diff --git a/OWNERS b/OWNERS
index da8db7a..61748ed 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,4 +1,2 @@
-# Default code reviewers picked from top 3 or more developers.
-# Please update this list if you find better candidates.
[email protected]
[email protected]
 [email protected]
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
deleted file mode 100644
index 9e97695..0000000
--- a/PREUPLOAD.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[Builtin Hooks]
-pylint3 = true
diff --git a/README.md b/README.md
index e685774..d5e251e 100644
--- a/README.md
+++ b/README.md
@@ -1,28 +1,40 @@
 # external_updater
 
-external updater is a tool to automatically updates libraries in external/ .
+external updater is a tool to automatically update libraries in external/.
 
 ## Usage
 
-Check updates for a library, or verify METADATA is valid:
+Check updates for a library or verify METADATA is valid:
 
 ```shell
 tools/external_updater/updater.sh check ${LIBNAME}
 ```
 
-Check updates for all libraries in external/:
-
-```shell
-tools/external_updater/updater.sh check --all
-```
-
-Update a library:
+Update a library, commit, and upload the change to Gerrit:
 
 ```shell
 tools/external_updater/updater.sh update ${LIBNAME}
 ```
 
-LIBNAME can be the path to a library under external/. E.g. kotlinc, or
+Update a library without committing and uploading to Gerrit:
+
+```shell
+tools/external_updater/updater.sh update --no-upload ${LIBNAME}
+```
+
+Update a library on top of the local changes in the current branch, commit, and upload the change to Gerrit:
+
+```shell
+tools/external_updater/updater.sh update --keep-local-changes ${LIBNAME}
+```
+
+Update a library without building:
+
+```shell
+tools/external_updater/updater.sh update --no-build ${LIBNAME}
+```
+
+LIBNAME can be the path to a library under external/, e.g. kotlinc, or
 python/cpython3.
 
 ## Configure
@@ -105,8 +117,10 @@
 these files over.
 See [example](https://android.googlesource.com/platform/external/kotlinc/+/refs/heads/master/post_update.sh).
 
+#### Local patches
+
 Local patches can be kept as patches/*.diff. They will be applied after
-upgrade. [example](https://cs.corp.google.com/android/external/jsmn/patches/header.diff)
+upgrade. [example](https://cs.android.com/android/platform/superproject/+/master:external/jsmn/patches/header.diff)
 
 ## Email notification
 
diff --git a/archive_utils.py b/archive_utils.py
index b8386aa..3c633d4 100644
--- a/archive_utils.py
+++ b/archive_utils.py
@@ -105,11 +105,11 @@
         Path to the temporary directory.
     """
 
-    print('Downloading {}'.format(url))
+    print(f'Downloading {url}')
     archive_file, _headers = urllib.request.urlretrieve(url)
 
     temporary_dir = tempfile.mkdtemp()
-    print('Extracting {} to {}'.format(archive_file, temporary_dir))
+    print(f'Extracting {archive_file} to {temporary_dir}')
     get_extract_func(url)(archive_file, temporary_dir)
 
     return temporary_dir
diff --git a/base_updater.py b/base_updater.py
index 78dde1c..cc4f6d6 100644
--- a/base_updater.py
+++ b/base_updater.py
@@ -42,7 +42,7 @@
         """Checks whether a new version is available."""
         raise NotImplementedError()
 
-    def update(self) -> None:
+    def update(self, skip_post_update: bool) -> None:
         """Updates the package.
 
         Has to call check() before this function.
diff --git a/crates_updater.py b/crates_updater.py
index ee476b5..0ce5def 100644
--- a/crates_updater.py
+++ b/crates_updater.py
@@ -21,6 +21,7 @@
 import shutil
 import tempfile
 import urllib.request
+from typing import IO
 
 import archive_utils
 from base_updater import Updater
@@ -28,9 +29,7 @@
 import metadata_pb2  # type: ignore
 import updater_utils
 
-CRATES_IO_URL_PATTERN: str = (r"^https:\/\/crates.io\/crates\/([-\w]+)")
-
-CRATES_IO_URL_RE: re.Pattern = re.compile(CRATES_IO_URL_PATTERN)
+LIBRARY_NAME_PATTERN: str = (r"([-\w]+)")
 
 ALPHA_BETA_PATTERN: str = (r"^.*[0-9]+\.[0-9]+\.[0-9]+-(alpha|beta).*")
 
@@ -40,6 +39,13 @@
 
 VERSION_MATCHER: re.Pattern = re.compile(VERSION_PATTERN)
 
+CRATES_IO_ARCHIVE_URL_PATTERN: str = (r"^https:\/\/static.crates.io\/crates\/" +
+                                      LIBRARY_NAME_PATTERN + "/" +
+                                      LIBRARY_NAME_PATTERN + "-" +
+                                      VERSION_PATTERN + ".crate")
+
+CRATES_IO_ARCHIVE_URL_RE: re.Pattern = re.compile(CRATES_IO_ARCHIVE_URL_PATTERN)
+
 DESCRIPTION_PATTERN: str = (r"^description *= *(\".+\")")
 
 DESCRIPTION_MATCHER: re.Pattern = re.compile(DESCRIPTION_PATTERN)
@@ -51,21 +57,23 @@
     download_url: str
     package: str
     package_dir: str
-    temp_file: tempfile.NamedTemporaryFile
+    temp_file: IO
 
     def is_supported_url(self) -> bool:
-        if self._old_url.type != metadata_pb2.URL.HOMEPAGE:
-            return False
-        match = CRATES_IO_URL_RE.match(self._old_url.value)
+        match = CRATES_IO_ARCHIVE_URL_RE.match(self._old_url.value)
         if match is None:
             return False
         self.package = match.group(1)
         return True
 
-    def _get_version_numbers(self, version: str) -> (int, int, int):
+    def _get_version_numbers(self, version: str) -> tuple[int, int, int]:
         match = VERSION_MATCHER.match(version)
         if match is not None:
-            return tuple(int(match.group(i)) for i in range(1, 4))
+            return (
+                int(match.group(1)),
+                int(match.group(2)),
+                int(match.group(3)),
+            )
         return (0, 0, 0)
 
     def _is_newer_version(self, prev_version: str, prev_id: int,
@@ -75,7 +83,7 @@
                 (self._get_version_numbers(prev_version), prev_id))
 
     def _find_latest_non_test_version(self) -> None:
-        url = "https://crates.io/api/v1/crates/{}/versions".format(self.package)
+        url = f"https://crates.io/api/v1/crates/{self.package}/versions"
         with urllib.request.urlopen(url) as request:
             data = json.loads(request.read().decode())
         last_id = 0
@@ -97,8 +105,7 @@
             self._new_ver = data["crate"]["max_version"]
         # Skip d.d.d-{alpha,beta}* versions
         if ALPHA_BETA_RE.match(self._new_ver):
-            print("Ignore alpha or beta release: {}-{}."
-                  .format(self.package, self._new_ver))
+            print(f"Ignore alpha or beta release: {self.package}-{self._new_ver}.")
             self._find_latest_non_test_version()
         else:
             url = url + "/" + self._new_ver
@@ -109,10 +116,10 @@
     def use_current_as_latest(self):
         Updater.use_current_as_latest(self)
         # A shortcut to use the static download path.
-        self.download_url = "https://static.crates.io/crates/{}/{}-{}.crate".format(
-            self.package, self.package, self._new_ver)
+        self.download_url = f"https://static.crates.io/crates/{self.package}/" \
+                            f"{self.package}-{self._new_ver}.crate"
 
-    def update(self) -> None:
+    def update(self, skip_post_update: bool) -> None:
         """Updates the package.
 
         Has to call check() before this function.
@@ -149,8 +156,8 @@
                 new_url_list.append(url)
         new_url = metadata_pb2.URL()
         new_url.type = metadata_pb2.URL.ARCHIVE
-        new_url.value = "https://static.crates.io/crates/{}/{}-{}.crate".format(
-            metadata.name, metadata.name, metadata.third_party.version)
+        new_url.value = f"https://static.crates.io/crates/{metadata.name}/" \
+                        f"{metadata.name}-{metadata.third_party.version}.crate"
         new_url_list.append(new_url)
         del metadata.third_party.url[:]
         metadata.third_party.url.extend(new_url_list)
@@ -168,11 +175,11 @@
         # track which files existed before the patching.
         rejects = list(self._proj_path.glob('**/*.rej'))
         if len(rejects) > 0:
-            print("Error: Found patch reject files: %s" % str(rejects))
+            print(f"Error: Found patch reject files: {str(rejects)}")
             self._has_errors = True
         # Check for Cargo errors embedded in Android.bp.
         # Note that this should stay in sync with cargo2android.py.
-        with open('%s/Android.bp' % self._proj_path, 'r') as bp_file:
+        with open(f'{self._proj_path}/Android.bp', 'r') as bp_file:
             for line in bp_file:
                 if line.strip() == "Errors in cargo.out:":
                     print("Error: Found Cargo errors in Android.bp")
diff --git a/external_updater.py b/external_updater.py
index c15e92d..149eea9 100644
--- a/external_updater.py
+++ b/external_updater.py
@@ -21,12 +21,14 @@
 """
 
 import argparse
+from collections.abc import Iterable
 import enum
 import glob
 import json
+import logging
 import os
 import sys
-import subprocess
+import textwrap
 import time
 from typing import Dict, Iterator, List, Union, Tuple, Type
 from pathlib import Path
@@ -91,12 +93,16 @@
                metadata: metadata_pb2.MetaData) -> None:
     full_path = updater.project_path
 
-    if args.branch_and_commit:
+    if not args.keep_local_changes:
         git_utils.checkout(full_path, args.remote_name + '/master')
+        if TMP_BRANCH_NAME in git_utils.list_local_branches(full_path):
+            git_utils.delete_branch(full_path, TMP_BRANCH_NAME)
+            git_utils.reset_hard(full_path)
+            git_utils.clean(full_path)
         git_utils.start_branch(full_path, TMP_BRANCH_NAME)
 
     try:
-        updater.update()
+        updater.update(args.skip_post_update)
 
         updated_metadata = metadata_pb2.MetaData()
         updated_metadata.CopyFrom(metadata)
@@ -110,23 +116,38 @@
         fileutils.write_metadata(full_path, updated_metadata, args.keep_date)
         git_utils.add_file(full_path, 'METADATA')
 
-        if args.branch_and_commit:
-            rel_proj_path = fileutils.get_relative_project_path(full_path)
-            msg = 'Upgrade {} to {}\n\nTest: make\n'.format(
-                rel_proj_path, updater.latest_version)
-            git_utils.remove_gitmodules(full_path)
-            git_utils.add_file(full_path, '*')
-            git_utils.commit(full_path, msg)
+        if args.build:
+            if not updater_utils.build(full_path):
+                print("Build failed. Aborting upload.")
+                return
+
+        if args.no_upload:
+            return
+
+        try:
+            rel_proj_path = str(fileutils.get_relative_project_path(full_path))
+        except ValueError:
+            # Absolute paths to other trees will not be relative to our tree. There are
+            # not portable instructions for upgrading that project, since the path will
+            # differ between machines (or checkouts).
+            rel_proj_path = "<absolute path to project>"
+        msg = textwrap.dedent(f"""\
+        Upgrade {metadata.name} to {updater.latest_version}
+
+        This project was upgraded with external_updater.
+        Usage: tools/external_updater/updater.sh update {rel_proj_path}
+        For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md
+
+        Test: TreeHugger""")
+        git_utils.remove_gitmodules(full_path)
+        git_utils.add_file(full_path, '*')
+        git_utils.commit(full_path, msg)
     except Exception as err:
         if updater.rollback():
             print('Rolled back.')
         raise err
 
-    if args.push_change:
-        git_utils.push(full_path, args.remote_name, updater.has_errors)
-
-    if args.branch_and_commit:
-        git_utils.checkout(full_path, args.remote_name + '/master')
+    git_utils.push(full_path, args.remote_name, updater.has_errors)
 
 
 def check_and_update(args: argparse.Namespace,
@@ -141,16 +162,14 @@
     """
 
     try:
-        rel_proj_path = fileutils.get_relative_project_path(proj_path)
-        print(f'Checking {rel_proj_path}. ', end='')
+        canonical_path = fileutils.canonicalize_project_path(proj_path)
+        print(f'Checking {canonical_path}. ', end='')
         updater, metadata = build_updater(proj_path)
         updater.check()
 
         current_ver = updater.current_version
         latest_ver = updater.latest_version
-        print('Current version: {}. Latest version: {}'.format(
-            current_ver, latest_ver),
-              end='')
+        print(f'Current version: {current_ver}. Latest version: {latest_ver}', end='')
 
         has_new_version = current_ver != latest_ver
         if has_new_version:
@@ -166,11 +185,11 @@
         return updater
     # pylint: disable=broad-except
     except Exception as err:
-        print('{} {}.'.format(color_string('Failed.', Color.ERROR), err))
+        logging.exception("Failed to check or update %s", proj_path)
         return str(err)
 
 
-def check_and_update_path(args: argparse.Namespace, paths: Iterator[str],
+def check_and_update_path(args: argparse.Namespace, paths: Iterable[str],
                           update_lib: bool,
                           delay: int) -> Dict[str, Dict[str, str]]:
     results = {}
@@ -182,14 +201,13 @@
         else:
             res['current'] = updater.current_version
             res['latest'] = updater.latest_version
-        relative_path = fileutils.get_relative_project_path(Path(path))
-        results[str(relative_path)] = res
+        results[str(fileutils.canonicalize_project_path(Path(path)))] = res
         time.sleep(delay)
     return results
 
 
 def _list_all_metadata() -> Iterator[str]:
-    for path, dirs, files in os.walk(fileutils.EXTERNAL_PATH):
+    for path, dirs, files in os.walk(fileutils.external_path()):
         if fileutils.METADATA_FILENAME in files:
             # Skip sub directories.
             dirs[:] = []
@@ -205,11 +223,11 @@
     result = [path for abs_path in abs_paths
               for path in sorted(glob.glob(str(abs_path)))]
     if paths and not result:
-        print('Could not find any valid paths in %s' % str(paths))
+        print(f'Could not find any valid paths in {str(paths)}')
     return result
 
 
-def write_json(json_file: str, results: Dict[str, Dict[str, str]]) -> List[str]:
+def write_json(json_file: str, results: Dict[str, Dict[str, str]]) -> None:
     """Output a JSON report."""
     with Path(json_file).open('w') as res_file:
         json.dump(results, res_file, sort_keys=True, indent=4)
@@ -254,7 +272,7 @@
         nargs='*',
         help='Paths of the project. '
         'Relative paths will be resolved from external/.')
-    check_parser.add_argument('--json_output',
+    check_parser.add_argument('--json-output',
                               help='Path of a json file to write result to.')
     check_parser.add_argument(
         '--all',
@@ -274,7 +292,7 @@
         nargs='*',
         help='Paths of the project as globs. '
         'Relative paths will be resolved from external/.')
-    update_parser.add_argument('--json_output',
+    update_parser.add_argument('--json-output',
                                help='Path of a json file to write result to.')
     update_parser.add_argument(
         '--force',
@@ -285,16 +303,23 @@
         help='Run update and refresh to the current version.',
         action='store_true')
     update_parser.add_argument(
-        '--keep_date',
+        '--keep-date',
         help='Run update and do not change date in METADATA.',
         action='store_true')
-    update_parser.add_argument('--branch_and_commit',
+    update_parser.add_argument('--no-upload',
                                action='store_true',
-                               help='Starts a new branch and commit changes.')
-    update_parser.add_argument('--push_change',
+                               help='Does not upload to Gerrit after upgrade')
+    update_parser.add_argument('--keep-local-changes',
                                action='store_true',
-                               help='Pushes change to Gerrit.')
-    update_parser.add_argument('--remote_name',
+                               help='Updates the current branch')
+    update_parser.add_argument('--skip-post-update',
+                               action='store_true',
+                               help='Skip post_update script')
+    update_parser.add_argument('--no-build',
+                               action='store_false',
+                               dest='build',
+                               help='Skip building')
+    update_parser.add_argument('--remote-name',
                                default='aosp',
                                required=False,
                                help='Upstream remote name.')
diff --git a/fileutils.py b/fileutils.py
index 17202c8..38142c6 100644
--- a/fileutils.py
+++ b/fileutils.py
@@ -14,6 +14,7 @@
 """Tool functions to deal with files."""
 
 import datetime
+from functools import cache
 import os
 from pathlib import Path
 import textwrap
@@ -24,18 +25,42 @@
 # pylint: disable=import-error
 import metadata_pb2  # type: ignore
 
-ANDROID_TOP = Path(os.environ.get('ANDROID_BUILD_TOP', os.getcwd()))
-EXTERNAL_PATH = ANDROID_TOP / 'external'
 
 METADATA_FILENAME = 'METADATA'
 
 
+@cache
+def external_path() -> Path:
+    """Returns the path to //external.
+
+    We cannot use the relative path from this file to find the top of the tree because
+    this will often be run in a "compiled" form from an arbitrary location in the out
+    directory. We can't fully rely on ANDROID_BUILD_TOP because not all contexts will
+    have run envsetup/lunch either. We use ANDROID_BUILD_TOP whenever it is set, but if
+    it is not set we instead rely on the convention that the CWD is the root of the tree
+    (updater.sh will cd there before executing).
+
+    There is one other context where this function cannot succeed: CI. Tests run in CI
+    do not have a source tree to find, so calling this function in that context will
+    fail.
+    """
+    android_top = Path(os.environ.get("ANDROID_BUILD_TOP", os.getcwd()))
+    top = android_top / 'external'
+
+    if not top.exists():
+        raise RuntimeError(
+            f"{top} does not exist. This program must be run from the "
+            f"root of an Android tree (CWD is {os.getcwd()})."
+        )
+    return top
+
+
 def get_absolute_project_path(proj_path: Path) -> Path:
     """Gets absolute path of a project.
 
     Path resolution starts from external/.
     """
-    return EXTERNAL_PATH / proj_path
+    return external_path() / proj_path
 
 
 def get_metadata_path(proj_path: Path) -> Path:
@@ -45,7 +70,32 @@
 
 def get_relative_project_path(proj_path: Path) -> Path:
     """Gets the relative path of a project starting from external/."""
-    return get_absolute_project_path(proj_path).relative_to(EXTERNAL_PATH)
+    return get_absolute_project_path(proj_path).relative_to(external_path())
+
+
+def canonicalize_project_path(proj_path: Path) -> Path:
+    """Returns the canonical representation of the project path.
+
+    For paths that are in the same tree as external_updater (the common case), the
+    canonical path is the path of the project relative to //external.
+
+    For paths that are in a different tree (an uncommon case used for updating projects
+    in other builds such as the NDK), the canonical path is the absolute path.
+    """
+    try:
+        return get_relative_project_path(proj_path)
+    except ValueError:
+        # A less common use case, but the path might be to a non-local tree, in which
+        # case the path will not be relative to our tree. This happens when using
+        # external_updater in another project like the NDK or rr.
+        if proj_path.is_absolute():
+            return proj_path
+
+        # Not relative to //external, and not an absolute path. This case hasn't existed
+        # before, so it has no canonical form.
+        raise ValueError(
+            f"{proj_path} must be either an absolute path or relative to {external_path()}"
+        )
 
 
 def read_metadata(proj_path: Path) -> metadata_pb2.MetaData:
@@ -85,12 +135,26 @@
         date.year = now.year
         date.month = now.month
         date.day = now.day
-    text_metadata = text_format.MessageToString(metadata)
+    try:
+        rel_proj_path = str(get_relative_project_path(proj_path))
+    except ValueError:
+        # Absolute paths to other trees will not be relative to our tree. There are
+        # no portable instructions for upgrading that project, since the path will
+        # differ between machines (or checkouts).
+        rel_proj_path = "<absolute path to project>"
+    usage_hint = textwrap.dedent(f"""\
+    # This project was upgraded with external_updater.
+    # Usage: tools/external_updater/updater.sh update {rel_proj_path}
+    # For more info, check https://cs.android.com/android/platform/superproject/+/master:tools/external_updater/README.md
+
+    """)
+    text_metadata = usage_hint + text_format.MessageToString(metadata)
     with get_metadata_path(proj_path).open('w') as metadata_file:
         if metadata.third_party.license_type == metadata_pb2.LicenseType.BY_EXCEPTION_ONLY:
            metadata_file.write(textwrap.dedent("""\
-            # *** THIS PACKAGE HAS SPECIAL LICENSING CONDITIONS.  PLEASE
-            #     CONSULT THE OWNERS AND [email protected] BEFORE
-            #     DEPENDING ON IT IN YOUR PROJECT. ***
+            # THIS PACKAGE HAS SPECIAL LICENSING CONDITIONS. PLEASE
+            # CONSULT THE OWNERS AND [email protected] BEFORE
+            # DEPENDING ON IT IN YOUR PROJECT.
+
             """))
         metadata_file.write(text_metadata)
diff --git a/git_updater.py b/git_updater.py
index ef320cb..483f3fc 100644
--- a/git_updater.py
+++ b/git_updater.py
@@ -23,21 +23,56 @@
 class GitUpdater(base_updater.Updater):
     """Updater for Git upstream."""
     UPSTREAM_REMOTE_NAME: str = "update_origin"
-    android_remote_name: str
 
     def is_supported_url(self) -> bool:
-        return self._old_url.type == metadata_pb2.URL.GIT
+        return git_utils.is_valid_url(self._proj_path, self._old_url.value)
+
+    @staticmethod
+    def _is_likely_android_remote(url: str) -> bool:
+        """Returns True if the URL is likely to be the project's Android remote."""
+        # There isn't a strict rule for finding the correct remote for upstream-master,
+        # so we have to guess. Be careful to filter out things that look almost right
+        # but aren't. Here's an example of a project that has a lot of false positives:
+        # 
+        # aosp    /usr/local/google/home/danalbert/src/mirrors/android/refs/aosp/toolchain/rr.git (fetch)
+        # aosp    persistent-https://android.git.corp.google.com/toolchain/rr (push)
+        # origin  https://github.com/DanAlbert/rr.git (fetch)
+        # origin  https://github.com/DanAlbert/rr.git (push)
+        # unmirrored      persistent-https://android.git.corp.google.com/toolchain/rr (fetch)
+        # unmirrored      persistent-https://android.git.corp.google.com/toolchain/rr (push)
+        # update_origin   https://github.com/rr-debugger/rr (fetch)
+        # update_origin   https://github.com/rr-debugger/rr (push)
+        # upstream        https://github.com/rr-debugger/rr.git (fetch)
+        # upstream        https://github.com/rr-debugger/rr.git (push)
+        #
+        # unmirrored is the correct remote here. It's not a local path, and contains
+        # either /platform/external/ or /toolchain/ (the two common roots for third-
+        # party Android imports).
+        if '://' not in url:
+            # Skip anything that's likely a local GoB mirror.
+            return False
+        if '/platform/external/' in url:
+            return True
+        if '/toolchain/' in url:
+            return True
+        return False
 
     def _setup_remote(self) -> None:
         remotes = git_utils.list_remotes(self._proj_path)
         current_remote_url = None
+        android_remote_name: str | None = None
         for name, url in remotes.items():
             if name == self.UPSTREAM_REMOTE_NAME:
                 current_remote_url = url
 
-            # Guess android remote name.
-            if '/platform/external/' in url:
-                self.android_remote_name = name
+            if self._is_likely_android_remote(url):
+                android_remote_name = name
+
+        if android_remote_name is None:
+            remotes_formatted = "\n".join(f"{k} {v}" for k, v in remotes.items())
+            raise RuntimeError(
+                f"Could not determine android remote for {self._proj_path}. Tried:\n"
+                f"{remotes_formatted}")
 
         if current_remote_url is not None and current_remote_url != self._old_url.value:
             git_utils.remove_remote(self._proj_path, self.UPSTREAM_REMOTE_NAME)
@@ -48,7 +83,7 @@
                                  self._old_url.value)
 
         git_utils.fetch(self._proj_path,
-                        [self.UPSTREAM_REMOTE_NAME, self.android_remote_name])
+                        [self.UPSTREAM_REMOTE_NAME, android_remote_name])
 
     def check(self) -> None:
         """Checks upstream and returns whether a new version is available."""
@@ -66,15 +101,16 @@
         self._new_ver = updater_utils.get_latest_version(self._old_ver, tags)
 
     def _check_head(self) -> None:
-        branch = git_utils.get_default_branch(self._proj_path,
-                                              self.UPSTREAM_REMOTE_NAME)
+        branch = git_utils.detect_default_branch(self._proj_path,
+                                                 self.UPSTREAM_REMOTE_NAME)
         self._new_ver = git_utils.get_sha_for_branch(
             self._proj_path, self.UPSTREAM_REMOTE_NAME + '/' + branch)
 
-    def update(self) -> None:
+    def update(self, skip_post_update: bool) -> None:
         """Updates the package.
-
         Has to call check() before this function.
         """
         print(f"Running `git merge {self._new_ver}`...")
         git_utils.merge(self._proj_path, self._new_ver)
+        if not skip_post_update:
+            updater_utils.run_post_update(self._proj_path, self._proj_path)
\ No newline at end of file
diff --git a/git_utils.py b/git_utils.py
index 5186eb3..c8e50b6 100644
--- a/git_utils.py
+++ b/git_utils.py
@@ -15,26 +15,23 @@
 
 import datetime
 import re
+import shutil
 import subprocess
 from pathlib import Path
-from typing import Dict, List, Tuple
 
 import hashtags
 import reviewers
 
-def _run(cmd: List[str], cwd: Path) -> str:
-    """Runs a command and returns its output."""
-    return subprocess.check_output(cmd, text=True, cwd=cwd)
 
-
-def fetch(proj_path: Path, remote_names: List[str]) -> None:
+def fetch(proj_path: Path, remote_names: list[str]) -> None:
     """Runs git fetch.
 
     Args:
         proj_path: Path to Git repository.
         remote_names: Array of string to specify remote names.
     """
-    _run(['git', 'fetch', '--tags', '--multiple'] + remote_names, cwd=proj_path)
+    cmd = ['git', 'fetch', '--tags', '--multiple'] + remote_names
+    subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True)
 
 
 def add_remote(proj_path: Path, name: str, url: str) -> None:
@@ -45,15 +42,17 @@
         name: Name of the new remote.
         url: Url of the new remote.
     """
-    _run(['git', 'remote', 'add', name, url], cwd=proj_path)
+    cmd = ['git', 'remote', 'add', name, url]
+    subprocess.run(cmd, cwd=proj_path, check=True)
 
 
 def remove_remote(proj_path: Path, name: str) -> None:
     """Removes a git remote."""
-    _run(['git', 'remote', 'remove', name], cwd=proj_path)
+    cmd = ['git', 'remote', 'remove', name]
+    subprocess.run(cmd, cwd=proj_path, check=True)
 
 
-def list_remotes(proj_path: Path) -> Dict[str, str]:
+def list_remotes(proj_path: Path) -> dict[str, str]:
     """Lists all Git remotes.
 
     Args:
@@ -62,40 +61,63 @@
     Returns:
         A dict from remote name to remote url.
     """
-    def parse_remote(line: str) -> Tuple[str, str]:
+    def parse_remote(line: str) -> tuple[str, str]:
         split = line.split()
-        return (split[0], split[1])
+        return split[0], split[1]
 
-    out = _run(['git', 'remote', '-v'], proj_path)
+    cmd = ['git', 'remote', '-v']
+    out = subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True,
+                         text=True).stdout
     lines = out.splitlines()
     return dict([parse_remote(line) for line in lines])
 
 
+def detect_default_branch(proj_path: Path, remote_name: str) -> str:
+    """Gets the name of the upstream's default branch to use."""
+    cmd = ['git', 'remote', 'show', remote_name]
+    out = subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True,
+                         text=True).stdout
+    lines = out.splitlines()
+    for line in lines:
+        if "HEAD branch" in line:
+            return line.split()[-1]
+    raise RuntimeError(
+        f"Could not find HEAD branch in 'git remote show {remote_name}'"
+    )
+
+
 def get_sha_for_branch(proj_path: Path, branch: str):
     """Gets the hash SHA for a branch."""
-    return _run(['git', 'rev-parse', branch], proj_path).strip()
+    cmd = ['git', 'rev-parse', branch]
+    return subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True,
+                          text=True).stdout.strip()
 
 
 def get_commits_ahead(proj_path: Path, branch: str,
-                      base_branch: str) -> List[str]:
+                      base_branch: str) -> list[str]:
     """Lists commits in `branch` but not `base_branch`."""
-    out = _run([
-        'git', 'rev-list', '--left-only', '--ancestry-path', '{}...{}'.format(
-            branch, base_branch)
-    ], proj_path)
+    cmd = [
+        'git', 'rev-list', '--left-only', '--ancestry-path', f'{branch}...{base_branch}'
+    ]
+    out = subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True,
+                         text=True).stdout
     return out.splitlines()
 
 
 # pylint: disable=redefined-outer-name
 def get_commit_time(proj_path: Path, commit: str) -> datetime.datetime:
     """Gets commit time of one commit."""
-    out = _run(['git', 'show', '-s', '--format=%ct', commit], cwd=proj_path)
+    cmd = ['git', 'show', '-s', '--format=%ct', commit]
+    out = subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True,
+                         text=True).stdout
     return datetime.datetime.fromtimestamp(int(out.strip()))
 
 
-def list_remote_branches(proj_path: Path, remote_name: str) -> List[str]:
+def list_remote_branches(proj_path: Path, remote_name: str) -> list[str]:
     """Lists all branches for a remote."""
-    lines = _run(['git', 'branch', '-r'], cwd=proj_path).splitlines()
+    cmd = ['git', 'branch', '-r']
+    lines = subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True,
+                           text=True).stdout.splitlines()
     stripped = [line.strip() for line in lines]
     remote_path = remote_name + '/'
     return [
@@ -104,30 +126,30 @@
     ]
 
 
-def list_remote_tags(proj_path: Path, remote_name: str) -> List[str]:
+def list_local_branches(proj_path: Path) -> list[str]:
+    """Lists all local branches."""
+    cmd = ['git', 'branch', '--format=%(refname:short)']
+    lines = subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True,
+                           text=True).stdout.splitlines()
+    return lines
+
+
+def list_remote_tags(proj_path: Path, remote_name: str) -> list[str]:
     """Lists all tags for a remote."""
     regex = re.compile(r".*refs/tags/(?P<tag>[^\^]*).*")
-    def parse_remote_tag(line: str) -> str:
-        return regex.match(line).group("tag")
 
-    lines = _run(['git', "ls-remote", "--tags", remote_name],
-                 cwd=proj_path).splitlines()
+    def parse_remote_tag(line: str) -> str:
+        if (m := regex.match(line)) is not None:
+            return m.group("tag")
+        raise ValueError(f"Could not parse tag from {line}")
+
+    cmd = ['git', "ls-remote", "--tags", remote_name]
+    lines = subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True,
+                           text=True).stdout.splitlines()
     tags = [parse_remote_tag(line) for line in lines]
     return list(set(tags))
 
 
-def get_default_branch(proj_path: Path, remote_name: str) -> str:
-    """Gets the name of the upstream branch to use."""
-    branches_to_try = ['master', 'main']
-    remote_branches = list_remote_branches(proj_path, remote_name)
-    for branch in branches_to_try:
-        if branch in remote_branches:
-            return branch
-    # We couldn't find any of the branches we expected.
-    # Default to 'master', although nothing will work well.
-    return 'master'
-
-
 COMMIT_PATTERN = r'^[a-f0-9]{40}$'
 COMMIT_RE = re.compile(COMMIT_PATTERN)
 
@@ -141,42 +163,77 @@
 def merge(proj_path: Path, branch: str) -> None:
     """Merges a branch."""
     try:
-        _run(['git', 'merge', branch, '--no-commit'], cwd=proj_path)
+        cmd = ['git', 'merge', branch, '--no-commit']
+        subprocess.run(cmd, cwd=proj_path, check=True)
     except subprocess.CalledProcessError as err:
         if hasattr(err, "output"):
             print(err.output)
-        _run(['git', 'merge', '--abort'], cwd=proj_path)
-        raise
+        if not merge_conflict(proj_path):
+            raise
+
+
+def merge_conflict(proj_path: Path) -> bool:
+    """Checks if there was a merge conflict."""
+    cmd = ['git', 'ls-files', '--unmerged']
+    out = subprocess.run(cmd, capture_output=True, cwd=proj_path, check=True,
+                         text=True).stdout
+    return bool(out)
 
 
 def add_file(proj_path: Path, file_name: str) -> None:
     """Stages a file."""
-    _run(['git', 'add', file_name], cwd=proj_path)
+    cmd = ['git', 'add', file_name]
+    subprocess.run(cmd, cwd=proj_path, check=True)
 
 
 def remove_gitmodules(proj_path: Path) -> None:
     """Deletes .gitmodules files."""
-    _run(['find', '.', '-name', '.gitmodules', '-delete'], cwd=proj_path)
+    cmd = ['find', '.', '-name', '.gitmodules', '-delete']
+    subprocess.run(cmd, cwd=proj_path, check=True)
 
 
 def delete_branch(proj_path: Path, branch_name: str) -> None:
     """Force delete a branch."""
-    _run(['git', 'branch', '-D', branch_name], cwd=proj_path)
+    cmd = ['git', 'branch', '-D', branch_name]
+    subprocess.run(cmd, cwd=proj_path, check=True)
+
+
+def tree_uses_pore(proj_path: Path) -> bool:
+    """Returns True if the tree uses pore rather than repo.
+
+    https://github.com/jmgao/pore
+    """
+    if shutil.which("pore") is None:
+        # Fast path for users that don't have pore installed, since that's almost
+        # everyone.
+        return False
+
+    if proj_path == Path(proj_path.root):
+        return False
+    if (proj_path / ".pore").exists():
+        return True
+    return tree_uses_pore(proj_path.parent)
 
 
 def start_branch(proj_path: Path, branch_name: str) -> None:
     """Starts a new repo branch."""
-    _run(['repo', 'start', branch_name], cwd=proj_path)
+    repo = 'repo'
+    if tree_uses_pore(proj_path):
+        repo = 'pore'
+    cmd = [repo, 'start', branch_name]
+    subprocess.run(cmd, cwd=proj_path, check=True)
 
 
 def commit(proj_path: Path, message: str) -> None:
     """Commits changes."""
-    _run(['git', 'commit', '-m', message], cwd=proj_path)
+    cmd = ['git', 'commit', '-m', message]
+    subprocess.run(cmd, cwd=proj_path, check=True)
 
 
 def checkout(proj_path: Path, branch_name: str) -> None:
     """Checkouts a branch."""
-    _run(['git', 'checkout', branch_name], cwd=proj_path)
+    cmd = ['git', 'checkout', branch_name]
+    subprocess.run(cmd, cwd=proj_path, check=True)
 
 
 def push(proj_path: Path, remote_name: str, has_errors: bool) -> None:
@@ -188,4 +245,23 @@
         cmd.extend(['-o', 't=' + tag])
     if has_errors:
         cmd.extend(['-o', 'l=Verified-1'])
-    _run(cmd, cwd=proj_path)
+    subprocess.run(cmd, cwd=proj_path, check=True)
+
+
+def reset_hard(proj_path: Path) -> None:
+    """Resets current HEAD and discards changes to tracked files."""
+    cmd = ['git', 'reset', '--hard']
+    subprocess.run(cmd, cwd=proj_path, check=True)
+
+
+def clean(proj_path: Path) -> None:
+    """Removes untracked files and directories."""
+    cmd = ['git', 'clean', '-fdx']
+    subprocess.run(cmd, cwd=proj_path, check=True)
+
+
+def is_valid_url(proj_path: Path, url: str) -> bool:
+    cmd = ['git', "ls-remote", url]
+    return subprocess.run(cmd, cwd=proj_path, stdin=subprocess.DEVNULL,
+                          stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
+                          start_new_session=True).returncode == 0
diff --git a/github_archive_updater.py b/github_archive_updater.py
index fc3e362..575669d 100644
--- a/github_archive_updater.py
+++ b/github_archive_updater.py
@@ -126,10 +126,8 @@
                                or self._fetch_latest_tag())
 
         # Adds source code urls.
-        urls.append('https://github.com/{}/{}/archive/{}.tar.gz'.format(
-            self.owner, self.repo, self._new_ver))
-        urls.append('https://github.com/{}/{}/archive/{}.zip'.format(
-            self.owner, self.repo, self._new_ver))
+        urls.append(f'https://github.com/{self.owner}/{self.repo}/archive/{self._new_ver}.tar.gz')
+        urls.append(f'https://github.com/{self.owner}/{self.repo}/archive/{self._new_ver}.zip')
 
         self._new_url.value = choose_best_url(urls, self._old_url.value)
 
@@ -156,7 +154,7 @@
         else:
             self._fetch_latest_version()
 
-    def update(self) -> None:
+    def update(self, skip_post_update: bool) -> None:
         """Updates the package.
 
         Has to call check() before this function.
diff --git a/metadata.proto b/metadata.proto
index ed72c34..f7eb007 100644
--- a/metadata.proto
+++ b/metadata.proto
@@ -41,13 +41,20 @@
   UNENCUMBERED = 7;
 }
 
+enum DirectoryType {
+  PACKAGE = 1;
+  GOOGLE_INTERNAL = 4;
+}
+
 message ThirdPartyMetaData {
   repeated URL url = 1;
   optional string version = 2;
   optional LicenseType license_type = 4;
   optional string license_note = 5;
+  optional string local_modifications = 6;
   optional Security security = 7;
   optional Date last_upgrade_date = 10;
+  optional DirectoryType type = 11 [default = PACKAGE];
 }
 
 message URL {
@@ -56,6 +63,7 @@
     HOMEPAGE = 1;
     ARCHIVE = 2;
     GIT = 3;
+    PIPER = 4;
     SVN = 7;
     HG = 8;
     DARCS = 9;
diff --git a/notifier.py b/notifier.py
index dcff0b3..714e50f 100644
--- a/notifier.py
+++ b/notifier.py
@@ -72,13 +72,12 @@
 
 
 def _send_email(proj, latest_ver, recipient, upgrade_log):
-    print('Sending email for {}: {}'.format(proj, latest_ver))
+    print(f'Sending email for {proj}: {latest_ver}')
     msg = ""
     match = CHANGE_URL_RE.search(upgrade_log)
     if match is not None:
         subject = "[Succeeded]"
-        msg = 'An upgrade change is generated at:\n{}'.format(
-            match.group(1))
+        msg = f'An upgrade change is generated at:\n{match.group(1)}'
     else:
         subject = "[Failed]"
         msg = 'Failed to generate upgrade change. See logs below for details.'
@@ -151,9 +150,9 @@
                 proj_history[latest_ver] = int(time.time())
                 proj_history[NOTIFIED_TIME_KEY_NAME] = int(time.time())
             except subprocess.CalledProcessError as err:
-                msg = """Failed to send email for {} ({}).
-stdout: {}
-stderr: {}""".format(proj, latest_ver, err.stdout, err.stderr)
+                msg = f"""Failed to send email for {proj} ({latest_ver}).
+stdout: {err.stdout}
+stderr: {err.stderr}"""
                 print(msg)
 
 
@@ -181,24 +180,23 @@
 def _upgrade(proj):
     # pylint: disable=subprocess-run-check
     out = subprocess.run([
-        'out/soong/host/linux-x86/bin/external_updater', 'update',
-        '--branch_and_commit', '--push_change', proj
+        'out/soong/host/linux-x86/bin/external_updater', 'update', proj
     ],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          cwd=_get_android_top())
     stdout = out.stdout.decode('utf-8')
     stderr = out.stderr.decode('utf-8')
-    return """
+    return f"""
 ====================
 |    Debug Info    |
 ====================
 -=-=-=-=stdout=-=-=-=-
-{}
+{stdout}
 
 -=-=-=-=stderr=-=-=-=-
-{}
-""".format(stdout, stderr)
+{stderr}
+"""
 
 
 def _check_updates(args):
diff --git a/reviewers.py b/reviewers.py
index 39a7349..722b489 100644
--- a/reviewers.py
+++ b/reviewers.py
@@ -13,9 +13,10 @@
 # limitations under the License.
 """Find main reviewers for git push commands."""
 
+from collections.abc import MutableMapping
 import math
 import random
-from typing import List, Mapping, Set, Union
+from typing import List, Set, Union
 
 # To randomly pick one of multiple reviewers, we put them in a List[str]
 # to work with random.choice efficiently.
@@ -25,7 +26,7 @@
 # (1) a single reviewer email address as a string, or
 # (2) a List of multiple reviewers to be randomly picked, or
 # (3) a Set of multiple reviewers to be all added.
-ProjMapping = Mapping[str, Union[str, List[str], Set[str]]]
+ProjMapping = MutableMapping[str, Union[str, List[str], Set[str]]]
 
 # Rust crate owners (reviewers).
 RUST_CRATE_OWNERS: ProjMapping = {
@@ -46,7 +47,7 @@
 # The sum of these quotas should ideally be at least the number of Rust
 # projects, but this only matters if we have many entries in RUST_CRATE_OWNERS,
 # as we subtract a person's owned crates from their quota.
-RUST_REVIEWERS: Mapping[str, int] = {
+RUST_REVIEWERS: dict[str, float] = {
     '[email protected]': 20,
     '[email protected]': 20,
     '[email protected]': 20,
@@ -58,7 +59,7 @@
 
 
 # pylint: disable=invalid-name
-def add_proj_count(projects: Mapping[str, float], reviewer: str, n: float):
+def add_proj_count(projects: MutableMapping[str, float], reviewer: str, n: float) -> None:
     """Add n to the number of projects owned by the reviewer."""
     if reviewer in projects:
         projects[reviewer] += n
@@ -75,7 +76,7 @@
     """Create a list of duplicated reviewers for weighted random selection."""
     # Count number of projects owned by each reviewer.
     rust_reviewers = set(RUST_REVIEWERS.keys())
-    projects = {}  # map from owner to number of owned projects
+    projects: dict[str, float] = {}  # map from owner to number of owned projects
     for value in PROJ_REVIEWERS.values():
         if isinstance(value, str):  # single reviewer for a project
             add_proj_count(projects, value, 1)
diff --git a/test_git_utils.py b/test_git_utils.py
new file mode 100644
index 0000000..c9d948a
--- /dev/null
+++ b/test_git_utils.py
@@ -0,0 +1,59 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Tests for the git_utils module."""
+from pathlib import Path
+import pytest
+from pytest_mock import MockerFixture
+
+from git_utils import tree_uses_pore
+
+
[email protected](name="repo_tree")
+def fixture_repo_tree(tmp_path: Path) -> Path:
+    """Fixture for a repo tree."""
+    (tmp_path / ".repo").write_text("")
+    (tmp_path / "external/foobar").mkdir(parents=True)
+    return tmp_path
+
+
[email protected](name="pore_tree")
+def fixture_pore_tree(repo_tree: Path) -> Path:
+    """Fixture for a pore tree."""
+    (repo_tree / ".pore").write_text("")
+    return repo_tree
+
+
+def test_tree_uses_pore_fast_path(tmp_path: Path, mocker: MockerFixture) -> None:
+    """Tests that the fast-path does not recurse."""
+    which_mock = mocker.patch("shutil.which")
+    which_mock.return_value = None
+    path_parent_mock = mocker.patch("pathlib.Path.parent")
+    assert not tree_uses_pore(tmp_path)
+    path_parent_mock.assert_not_called()
+
+
+def test_tree_uses_pore_identifies_pore_trees(pore_tree: Path, mocker: MockerFixture) -> None:
+    """Tests that a pore tree is correctly identified."""
+    which_mock = mocker.patch("shutil.which")
+    which_mock.return_value = Path("pore")
+    assert tree_uses_pore(pore_tree)
+
+
+def test_tree_uses_pore_identifies_repo_trees(repo_tree: Path, mocker: MockerFixture) -> None:
+    """Tests that a repo tree is correctly identified."""
+    which_mock = mocker.patch("shutil.which")
+    which_mock.return_value = Path("pore")
+    assert not tree_uses_pore(repo_tree)
diff --git a/update_package.sh b/update_package.sh
index 4715db7..053da15 100644
--- a/update_package.sh
+++ b/update_package.sh
@@ -54,9 +54,11 @@
 CopyIfPresent "OWNERS"
 CopyIfPresent "README.android"
 
-echo "Applying patches..."
+file_counter=0
+total_files=$(ls $tmp_dir/patches | grep -E '\.(diff|patch)$' | wc -l)
 for p in $tmp_dir/patches/*.{diff,patch}
 do
+  file_counter=$((file_counter+1))
   [ -e "$p" ] || continue
   # Do not patch the Android.bp file, as we assume it will
   # patch itself.
@@ -65,7 +67,7 @@
       [ "$(basename $p)" != "Android.bp.diff" ] || continue
       [ "$(basename $p)" != "Android.bp.patch" ] || continue
   fi
-  echo "Applying $p..."
+  echo "Applying patch [$file_counter/$total_files] $p..."
   patch -p1 -d $tmp_dir --no-backup-if-mismatch < $p;
 done
 
diff --git a/updater.sh b/updater.sh
index 5aca93d..edddb57 100755
--- a/updater.sh
+++ b/updater.sh
@@ -17,7 +17,5 @@
 set -e
 
 cd $(dirname "$0")/../..
-source build/envsetup.sh
-lunch aosp_arm-eng
-mmma tools/external_updater
+./build/soong/soong_ui.bash --make-mode external_updater
 out/host/linux-x86/bin/external_updater $@
diff --git a/updater_utils.py b/updater_utils.py
index 209a1cd..b263dcb 100644
--- a/updater_utils.py
+++ b/updater_utils.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 """Helper functions for updaters."""
 
+from collections.abc import Sequence
 import os
 import re
 import subprocess
@@ -40,10 +41,11 @@
       ValueError: Occurred when there's no updater for all urls.
     """
     for url in metadata.third_party.url:
-        for updater_cls in updaters:
-            updater = updater_cls(proj_path, url, metadata.third_party.version)
-            if updater.is_supported_url():
-                return updater
+        if url.type != metadata_pb2.URL.HOMEPAGE:
+            for updater_cls in updaters:
+                updater = updater_cls(proj_path, url, metadata.third_party.version)
+                if updater.is_supported_url():
+                    return updater
 
     raise ValueError('No supported URL.')
 
@@ -56,13 +58,25 @@
       target_dir: The path to the project in Android source tree.
     """
 
-    print('Updating {} using {}.'.format(target_dir, source_dir))
+    print(f'Updating {target_dir} using {source_dir}.')
     script_path = os.path.join(os.path.dirname(sys.argv[0]),
                                'update_package.sh')
     subprocess.check_call(['bash', script_path, source_dir, target_dir,
                            "" if temp_file is None else temp_file])
 
 
+def run_post_update(source_dir: Path, target_dir: Path) -> None:
+    """
+      source_dir: Path to the new downloaded and extracted package.
+      target_dir: The path to the project in Android source tree.
+    """
+    post_update_path = os.path.join(source_dir, 'post_update.sh')
+    if os.path.isfile(post_update_path):
+        cmd: Sequence[str | Path] = ['bash', post_update_path, source_dir, target_dir]
+        print(f'Running {post_update_path}')
+        subprocess.check_call(cmd)
+
+
 VERSION_SPLITTER_PATTERN: str = r'[\.\-_]'
 VERSION_PATTERN: str = (r'^(?P<prefix>[^\d]*)' + r'(?P<version>\d+(' +
                         VERSION_SPLITTER_PATTERN + r'\d+)*)' +
@@ -114,3 +128,9 @@
     if not latest:
         raise ValueError('No matching version.')
     return latest
+
+
+def build(proj_path: Path) -> None:
+    cmd = ['build/soong/soong_ui.bash', "--build-mode", "--modules-in-a-dir-no-deps", f"--dir={str(proj_path)}"]
+    print('Building...')
+    subprocess.run(cmd, check=True, text=True)