Refactor updater for more type annotations
Change-Id: I3ab3463c6f1fd1cdc4df099e8dff21685cbc722b
diff --git a/.gitignore b/.gitignore
index 8d35cb3..d03d717 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
__pycache__
*.pyc
+.mypy_cache
diff --git a/Android.bp b/Android.bp
index 77feab8..960d19c 100644
--- a/Android.bp
+++ b/Android.bp
@@ -36,6 +36,7 @@
name: "external_updater_lib",
srcs: [
"archive_utils.py",
+ "base_updater.py",
"crates_updater.py",
"fileutils.py",
"git_updater.py",
diff --git a/archive_utils.py b/archive_utils.py
index fe9934a..4a10392 100644
--- a/archive_utils.py
+++ b/archive_utils.py
@@ -25,7 +25,6 @@
See https://bugs.python.org/issue15795
"""
-
def _extract_member(self, member, targetpath, pwd):
ret_val = super()._extract_member(member, targetpath, pwd)
diff --git a/base_updater.py b/base_updater.py
new file mode 100644
index 0000000..74b688d
--- /dev/null
+++ b/base_updater.py
@@ -0,0 +1,72 @@
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Base class for all updaters."""
+
+from pathlib import Path
+
+import fileutils
+import metadata_pb2 # type: ignore
+
+
+class Updater:
+ """Base Updater that defines methods common for all updaters."""
+ def __init__(self, proj_path: Path, old_url: metadata_pb2.URL,
+ old_ver: str) -> None:
+ self._proj_path = fileutils.get_absolute_project_path(proj_path)
+ self._old_url = old_url
+ self._old_ver = old_ver
+
+ self._new_url = metadata_pb2.URL()
+ self._new_url.CopyFrom(old_url)
+ self._new_ver = old_ver
+
+ def is_supported_url(self) -> bool:
+ """Returns whether the url is supported."""
+ raise NotImplementedError()
+
+ def check(self) -> None:
+ """Checks whether a new version is available."""
+ raise NotImplementedError()
+
+ def update(self) -> None:
+ """Updates the package.
+
+ Has to call check() before this function.
+ """
+ raise NotImplementedError()
+
+ @property
+ def project_path(self) -> Path:
+ """Gets absolute path to the project."""
+ return self._proj_path
+
+ @property
+ def current_version(self) -> str:
+ """Gets the current version."""
+ return self._old_ver
+
+ @property
+ def current_url(self) -> metadata_pb2.URL:
+ """Gets the current url."""
+ return self._old_url
+
+ @property
+ def latest_version(self) -> str:
+ """Gets latest version."""
+ return self._new_ver
+
+ @property
+ def latest_url(self) -> metadata_pb2.URL:
+ """Gets URL for latest version."""
+ return self._new_url
diff --git a/crates_updater.py b/crates_updater.py
index b26034f..77fec67 100644
--- a/crates_updater.py
+++ b/crates_updater.py
@@ -13,66 +13,47 @@
# limitations under the License.
"""Module to check updates from crates.io."""
-
import json
import re
import urllib.request
import archive_utils
-import fileutils
-import metadata_pb2 # pylint: disable=import-error
+from base_updater import Updater
+import metadata_pb2 # type: ignore
import updater_utils
+CRATES_IO_URL_PATTERN: str = (r'^https:\/\/crates.io\/crates\/([-\w]+)')
-CRATES_IO_URL_PATTERN = (r'^https:\/\/crates.io\/crates\/([-\w]+)')
-
-CRATES_IO_URL_RE = re.compile(CRATES_IO_URL_PATTERN)
+CRATES_IO_URL_RE: re.Pattern = re.compile(CRATES_IO_URL_PATTERN)
-class CratesUpdater():
+class CratesUpdater(Updater):
"""Updater for crates.io packages."""
- def __init__(self, url, proj_path, metadata):
- if url.type != metadata_pb2.URL.HOMEPAGE:
- raise ValueError('Only check HOMEPAGE url.')
- match = CRATES_IO_URL_RE.match(url.value)
- if match is None:
- raise ValueError('HOMEPAGE url must have crates.io.')
- self.proj_path = proj_path
- self.metadata = metadata
- self.package = match.group(1)
- self.upstream_url = url
- self.new_version = None
- self.dl_path = None
+ dl_path: str
+ package: str
- def check(self):
+ def is_supported_url(self) -> bool:
+ if self._old_url.type != metadata_pb2.URL.HOMEPAGE:
+ return False
+ match = CRATES_IO_URL_RE.match(self._old_url.value)
+ if match is None:
+ return False
+ self.package = match.group(1)
+ return True
+
+ def check(self) -> None:
"""Checks crates.io and returns whether a new version is available."""
url = "https://crates.io/api/v1/crates/" + self.package
with urllib.request.urlopen(url) as request:
data = json.loads(request.read().decode())
- self.new_version = data["crate"]["max_version"]
- url = url + "/" + self.new_version
+ self._new_ver = data["crate"]["max_version"]
+ url = url + "/" + self._new_ver
with urllib.request.urlopen(url) as request:
data = json.loads(request.read().decode())
self.dl_path = data["version"]["dl_path"]
- print('Current version: {}. Latest version: {}'.format(
- self.get_current_version(), self.new_version), end='')
- def get_current_version(self):
- """Returns the latest version name recorded in METADATA."""
- return self.metadata.third_party.version
-
- def get_latest_version(self):
- """Returns the latest version name in upstream."""
- return self.new_version
-
- def _write_metadata(self, path):
- updated_metadata = metadata_pb2.MetaData()
- updated_metadata.CopyFrom(self.metadata)
- updated_metadata.third_party.version = self.new_version
- fileutils.write_metadata(path, updated_metadata)
-
- def update(self):
+ def update(self) -> None:
"""Updates the package.
Has to call check() before this function.
@@ -81,7 +62,6 @@
url = 'https://crates.io' + self.dl_path
temporary_dir = archive_utils.download_and_extract(url)
package_dir = archive_utils.find_archive_root(temporary_dir)
- self._write_metadata(package_dir)
- updater_utils.replace_package(package_dir, self.proj_path)
+ updater_utils.replace_package(package_dir, self._proj_path)
finally:
urllib.request.urlcleanup()
diff --git a/external_updater.py b/external_updater.py
index 2af66cd..6c4759f 100644
--- a/external_updater.py
+++ b/external_updater.py
@@ -1,3 +1,4 @@
+#
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,40 +20,55 @@
"""
import argparse
+import enum
import json
import os
import sys
import subprocess
import time
+from typing import Dict, Iterator, List, Union, Tuple, Type
+from pathlib import Path
-from google.protobuf import text_format # pylint: disable=import-error
+from google.protobuf import text_format # type: ignore
+from base_updater import Updater
from crates_updater import CratesUpdater
from git_updater import GitUpdater
from github_archive_updater import GithubArchiveUpdater
import fileutils
import git_utils
+import metadata_pb2 # type: ignore
import updater_utils
+UPDATERS: List[Type[Updater]] = [
+ CratesUpdater,
+ GithubArchiveUpdater,
+ GitUpdater,
+]
-UPDATERS = [CratesUpdater, GithubArchiveUpdater, GitUpdater]
-
+TMP_BRANCH_NAME = 'tmp_auto_upgrade'
USE_COLOR = sys.stdout.isatty()
-def color_string(string, color):
+
+@enum.unique
+class Color(enum.Enum):
+ """Colors for output to console."""
+ FRESH = '\x1b[32m'
+ STALE = '\x1b[31;1m'
+ ERROR = '\x1b[31m'
+
+
+END_COLOR = '\033[0m'
+
+
+def color_string(string: str, color: Color):
"""Changes the color of a string when print to terminal."""
if not USE_COLOR:
return string
- colors = {
- 'FRESH': '\x1b[32m',
- 'STALE': '\x1b[31;1m',
- 'ERROR': '\x1b[31m',
- }
- end_color = '\033[0m'
- return colors[color] + string + end_color
+ return color.value + string + END_COLOR
-def build_updater(proj_path):
+def build_updater(proj_path: Path) -> Tuple[Updater, metadata_pb2.MetaData]:
"""Build updater for a project specified by proj_path.
Reads and parses METADATA file. And builds updater based on the information.
@@ -65,131 +81,15 @@
"""
proj_path = fileutils.get_absolute_project_path(proj_path)
- try:
- metadata = fileutils.read_metadata(proj_path)
- except text_format.ParseError as err:
- print('{} {}.'.format(color_string('Invalid metadata file:', 'ERROR'),
- err))
- return None
-
- try:
- updater = updater_utils.create_updater(metadata, proj_path, UPDATERS)
- except ValueError:
- print(color_string('No supported URL.', 'ERROR'))
- return None
- return updater
+ metadata = fileutils.read_metadata(proj_path)
+ updater = updater_utils.create_updater(metadata, proj_path, UPDATERS)
+ return (updater, metadata)
-def has_new_version(updater):
- """Whether an updater found a new version."""
- return updater.get_current_version() != updater.get_latest_version()
+def _do_update(args: argparse.Namespace, updater: Updater,
+ metadata: metadata_pb2.MetaData) -> None:
+ full_path = updater.project_path
-
-def _message_for_calledprocesserror(error):
- return '\n'.join([error.stdout.decode('utf-8'),
- error.stderr.decode('utf-8')])
-
-
-def check_update(proj_path):
- """Checks updates for a project. Prints result on console.
-
- Args:
- proj_path: Absolute or relative path to the project.
- """
-
- print(
- 'Checking {}. '.format(fileutils.get_relative_project_path(proj_path)),
- end='')
- updater = build_updater(proj_path)
- if updater is None:
- return (None, 'Failed to create updater')
- try:
- updater.check()
- if has_new_version(updater):
- print(color_string(' Out of date!', 'STALE'))
- else:
- print(color_string(' Up to date.', 'FRESH'))
- return (updater, None)
- except (IOError, ValueError) as err:
- print('{} {}.'.format(color_string('Failed.', 'ERROR'),
- err))
- return (updater, str(err))
- except subprocess.CalledProcessError as err:
- msg = _message_for_calledprocesserror(err)
- print('{}\n{}'.format(msg, color_string('Failed.', 'ERROR')))
- return (updater, msg)
-
-
-def _process_update_result(path):
- res = {}
- updater, err = check_update(path)
- if err is not None:
- res['error'] = str(err)
- else:
- res['current'] = updater.get_current_version()
- res['latest'] = updater.get_latest_version()
- return res
-
-
-def _check_some(paths, delay):
- results = {}
- for path in paths:
- relative_path = fileutils.get_relative_project_path(path)
- results[relative_path] = _process_update_result(path)
- time.sleep(delay)
- return results
-
-
-def _check_all(delay):
- results = {}
- for path, dirs, files in os.walk(fileutils.EXTERNAL_PATH):
- dirs.sort(key=lambda d: d.lower())
- if fileutils.METADATA_FILENAME in files:
- # Skip sub directories.
- dirs[:] = []
- relative_path = fileutils.get_relative_project_path(path)
- results[relative_path] = _process_update_result(path)
- time.sleep(delay)
- return results
-
-
-def check(args):
- """Handler for check command."""
- if args.all:
- results = _check_all(args.delay)
- else:
- results = _check_some(args.paths, args.delay)
-
- if args.json_output is not None:
- with open(args.json_output, 'w') as f:
- json.dump(results, f, sort_keys=True, indent=4)
-
-
-def update(args):
- """Handler for update command."""
- try:
- _do_update(args)
- except subprocess.CalledProcessError as err:
- msg = _message_for_calledprocesserror(err)
- print(
- '{}\n{}'.format(
- msg,
- color_string(
- 'Failed to upgrade.',
- 'ERROR')))
-
-
-TMP_BRANCH_NAME = 'tmp_auto_upgrade'
-
-
-def _do_update(args):
- updater, _ = check_update(args.path)
- if updater is None:
- return
- if not has_new_version(updater) and not args.force:
- return
-
- full_path = fileutils.get_absolute_project_path(args.path)
if args.branch_and_commit:
git_utils.checkout(full_path, args.remote_name + '/master')
try:
@@ -201,9 +101,17 @@
updater.update()
+ updated_metadata = metadata_pb2.MetaData()
+ updated_metadata.CopyFrom(metadata)
+ updated_metadata.third_party.version = updater.latest_version
+ for metadata_url in updated_metadata.third_party.url:
+ if metadata_url == updater.current_url:
+ metadata_url.CopyFrom(updater.latest_url)
+ fileutils.write_metadata(full_path, updated_metadata)
+ git_utils.add_file(full_path, 'METADATA')
+
if args.branch_and_commit:
- msg = 'Upgrade {} to {}\n\nTest: None'.format(
- args.path, updater.get_latest_version())
+ msg = 'Upgrade {} to {}\n'.format(args.path, updater.latest_version)
git_utils.add_file(full_path, '*')
git_utils.commit(full_path, msg)
@@ -214,7 +122,85 @@
git_utils.checkout(full_path, args.remote_name + '/master')
-def parse_args():
+def check_and_update(args: argparse.Namespace,
+ proj_path: Path,
+ update_lib=False) -> Union[Updater, str]:
+ """Checks updates for a project. Prints result on console.
+
+ Args:
+ args: commandline arguments
+ proj_path: Absolute or relative path to the project.
+      update_lib: If false, will only check for new version, but not update.
+ """
+
+ try:
+ rel_proj_path = fileutils.get_relative_project_path(proj_path)
+ print(f'Checking {rel_proj_path}. ', end='')
+ updater, metadata = build_updater(proj_path)
+ updater.check()
+
+ current_ver = updater.current_version
+ latest_ver = updater.latest_version
+ print('Current version: {}. Latest version: {}'.format(
+ current_ver, latest_ver),
+ end='')
+
+ has_new_version = current_ver != latest_ver
+ if has_new_version:
+ print(color_string(' Out of date!', Color.STALE))
+ else:
+ print(color_string(' Up to date.', Color.FRESH))
+
+ if update_lib and (has_new_version or args.force):
+ _do_update(args, updater, metadata)
+ return updater
+ except Exception as err:
+ print('{} {}.'.format(color_string('Failed.', Color.ERROR), err))
+ return str(err)
+
+
+def _check_path(args: argparse.Namespace, paths: Iterator[str],
+ delay: int) -> Dict[str, Dict[str, str]]:
+ results = {}
+ for path in paths:
+ res = {}
+ updater = check_and_update(args, Path(path))
+ if isinstance(updater, str):
+ res['error'] = updater
+ else:
+ res['current'] = updater.current_version
+ res['latest'] = updater.latest_version
+ relative_path = fileutils.get_relative_project_path(Path(path))
+ results[str(relative_path)] = res
+ time.sleep(delay)
+ return results
+
+
+def _list_all_metadata() -> Iterator[str]:
+ for path, dirs, files in os.walk(fileutils.EXTERNAL_PATH):
+ if fileutils.METADATA_FILENAME in files:
+ # Skip sub directories.
+ dirs[:] = []
+ yield path
+ dirs.sort(key=lambda d: d.lower())
+
+
+def check(args: argparse.Namespace):
+ """Handler for check command."""
+ paths = _list_all_metadata() if args.all else args.paths
+ results = _check_path(args, paths, args.delay)
+
+ if args.json_output is not None:
+ with Path(args.json_output).open('w') as res_file:
+ json.dump(results, res_file, sort_keys=True, indent=4)
+
+
+def update(args: argparse.Namespace) -> None:
+ """Handler for update command."""
+ check_and_update(args, args.path, update_lib=True)
+
+
+def parse_args() -> argparse.Namespace:
"""Parses commandline arguments."""
parser = argparse.ArgumentParser(
@@ -223,20 +209,23 @@
subparsers.required = True
# Creates parser for check command.
- check_parser = subparsers.add_parser(
- 'check', help='Check update for one project.')
+ check_parser = subparsers.add_parser('check',
+ help='Check update for one project.')
check_parser.add_argument(
- 'paths', nargs='*',
+ 'paths',
+ nargs='*',
help='Paths of the project. '
'Relative paths will be resolved from external/.')
+ check_parser.add_argument('--json_output',
+ help='Path of a json file to write result to.')
check_parser.add_argument(
- '--json_output',
- help='Path of a json file to write result to.')
- check_parser.add_argument(
- '--all', action='store_true',
+ '--all',
+ action='store_true',
help='If set, check updates for all supported projects.')
check_parser.add_argument(
- '--delay', default=0, type=int,
+ '--delay',
+ default=0,
+ type=int,
help='Time in seconds to wait between checking two projects.')
check_parser.set_defaults(func=check)
@@ -250,15 +239,16 @@
'--force',
help='Run update even if there\'s no new version.',
action='store_true')
- update_parser.add_argument(
- '--branch_and_commit', action='store_true',
- help='Starts a new branch and commit changes.')
- update_parser.add_argument(
- '--push_change', action='store_true',
- help='Pushes change to Gerrit.')
- update_parser.add_argument(
- '--remote_name', default='aosp', required=False,
- help='Upstream remote name.')
+ update_parser.add_argument('--branch_and_commit',
+ action='store_true',
+ help='Starts a new branch and commit changes.')
+ update_parser.add_argument('--push_change',
+ action='store_true',
+ help='Pushes change to Gerrit.')
+ update_parser.add_argument('--remote_name',
+ default='aosp',
+ required=False,
+ help='Upstream remote name.')
update_parser.set_defaults(func=update)
return parser.parse_args()
diff --git a/external_updater_test.py b/external_updater_test.py
index 0c82f05..b834fed 100644
--- a/external_updater_test.py
+++ b/external_updater_test.py
@@ -20,7 +20,6 @@
class ExternalUpdaterTest(unittest.TestCase):
"""Unit tests for external updater."""
-
def test_url_selection(self):
"""Tests that GithubArchiveUpdater can choose the right url."""
prefix = "https://github.com/author/project/"
diff --git a/fileutils.py b/fileutils.py
index e0a0f66..bd5c29c 100644
--- a/fileutils.py
+++ b/fileutils.py
@@ -15,38 +15,39 @@
import datetime
import os
+from pathlib import Path
-from google.protobuf import text_format # pylint: disable=import-error
+# pylint: disable=import-error
+from google.protobuf import text_format # type: ignore
-import metadata_pb2 # pylint: disable=import-error
+# pylint: disable=import-error
+import metadata_pb2 # type: ignore
-ANDROID_TOP = os.environ.get('ANDROID_BUILD_TOP', os.getcwd())
-EXTERNAL_PATH = os.path.join(ANDROID_TOP, 'external/')
+ANDROID_TOP = Path(os.environ.get('ANDROID_BUILD_TOP', os.getcwd()))
+EXTERNAL_PATH = ANDROID_TOP / 'external'
METADATA_FILENAME = 'METADATA'
-def get_absolute_project_path(project_path):
+def get_absolute_project_path(project_path) -> Path:
"""Gets absolute path of a project.
Path resolution starts from external/.
"""
- return os.path.join(EXTERNAL_PATH, project_path)
+ return EXTERNAL_PATH / project_path
-def get_metadata_path(project_path):
+def get_metadata_path(project_path) -> Path:
"""Gets the absolute path of METADATA for a project."""
- return os.path.join(
- get_absolute_project_path(project_path), METADATA_FILENAME)
+ return get_absolute_project_path(project_path) / METADATA_FILENAME
-def get_relative_project_path(project_path):
+def get_relative_project_path(project_path) -> Path:
"""Gets the relative path of a project starting from external/."""
- project_path = get_absolute_project_path(project_path)
- return os.path.relpath(project_path, EXTERNAL_PATH)
+ return get_absolute_project_path(project_path).relative_to(EXTERNAL_PATH)
-def read_metadata(proj_path):
+def read_metadata(proj_path) -> metadata_pb2.MetaData:
"""Reads and parses METADATA file for a project.
Args:
@@ -60,12 +61,12 @@
FileNotFoundError: Occurred when METADATA file is not found.
"""
- with open(get_metadata_path(proj_path), 'r') as metadata_file:
+ with get_metadata_path(proj_path).open('r') as metadata_file:
metadata = metadata_file.read()
return text_format.Parse(metadata, metadata_pb2.MetaData())
-def write_metadata(proj_path, metadata):
+def write_metadata(proj_path, metadata) -> None:
"""Writes updated METADATA file for a project.
This function updates last_upgrade_date in metadata and write to the project
@@ -82,5 +83,5 @@
date.month = now.month
date.day = now.day
text_metadata = text_format.MessageToString(metadata)
- with open(get_metadata_path(proj_path), 'w') as metadata_file:
+ with get_metadata_path(proj_path).open('w') as metadata_file:
metadata_file.write(text_metadata)
diff --git a/git_updater.py b/git_updater.py
index eee64f1..7e94890 100644
--- a/git_updater.py
+++ b/git_updater.py
@@ -13,105 +13,64 @@
# limitations under the License.
"""Module to check updates from Git upstream."""
-
-import datetime
-
-import fileutils
+import base_updater
import git_utils
-import metadata_pb2 # pylint: disable=import-error
+# pylint: disable=import-error
+import metadata_pb2 # type: ignore
import updater_utils
-class GitUpdater():
+class GitUpdater(base_updater.Updater):
"""Updater for Git upstream."""
+ upstream_remote_name: str
+ android_remote_name: str
- def __init__(self, url, proj_path, metadata):
- if url.type != metadata_pb2.URL.GIT:
- raise ValueError('Only support GIT upstream.')
- self.proj_path = proj_path
- self.metadata = metadata
- self.upstream_url = url
- self.upstream_remote_name = None
- self.android_remote_name = None
- self.new_version = None
+ def is_supported_url(self) -> bool:
+ return self._old_url.type == metadata_pb2.URL.GIT
- def _setup_remote(self):
- remotes = git_utils.list_remotes(self.proj_path)
+ def _setup_remote(self) -> None:
+ remotes = git_utils.list_remotes(self._proj_path)
+ upstream_remote_name = None
for name, url in remotes.items():
- if url == self.upstream_url.value:
- self.upstream_remote_name = name
+ if url == self._old_url.value:
+ upstream_remote_name = name
# Guess android remote name.
if '/platform/external/' in url:
self.android_remote_name = name
- if self.upstream_remote_name is None:
- self.upstream_remote_name = "update_origin"
- git_utils.add_remote(self.proj_path, self.upstream_remote_name,
- self.upstream_url.value)
+ if upstream_remote_name is None:
+ upstream_remote_name = "update_origin"
+ git_utils.add_remote(self._proj_path, upstream_remote_name,
+ self._old_url.value)
+ self.upstream_remote_name = upstream_remote_name
- git_utils.fetch(self.proj_path,
+ git_utils.fetch(self._proj_path,
[self.upstream_remote_name, self.android_remote_name])
- def check(self):
+ def check(self) -> None:
"""Checks upstream and returns whether a new version is available."""
-
self._setup_remote()
- if git_utils.is_commit(self.metadata.third_party.version):
+ if git_utils.is_commit(self._old_ver):
# Update to remote head.
self._check_head()
else:
# Update to latest version tag.
self._check_tag()
- def get_current_version(self):
- """Returns the latest version name recorded in METADATA."""
- return self.metadata.third_party.version
-
- def get_latest_version(self):
- """Returns the latest version name in upstream."""
- return self.new_version
-
def _check_tag(self):
- tags = git_utils.list_remote_tags(self.proj_path,
+ tags = git_utils.list_remote_tags(self._proj_path,
self.upstream_remote_name)
- current_ver = self.get_current_version()
- self.new_version = updater_utils.get_latest_version(
- current_ver, tags)
- print('Current version: {}. Latest version: {}'.format(
- current_ver, self.new_version), end='')
+ self._new_ver = updater_utils.get_latest_version(self._old_ver, tags)
def _check_head(self):
- commits = git_utils.get_commits_ahead(
- self.proj_path, self.upstream_remote_name + '/master',
- self.android_remote_name + '/master')
+ self._new_ver = git_utils.get_sha_for_branch(
+ self._proj_path, self.upstream_remote_name + '/master')
- if not commits:
- self.new_version = self.get_current_version()
- return
-
- self.new_version = commits[0]
-
- commit_time = git_utils.get_commit_time(self.proj_path, commits[-1])
- time_behind = datetime.datetime.now() - commit_time
- print('{} commits ({} days) behind.'.format(
- len(commits), time_behind.days), end='')
-
- def _write_metadata(self, path):
- updated_metadata = metadata_pb2.MetaData()
- updated_metadata.CopyFrom(self.metadata)
- updated_metadata.third_party.version = self.new_version
- fileutils.write_metadata(path, updated_metadata)
-
- def update(self):
+ def update(self) -> None:
"""Updates the package.
Has to call check() before this function.
"""
- upstream_branch = self.upstream_remote_name + '/master'
-
- print("Running `git merge {merge_branch}`..."
- .format(merge_branch=self.new_version))
- git_utils.merge(self.proj_path, self.new_version)
- self._write_metadata(self.proj_path)
- git_utils.add_file(self.proj_path, 'METADATA')
+ print(f"Running `git merge {self._new_ver}`...")
+ git_utils.merge(self._proj_path, self._new_ver)
diff --git a/git_utils.py b/git_utils.py
index 7b78b21..bdbc05c 100644
--- a/git_utils.py
+++ b/git_utils.py
@@ -11,21 +11,21 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-'''Helper functions to communicate with Git.'''
+"""Helper functions to communicate with Git."""
import datetime
import re
import subprocess
+from pathlib import Path
+from typing import Dict, List, Tuple
-def _run(cmd, cwd, redirect=True):
- """Runs a command with stdout and stderr redirected."""
- out = subprocess.PIPE if redirect else None
- return subprocess.run(cmd, stdout=out, stderr=out,
- check=True, cwd=cwd)
+def _run(cmd: List[str], cwd: Path) -> str:
+ """Runs a command and returns its output."""
+ return subprocess.check_output(cmd, text=True, cwd=cwd)
-def fetch(proj_path, remote_names):
+def fetch(proj_path: Path, remote_names: List[str]) -> None:
"""Runs git fetch.
Args:
@@ -35,7 +35,7 @@
_run(['git', 'fetch', '--multiple'] + remote_names, cwd=proj_path)
-def add_remote(proj_path, name, url):
+def add_remote(proj_path: Path, name: str, url: str) -> None:
"""Adds a git remote.
Args:
@@ -46,7 +46,7 @@
_run(['git', 'remote', 'add', name, url], cwd=proj_path)
-def list_remotes(proj_path):
+def list_remotes(proj_path: Path) -> Dict[str, str]:
"""Lists all Git remotes.
Args:
@@ -55,55 +55,58 @@
Returns:
A dict from remote name to remote url.
"""
+ def parse_remote(line: str) -> Tuple[str, str]:
+ split = line.split()
+ return (split[0], split[1])
+
out = _run(['git', 'remote', '-v'], proj_path)
- lines = out.stdout.decode('utf-8').splitlines()
- return dict([line.split()[0:2] for line in lines])
+ lines = out.splitlines()
+ return dict([parse_remote(line) for line in lines])
-def get_commits_ahead(proj_path, branch, base_branch):
+def get_sha_for_branch(proj_path: Path, branch: str) -> str:
+ """Gets the hash SHA for a branch."""
+ return _run(['git', 'rev-parse', branch], proj_path).strip()
+
+
+def get_commits_ahead(proj_path: Path, branch: str,
+ base_branch: str) -> List[str]:
"""Lists commits in `branch` but not `base_branch`."""
- out = _run(['git', 'rev-list', '--left-only', '--ancestry-path',
- '{}...{}'.format(branch, base_branch)],
- proj_path)
- return out.stdout.decode('utf-8').splitlines()
+ out = _run([
+ 'git', 'rev-list', '--left-only', '--ancestry-path', '{}...{}'.format(
+ branch, base_branch)
+ ], proj_path)
+ return out.splitlines()
-def get_commit_time(proj_path, commit):
+def get_commit_time(proj_path: Path, commit: str) -> datetime.datetime:
"""Gets commit time of one commit."""
out = _run(['git', 'show', '-s', '--format=%ct', commit], cwd=proj_path)
- return datetime.datetime.fromtimestamp(int(out.stdout))
+ return datetime.datetime.fromtimestamp(int(out.strip()))
-def list_remote_branches(proj_path, remote_name):
+def list_remote_branches(proj_path: Path, remote_name: str) -> List[str]:
"""Lists all branches for a remote."""
- out = _run(['git', 'branch', '-r'], cwd=proj_path)
- lines = out.stdout.decode('utf-8').splitlines()
+ lines = _run(['git', 'branch', '-r'], cwd=proj_path).splitlines()
stripped = [line.strip() for line in lines]
remote_path = remote_name + '/'
- remote_path_len = len(remote_path)
- return [line[remote_path_len:] for line in stripped
- if line.startswith(remote_path)]
+ return [
+        line[len(remote_path):] for line in stripped
+ if line.startswith(remote_path)
+ ]
-def _parse_remote_tag(line):
- tag_prefix = 'refs/tags/'
- tag_suffix = '^{}'
- try:
- line = line[line.index(tag_prefix):]
- except ValueError:
- return None
- line = line[len(tag_prefix):]
- if line.endswith(tag_suffix):
- line = line[:-len(tag_suffix)]
- return line
-
-
-def list_remote_tags(proj_path, remote_name):
+def list_remote_tags(proj_path: Path, remote_name: str) -> List[str]:
"""Lists all tags for a remote."""
- out = _run(['git', "ls-remote", "--tags", remote_name],
- cwd=proj_path)
- lines = out.stdout.decode('utf-8').splitlines()
- tags = [_parse_remote_tag(line) for line in lines]
+ def parse_remote_tag(line: str) -> str:
+ tag_prefix = 'refs/tags/'
+ tag_suffix = '^{}'
+        line = line[line.index(tag_prefix) + len(tag_prefix):]
+        return line[:-len(tag_suffix)] if line.endswith(tag_suffix) else line
+
+ lines = _run(['git', "ls-remote", "--tags", remote_name],
+ cwd=proj_path).splitlines()
+ tags = [parse_remote_tag(line) for line in lines]
return list(set(tags))
@@ -111,48 +114,46 @@
COMMIT_RE = re.compile(COMMIT_PATTERN)
-def is_commit(commit):
+def is_commit(commit: str) -> bool:
"""Whether a string looks like a SHA1 hash."""
return bool(COMMIT_RE.match(commit))
-def merge(proj_path, branch):
+def merge(proj_path: Path, branch: str) -> None:
"""Merges a branch."""
try:
- out = _run(['git', 'merge', branch, '--no-commit'],
- cwd=proj_path)
+ _run(['git', 'merge', branch, '--no-commit'], cwd=proj_path)
except subprocess.CalledProcessError:
# Merge failed. Error is already written to console.
- subprocess.run(['git', 'merge', '--abort'], cwd=proj_path)
+ _run(['git', 'merge', '--abort'], cwd=proj_path)
raise
-def add_file(proj_path, file_name):
+def add_file(proj_path: Path, file_name: str) -> None:
"""Stages a file."""
_run(['git', 'add', file_name], cwd=proj_path)
-def delete_branch(proj_path, branch_name):
+def delete_branch(proj_path: Path, branch_name: str) -> None:
"""Force delete a branch."""
_run(['git', 'branch', '-D', branch_name], cwd=proj_path)
-def start_branch(proj_path, branch_name):
+def start_branch(proj_path: Path, branch_name: str) -> None:
"""Starts a new repo branch."""
_run(['repo', 'start', branch_name], cwd=proj_path)
-def commit(proj_path, message):
+def commit(proj_path: Path, message: str) -> None:
"""Commits changes."""
_run(['git', 'commit', '-m', message], cwd=proj_path)
-def checkout(proj_path, branch_name):
+def checkout(proj_path: Path, branch_name: str) -> None:
"""Checkouts a branch."""
_run(['git', 'checkout', branch_name], cwd=proj_path)
-def push(proj_path, remote_name):
+def push(proj_path: Path, remote_name: str) -> None:
"""Pushes change to remote."""
- return _run(['git', 'push', remote_name, 'HEAD:refs/for/master'],
- cwd=proj_path, redirect=False)
+ _run(['git', 'push', remote_name, 'HEAD:refs/for/master'], cwd=proj_path)
diff --git a/github_archive_updater.py b/github_archive_updater.py
index 17e1925..d5a4e29 100644
--- a/github_archive_updater.py
+++ b/github_archive_updater.py
@@ -13,24 +13,25 @@
# limitations under the License.
"""Module to update packages from GitHub archive."""
-
import json
import re
import time
import urllib.request
+import urllib.error
+from typing import List, Optional, Tuple
import archive_utils
-import fileutils
+from base_updater import Updater
import git_utils
-import metadata_pb2 # pylint: disable=import-error
+import metadata_pb2 # type: ignore
import updater_utils
-GITHUB_URL_PATTERN = (r'^https:\/\/github.com\/([-\w]+)\/([-\w]+)\/' +
- r'(releases\/download\/|archive\/)')
-GITHUB_URL_RE = re.compile(GITHUB_URL_PATTERN)
+GITHUB_URL_PATTERN: str = (r'^https:\/\/github.com\/([-\w]+)\/([-\w]+)\/' +
+ r'(releases\/download\/|archive\/)')
+GITHUB_URL_RE: re.Pattern = re.compile(GITHUB_URL_PATTERN)
-def _edit_distance(str1, str2):
+def _edit_distance(str1: str, str2: str) -> int:
prev = list(range(0, len(str2) + 1))
for i, chr1 in enumerate(str1):
cur = [i + 1]
@@ -43,7 +44,7 @@
return prev[len(str2)]
-def choose_best_url(urls, previous_url):
+def choose_best_url(urls: List[str], previous_url: str) -> str:
"""Returns the best url to download from a list of candidate urls.
This function calculates similarity between previous url and each of new
@@ -58,54 +59,47 @@
Returns:
One url from `urls`.
"""
- return min(urls, default=None,
- key=lambda url: _edit_distance(
- url, previous_url))
+ return min(urls,
+ default="",
+ key=lambda url: _edit_distance(url, previous_url))
-class GithubArchiveUpdater():
+class GithubArchiveUpdater(Updater):
"""Updater for archives from GitHub.
This updater supports release archives in GitHub. Version is determined by
release name in GitHub.
"""
- VERSION_FIELD = 'tag_name'
+ VERSION_FIELD: str = 'tag_name'
+ owner: str
+ repo: str
- def __init__(self, url, proj_path, metadata):
- self.proj_path = proj_path
- self.metadata = metadata
- self.old_url = url
- self.owner = None
- self.repo = None
- self.new_version = None
- self.new_url = None
- self._parse_url(url)
-
- def _parse_url(self, url):
- if url.type != metadata_pb2.URL.ARCHIVE:
- raise ValueError('Only archive url from Github is supported.')
- match = GITHUB_URL_RE.match(url.value)
+ def is_supported_url(self) -> bool:
+ if self._old_url.type != metadata_pb2.URL.ARCHIVE:
+ return False
+ match = GITHUB_URL_RE.match(self._old_url.value)
if match is None:
- raise ValueError('Url format is not supported.')
+ return False
try:
self.owner, self.repo = match.group(1, 2)
except IndexError:
- raise ValueError('Url format is not supported.')
+ return False
+ return True
- def _fetch_latest_release(self):
- url = 'https://api.github.com/repos/{}/{}/releases/latest'.format(
- self.owner, self.repo)
+ def _fetch_latest_release(self) -> Optional[Tuple[str, List[str]]]:
+ url = f'https://api.github.com/repos/{self.owner}/{self.repo}/releases/latest'
try:
with urllib.request.urlopen(url) as request:
data = json.loads(request.read().decode())
- except urllib.error.HTTPError as e:
- if e.code == 404:
+ except urllib.error.HTTPError as err:
+ if err.code == 404:
return None
raise
supported_assets = [
a['browser_download_url'] for a in data['assets']
- if archive_utils.is_supported_archive(a['browser_download_url'])]
+ if archive_utils.is_supported_archive(a['browser_download_url'])
+ ]
return (data[self.VERSION_FIELD], supported_assets)
def _fetch_latest_tag(self):
@@ -115,69 +109,45 @@
for page in range(1, 21):
# Sleeps 10s to avoid rate limit.
time.sleep(10)
- url = 'https://api.github.com/repos/{}/{}/tags?page={}'.format(
- self.owner, self.repo, page)
+ url = f'https://api.github.com/repos/{self.owner}/{self.repo}/tags?page={page}'
with urllib.request.urlopen(url) as request:
data = json.loads(request.read().decode())
if len(data) == 0:
break
tags.extend(d['name'] for d in data)
- return (updater_utils.get_latest_version(self.get_current_version(), tags), [])
+ return (updater_utils.get_latest_version(self._old_ver, tags), [])
- def _fetch_latest_version(self):
+ def _fetch_latest_version(self) -> None:
"""Checks upstream and gets the latest release tag."""
- self.new_version, urls = self._fetch_latest_release() or self._fetch_latest_tag()
+ self._new_ver, urls = self._fetch_latest_release(
+ ) or self._fetch_latest_tag()
# Adds source code urls.
- urls.append(
- 'https://github.com/{}/{}/archive/{}.tar.gz'.format(
- self.owner, self.repo, self.new_version))
- urls.append(
- 'https://github.com/{}/{}/archive/{}.zip'.format(
- self.owner, self.repo, self.new_version))
+ urls.append('https://github.com/{}/{}/archive/{}.tar.gz'.format(
+ self.owner, self.repo, self._new_ver))
+ urls.append('https://github.com/{}/{}/archive/{}.zip'.format(
+ self.owner, self.repo, self._new_ver))
- self.new_url = choose_best_url(urls, self.old_url.value)
+ self._new_url.value = choose_best_url(urls, self._old_url.value)
- def _fetch_latest_commit(self):
+ def _fetch_latest_commit(self) -> None:
"""Checks upstream and gets the latest commit to master."""
- url = 'https://api.github.com/repos/{}/{}/commits/master'.format(
- self.owner, self.repo)
+ url = f'https://api.github.com/repos/{self.owner}/{self.repo}/commits/master'
with urllib.request.urlopen(url) as request:
data = json.loads(request.read().decode())
- self.new_version = data['sha']
- self.new_url = 'https://github.com/{}/{}/archive/{}.zip'.format(
- self.owner, self.repo, self.new_version)
-
- def get_current_version(self):
- """Returns the latest version name recorded in METADATA."""
- return self.metadata.third_party.version
-
- def get_latest_version(self):
- """Returns the latest version name in upstream."""
- return self.new_version
-
- def _write_metadata(self, url, path):
- updated_metadata = metadata_pb2.MetaData()
- updated_metadata.CopyFrom(self.metadata)
- updated_metadata.third_party.version = self.new_version
- for metadata_url in updated_metadata.third_party.url:
- if metadata_url == self.old_url:
- metadata_url.value = url
- fileutils.write_metadata(path, updated_metadata)
+ self._new_ver = data['sha']
+ self._new_url.value = f'https://github.com/{self.owner}/{self.repo}/archive/{self._new_ver}.zip'
def check(self):
"""Checks update for package.
Returns True if a new version is available.
"""
- current = self.get_current_version()
- if git_utils.is_commit(current):
+ if git_utils.is_commit(self._old_ver):
self._fetch_latest_commit()
else:
self._fetch_latest_version()
- print('Current version: {}. Latest version: {}'.format(
- current, self.new_version), end='')
def update(self):
"""Updates the package.
@@ -186,10 +156,9 @@
"""
temporary_dir = None
try:
- temporary_dir = archive_utils.download_and_extract(self.new_url)
+ temporary_dir = archive_utils.download_and_extract(self._new_url.value)
package_dir = archive_utils.find_archive_root(temporary_dir)
- self._write_metadata(self.new_url, package_dir)
- updater_utils.replace_package(package_dir, self.proj_path)
+ updater_utils.replace_package(package_dir, self._proj_path)
finally:
# Don't remove the temporary directory, or it'll be impossible
# to debug the failure...
diff --git a/updater_utils.py b/updater_utils.py
index e871e49..02f227a 100644
--- a/updater_utils.py
+++ b/updater_utils.py
@@ -33,16 +33,15 @@
ValueError: Occurred when there's no updater for all urls.
"""
for url in metadata.third_party.url:
- for updater in updaters:
- try:
- return updater(url, proj_path, metadata)
- except ValueError:
- pass
+ for updater_cls in updaters:
+ updater = updater_cls(proj_path, url, metadata.third_party.version)
+ if updater.is_supported_url():
+ return updater
raise ValueError('No supported URL.')
-def replace_package(source_dir, target_dir):
+def replace_package(source_dir, target_dir) -> None:
"""Invokes a shell script to prepare and update a project.
Args:
@@ -51,16 +50,14 @@
"""
print('Updating {} using {}.'.format(target_dir, source_dir))
- script_path = os.path.join(
- os.path.dirname(
- sys.argv[0]),
- 'update_package.sh')
+ script_path = os.path.join(os.path.dirname(sys.argv[0]),
+ 'update_package.sh')
subprocess.check_call(['bash', script_path, source_dir, target_dir])
+
VERSION_SPLITTER_PATTERN = r'[\.\-_]'
-VERSION_PATTERN = (r'^(?P<prefix>[^\d]*)' +
- r'(?P<version>\d+(' + VERSION_SPLITTER_PATTERN + r'\d+)*)' +
- r'(?P<suffix>.*)$')
+VERSION_PATTERN = (r'^(?P<prefix>[^\d]*)' + r'(?P<version>\d+(' +
+ VERSION_SPLITTER_PATTERN + r'\d+)*)' + r'(?P<suffix>.*)$')
VERSION_RE = re.compile(VERSION_PATTERN)
VERSION_SPLITTER_RE = re.compile(VERSION_SPLITTER_PATTERN)
@@ -70,7 +67,7 @@
if match is None:
raise ValueError('Invalid version.')
try:
- prefix, version, suffix = match.group('prefix', 'version', 'suffix')
+ prefix, version, suffix = match.group('prefix', 'version', 'suffix')
version = [int(v) for v in VERSION_SPLITTER_RE.split(version)]
return (version, prefix, suffix)
except IndexError:
@@ -97,10 +94,10 @@
"""
parsed_current_ver = _parse_version(current_version)
- latest = max(version_list,
- key=lambda ver: _match_and_get_version(
- parsed_current_ver, ver),
- default=[])
+ latest = max(
+ version_list,
+ key=lambda ver: _match_and_get_version(parsed_current_ver, ver),
+ default=[])
if not latest:
raise ValueError('No matching version.')
return latest