Upgrade python-api-core to v2.24.1
This project was upgraded with external_updater.
Usage: tools/external_updater/updater.sh update external/python/python-api-core
For more info, check https://cs.android.com/android/platform/superproject/main/+/main:tools/external_updater/README.md
Test: TreeHugger
Change-Id: I86c1c34c9331d0db3b1c765ebfeb8cb32e0ae57d
diff --git a/.coveragerc b/.coveragerc
index d097511..34417c3 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -11,3 +11,5 @@
def __repr__
# Ignore abstract methods
raise NotImplementedError
+ # Ignore coverage for code specific to static type checkers
+ TYPE_CHECKING
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 7519fa3..4c0027f 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,17 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7
+ digest: sha256:04c35dc5f49f0f503a306397d6d043685f8d2bb822ab515818c4208d7fb2db3a
+# created: 2025-01-16T15:24:11.364245182Z
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index ee81891..1b023b7 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -8,5 +8,5 @@
# @googleapis/yoshi-python @googleapis/actools-python are the default owners for changes in this repo
* @googleapis/yoshi-python @googleapis/actools-python
-# @googleapis/python-samples-owners @googleapis/actools-python are the default owners for samples changes
-/samples/ @googleapis/python-samples-owners @googleapis/actools-python
+# @googleapis/python-samples-reviewers @googleapis/actools-python are the default owners for samples changes
+/samples/ @googleapis/python-samples-reviewers @googleapis/actools-python
diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml
new file mode 100644
index 0000000..311ebbb
--- /dev/null
+++ b/.github/auto-approve.yml
@@ -0,0 +1,3 @@
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve
+processes:
+ - "OwlBotTemplateChanges"
diff --git a/google/__init__.py b/.github/auto-label.yaml
similarity index 63%
rename from google/__init__.py
rename to .github/auto-label.yaml
index 9f1d549..21786a4 100644
--- a/google/__init__.py
+++ b/.github/auto-label.yaml
@@ -1,4 +1,4 @@
-# Copyright 2016 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,15 +11,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+requestsize:
+ enabled: true
-"""Google namespace package."""
-
-try:
- import pkg_resources
-
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
-
- # See: https://github.com/python/mypy/issues/1422
- __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore
+path:
+ pullrequest: true
+ paths:
+ samples: "samples"
diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml
new file mode 100644
index 0000000..1618464
--- /dev/null
+++ b/.github/blunderbuss.yml
@@ -0,0 +1,17 @@
+# Blunderbuss config
+#
+# This file controls who is assigned for pull requests and issues.
+# Note: This file is autogenerated. To make changes to the assignee
+# team, please update `codeowner_team` in `.repo-metadata.json`.
+assign_issues:
+ - googleapis/actools-python
+
+assign_issues_by:
+ - labels:
+ - "samples"
+ to:
+ - googleapis/python-samples-reviewers
+ - googleapis/actools-python
+
+assign_prs:
+ - googleapis/actools-python
diff --git a/.github/cherry-pick-bot.yml b/.github/cherry-pick-bot.yml
new file mode 100644
index 0000000..1e9cfcd
--- /dev/null
+++ b/.github/cherry-pick-bot.yml
@@ -0,0 +1,2 @@
+enabled: true
+
diff --git a/.github/release-please.yml b/.github/release-please.yml
index 4507ad0..29601ad 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -1 +1,11 @@
releaseType: python
+handleGHRelease: true
+# NOTE: this section is generated by synthtool.languages.python
+# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py
+branches:
+- branch: v1
+ handleGHRelease: true
+ releaseType: python
+- branch: v0
+ handleGHRelease: true
+ releaseType: python
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
new file mode 100644
index 0000000..50e8bd3
--- /dev/null
+++ b/.github/release-trigger.yml
@@ -0,0 +1,2 @@
+enabled: true
+multiScmName: python-api-core
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index e621885..a19b27a 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -1,3 +1,34 @@
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings
+# Rules for main branch protection
+branchProtectionRules:
+# Identifies the protection rule pattern. Name of the branch to be protected.
+# Defaults to `main`
+- pattern: main
+ requiresCodeOwnerReviews: true
+ requiresStrictStatusChecks: true
+ requiredStatusCheckContexts:
+ - 'cla/google'
+ # No Kokoro: the following are Github actions
+ - 'lint'
+ - 'mypy'
+ - 'unit_grpc_gcp-3.7'
+ - 'unit_grpc_gcp-3.8'
+ - 'unit_grpc_gcp-3.9'
+ - 'unit_grpc_gcp-3.10'
+ - 'unit_grpc_gcp-3.11'
+ - 'unit_grpc_gcp-3.12'
+ - 'unit-3.7'
+ - 'unit-3.8'
+ - 'unit-3.9'
+ - 'unit-3.10'
+ - 'unit-3.11'
+ - 'unit-3.12'
+ - 'unit_wo_grpc-3.10'
+ - 'unit_wo_grpc-3.11'
+ - 'unit_wo_grpc-3.12'
+ - 'cover'
+ - 'docs'
+ - 'docfx'
permissionRules:
- team: actools-python
permission: admin
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 0000000..2833fe9
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,38 @@
+on:
+ pull_request:
+ branches:
+ - main
+name: docs
+jobs:
+ docs:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run docs
+ run: |
+ nox -s docs
+ docfx:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run docfx
+ run: |
+ nox -s docfx
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 0000000..1051da0
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,25 @@
+on:
+ pull_request:
+ branches:
+ - main
+name: lint
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run lint
+ run: |
+ nox -s lint
+ - name: Run lint_setup_py
+ run: |
+ nox -s lint_setup_py
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
new file mode 100644
index 0000000..e6a7929
--- /dev/null
+++ b/.github/workflows/mypy.yml
@@ -0,0 +1,22 @@
+on:
+ pull_request:
+ branches:
+ - main
+name: mypy
+jobs:
+ mypy:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run mypy
+ run: |
+ nox -s mypy
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
new file mode 100644
index 0000000..4e4fa25
--- /dev/null
+++ b/.github/workflows/unittest.yml
@@ -0,0 +1,80 @@
+name: "Unit tests"
+
+on:
+ pull_request:
+ branches:
+ - main
+
+jobs:
+ run-unittests:
+ name: unit${{ matrix.option }}-${{ matrix.python }}
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+ # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+ # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
+ runs-on: ubuntu-22.04
+ strategy:
+ matrix:
+ option: ["", "_grpc_gcp", "_wo_grpc", "_w_prerelease_deps", "_w_async_rest_extra"]
+ python:
+ - "3.7"
+ - "3.8"
+ - "3.9"
+ - "3.10"
+ - "3.11"
+ - "3.12"
+ - "3.13"
+ exclude:
+ - option: "_wo_grpc"
+ python: 3.7
+ - option: "_wo_grpc"
+ python: 3.8
+ - option: "_wo_grpc"
+ python: 3.9
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python }}
+ - name: Install nox
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install nox
+ - name: Run unit tests
+ env:
+ COVERAGE_FILE: .coverage${{ matrix.option }}-${{matrix.python }}
+ run: |
+ nox -s unit${{ matrix.option }}-${{ matrix.python }}
+ - name: Upload coverage results
+ uses: actions/upload-artifact@v4
+ with:
+ name: coverage-artifact-${{ matrix.option }}-${{ matrix.python }}
+ path: .coverage${{ matrix.option }}-${{ matrix.python }}
+ include-hidden-files: true
+
+ report-coverage:
+ name: cover
+ runs-on: ubuntu-latest
+ needs:
+ - run-unittests
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+ - name: Install coverage
+ run: |
+ python -m pip install --upgrade setuptools pip wheel
+ python -m pip install coverage
+ - name: Download coverage results
+ uses: actions/download-artifact@v4
+ with:
+ path: .coverage-results/
+ - name: Report coverage results
+ run: |
+ find .coverage-results -type f -name '*.zip' -exec unzip {} \;
+ coverage combine .coverage-results/**/.coverage*
+ coverage report --show-missing --fail-under=100
diff --git a/.gitignore b/.gitignore
index 99c3a14..168b201 100644
--- a/.gitignore
+++ b/.gitignore
@@ -51,6 +51,7 @@
# Virtual environment
env/
+venv/
# Test logs
coverage.xml
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 0394c8a..f05e867 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2018 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -33,13 +33,6 @@
# Setup project id.
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
-# Remove old nox
-python3 -m pip uninstall --yes --quiet nox-automation
-
-# Install nox
-python3 -m pip install --upgrade --quiet nox
-python3 -m nox --version
-
# If this is a continuous build, send the test log to the FlakyBot.
# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg
new file mode 100644
index 0000000..3595fb4
--- /dev/null
+++ b/.kokoro/continuous/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "prerelease_deps"
+}
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
index 4e1b1fb..e5410e2 100644
--- a/.kokoro/docker/docs/Dockerfile
+++ b/.kokoro/docker/docs/Dockerfile
@@ -1,4 +1,4 @@
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from ubuntu:20.04
+from ubuntu:24.04
ENV DEBIAN_FRONTEND noninteractive
@@ -40,7 +40,6 @@
libssl-dev \
libsqlite3-dev \
portaudio19-dev \
- python3-distutils \
redis-server \
software-properties-common \
ssh \
@@ -60,8 +59,31 @@
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /var/cache/apt/archives/*.deb
+
+###################### Install python 3.10.14 for docs/docfx session
+
+# Download python 3.10.14
+RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz
+
+# Extract files
+RUN tar -xvf Python-3.10.14.tgz
+
+# Install python 3.10.14
+RUN ./Python-3.10.14/configure --enable-optimizations
+RUN make altinstall
+
+ENV PATH /usr/local/bin/python3.10:$PATH
+
+###################### Install pip
RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
- && python3.8 /tmp/get-pip.py \
+ && python3.10 /tmp/get-pip.py \
&& rm /tmp/get-pip.py
-CMD ["python3.8"]
+# Test pip
+RUN python3.10 -m pip
+
+# Install build requirements
+COPY requirements.txt /requirements.txt
+RUN python3.10 -m pip install --require-hashes -r requirements.txt
+
+CMD ["python3.10"]
diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in
new file mode 100644
index 0000000..586bd07
--- /dev/null
+++ b/.kokoro/docker/docs/requirements.in
@@ -0,0 +1,2 @@
+nox
+gcp-docuploader
diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt
new file mode 100644
index 0000000..a9360a2
--- /dev/null
+++ b/.kokoro/docker/docs/requirements.txt
@@ -0,0 +1,297 @@
+#
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
+#
+# pip-compile --allow-unsafe --generate-hashes requirements.in
+#
+argcomplete==3.5.3 \
+ --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \
+ --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392
+ # via nox
+cachetools==5.5.0 \
+ --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \
+ --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a
+ # via google-auth
+certifi==2024.12.14 \
+ --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \
+ --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db
+ # via requests
+charset-normalizer==3.4.1 \
+ --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \
+ --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \
+ --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \
+ --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \
+ --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \
+ --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \
+ --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \
+ --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \
+ --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \
+ --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \
+ --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \
+ --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \
+ --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \
+ --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \
+ --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \
+ --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \
+ --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \
+ --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \
+ --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \
+ --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \
+ --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \
+ --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \
+ --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \
+ --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \
+ --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \
+ --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \
+ --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \
+ --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \
+ --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \
+ --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \
+ --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \
+ --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \
+ --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \
+ --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \
+ --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \
+ --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \
+ --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \
+ --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \
+ --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \
+ --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \
+ --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \
+ --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \
+ --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \
+ --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \
+ --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \
+ --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \
+ --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \
+ --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \
+ --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \
+ --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \
+ --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \
+ --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \
+ --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \
+ --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \
+ --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \
+ --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \
+ --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \
+ --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \
+ --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \
+ --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \
+ --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \
+ --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \
+ --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \
+ --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \
+ --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \
+ --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \
+ --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \
+ --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \
+ --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \
+ --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \
+ --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \
+ --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \
+ --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \
+ --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \
+ --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \
+ --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \
+ --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \
+ --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \
+ --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \
+ --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \
+ --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \
+ --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \
+ --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \
+ --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \
+ --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \
+ --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \
+ --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \
+ --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \
+ --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \
+ --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \
+ --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
+ --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
+ # via requests
+click==8.1.8 \
+ --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \
+ --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a
+ # via gcp-docuploader
+colorlog==6.9.0 \
+ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \
+ --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2
+ # via
+ # gcp-docuploader
+ # nox
+distlib==0.3.9 \
+ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \
+ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403
+ # via virtualenv
+filelock==3.16.1 \
+ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \
+ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435
+ # via virtualenv
+gcp-docuploader==0.6.5 \
+ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \
+ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea
+ # via -r requirements.in
+google-api-core==2.24.0 \
+ --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \
+ --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf
+ # via
+ # google-cloud-core
+ # google-cloud-storage
+google-auth==2.37.0 \
+ --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \
+ --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0
+ # via
+ # google-api-core
+ # google-cloud-core
+ # google-cloud-storage
+google-cloud-core==2.4.1 \
+ --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \
+ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61
+ # via google-cloud-storage
+google-cloud-storage==2.19.0 \
+ --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \
+ --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2
+ # via gcp-docuploader
+google-crc32c==1.6.0 \
+ --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \
+ --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \
+ --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \
+ --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \
+ --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \
+ --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \
+ --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \
+ --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \
+ --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \
+ --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \
+ --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \
+ --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \
+ --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \
+ --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \
+ --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \
+ --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \
+ --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \
+ --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \
+ --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \
+ --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \
+ --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \
+ --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \
+ --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \
+ --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \
+ --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \
+ --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \
+ --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4
+ # via
+ # google-cloud-storage
+ # google-resumable-media
+google-resumable-media==2.7.2 \
+ --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \
+ --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0
+ # via google-cloud-storage
+googleapis-common-protos==1.66.0 \
+ --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \
+ --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed
+ # via google-api-core
+idna==3.10 \
+ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
+ --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
+ # via requests
+nox==2024.10.9 \
+ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \
+ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95
+ # via -r requirements.in
+packaging==24.2 \
+ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
+ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
+ # via nox
+platformdirs==4.3.6 \
+ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \
+ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb
+ # via virtualenv
+proto-plus==1.25.0 \
+ --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \
+ --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91
+ # via google-api-core
+protobuf==5.29.3 \
+ --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \
+ --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \
+ --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \
+ --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \
+ --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \
+ --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \
+ --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \
+ --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \
+ --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \
+ --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \
+ --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84
+ # via
+ # gcp-docuploader
+ # google-api-core
+ # googleapis-common-protos
+ # proto-plus
+pyasn1==0.6.1 \
+ --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \
+ --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034
+ # via
+ # pyasn1-modules
+ # rsa
+pyasn1-modules==0.4.1 \
+ --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \
+ --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c
+ # via google-auth
+requests==2.32.3 \
+ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
+ --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
+ # via
+ # google-api-core
+ # google-cloud-storage
+rsa==4.9 \
+ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
+ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
+ # via google-auth
+six==1.17.0 \
+ --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \
+ --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81
+ # via gcp-docuploader
+tomli==2.2.1 \
+ --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \
+ --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \
+ --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \
+ --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \
+ --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \
+ --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \
+ --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \
+ --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \
+ --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \
+ --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \
+ --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \
+ --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \
+ --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \
+ --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \
+ --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \
+ --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \
+ --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \
+ --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \
+ --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \
+ --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \
+ --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \
+ --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \
+ --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \
+ --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \
+ --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \
+ --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \
+ --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \
+ --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \
+ --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \
+ --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \
+ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \
+ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7
+ # via nox
+urllib3==2.3.0 \
+ --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
+ --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
+ # via requests
+virtualenv==20.28.1 \
+ --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \
+ --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329
+ # via nox
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 48e8985..722f447 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,9 +30,9 @@
env_vars: {
key: "V2_STAGING_BUCKET"
- # Push non-cloud library docs to `docs-staging-v2-staging` instead of the
+ # Push non-cloud library docs to `docs-staging-v2-dev` instead of the
# Cloud RAD bucket `docs-staging-v2`
- value: "docs-staging-v2-staging"
+ value: "docs-staging-v2-dev"
}
# It will upload the docker image after successful builds.
@@ -64,4 +64,4 @@
keyname: "docuploader_service_account"
}
}
-}
\ No newline at end of file
+}
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
index f525142..c435402 100755
--- a/.kokoro/populate-secrets.sh
+++ b/.kokoro/populate-secrets.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2020 Google LLC.
+# Copyright 2024 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg
new file mode 100644
index 0000000..3595fb4
--- /dev/null
+++ b/.kokoro/presubmit/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "prerelease_deps"
+}
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index 8acb14e..4ed4aaf 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,21 +20,15 @@
export PATH="${HOME}/.local/bin:${PATH}"
-# Install nox
-python3 -m pip install --user --upgrade --quiet nox
-python3 -m nox --version
-
# build docs
nox -s docs
-python3 -m pip install --user gcp-docuploader
-
# create metadata
-python3 -m docuploader create-metadata \
+python3.10 -m docuploader create-metadata \
--name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
- --version=$(python3 setup.py --version) \
+ --version=$(python3.10 setup.py --version) \
--language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
- --distribution-name=$(python3 setup.py --name) \
+ --distribution-name=$(python3.10 setup.py --name) \
--product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
--github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
--issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
@@ -42,18 +36,18 @@
cat docs.metadata
# upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
# docfx yaml files
nox -s docfx
# create metadata.
-python3 -m docuploader create-metadata \
+python3.10 -m docuploader create-metadata \
--name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
- --version=$(python3 setup.py --version) \
+ --version=$(python3.10 setup.py --version) \
--language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
- --distribution-name=$(python3 setup.py --name) \
+ --distribution-name=$(python3.10 setup.py --name) \
--product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
--github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
--issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
@@ -61,4 +55,4 @@
cat docs.metadata
# upload docs
-python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
+python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
deleted file mode 100755
index 0728ce1..0000000
--- a/.kokoro/release.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Start the releasetool reporter
-python3 -m pip install gcp-releasetool
-python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
-
-# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
-python3 -m pip install --upgrade twine wheel setuptools
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token")
-cd github/python-api-core
-python3 setup.py sdist bdist_wheel
-twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
deleted file mode 100644
index 586e764..0000000
--- a/.kokoro/release/common.cfg
+++ /dev/null
@@ -1,30 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/release.sh"
-}
-
-# Tokens needed to report release status back to GitHub
-env_vars: {
- key: "SECRET_MANAGER_KEYS"
- value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token"
-}
diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg
deleted file mode 100644
index 8f43917..0000000
--- a/.kokoro/release/release.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.11/common.cfg
similarity index 93%
copy from .kokoro/samples/python3.6/common.cfg
copy to .kokoro/samples/python3.11/common.cfg
index 3bb6b3a..d3597f0 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.11/common.cfg
@@ -10,13 +10,13 @@
# Specify which tests to run
env_vars: {
key: "RUN_TESTS_SESSION"
- value: "py-3.6"
+ value: "py-3.11"
}
# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-py36"
+ value: "python-docs-samples-tests-311"
}
env_vars: {
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.11/continuous.cfg
similarity index 100%
copy from .kokoro/samples/python3.6/presubmit.cfg
copy to .kokoro/samples/python3.11/continuous.cfg
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg
similarity index 100%
rename from .kokoro/samples/python3.6/periodic-head.cfg
rename to .kokoro/samples/python3.11/periodic-head.cfg
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.11/periodic.cfg
similarity index 100%
rename from .kokoro/samples/python3.6/periodic.cfg
rename to .kokoro/samples/python3.11/periodic.cfg
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.11/presubmit.cfg
similarity index 100%
rename from .kokoro/samples/python3.6/presubmit.cfg
rename to .kokoro/samples/python3.11/presubmit.cfg
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.12/common.cfg
similarity index 93%
copy from .kokoro/samples/python3.6/common.cfg
copy to .kokoro/samples/python3.12/common.cfg
index 3bb6b3a..8a5840a 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.12/common.cfg
@@ -10,13 +10,13 @@
# Specify which tests to run
env_vars: {
key: "RUN_TESTS_SESSION"
- value: "py-3.6"
+ value: "py-3.12"
}
# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-py36"
+ value: "python-docs-samples-tests-312"
}
env_vars: {
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.12/continuous.cfg
similarity index 100%
copy from .kokoro/samples/python3.6/presubmit.cfg
copy to .kokoro/samples/python3.12/continuous.cfg
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg
similarity index 100%
copy from .kokoro/samples/python3.6/periodic-head.cfg
copy to .kokoro/samples/python3.12/periodic-head.cfg
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg
similarity index 100%
copy from .kokoro/samples/python3.6/periodic.cfg
copy to .kokoro/samples/python3.12/periodic.cfg
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg
similarity index 100%
copy from .kokoro/samples/python3.6/presubmit.cfg
copy to .kokoro/samples/python3.12/presubmit.cfg
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.13/common.cfg
similarity index 93%
rename from .kokoro/samples/python3.6/common.cfg
rename to .kokoro/samples/python3.13/common.cfg
index 3bb6b3a..2a4199f 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.13/common.cfg
@@ -10,13 +10,13 @@
# Specify which tests to run
env_vars: {
key: "RUN_TESTS_SESSION"
- value: "py-3.6"
+ value: "py-3.13"
}
# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
- value: "python-docs-samples-tests-py36"
+ value: "python-docs-samples-tests-313"
}
env_vars: {
@@ -37,4 +37,4 @@
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.13/continuous.cfg
similarity index 100%
copy from .kokoro/samples/python3.6/presubmit.cfg
copy to .kokoro/samples/python3.13/continuous.cfg
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg
similarity index 100%
copy from .kokoro/samples/python3.6/periodic-head.cfg
copy to .kokoro/samples/python3.13/periodic-head.cfg
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg
similarity index 100%
copy from .kokoro/samples/python3.6/periodic.cfg
copy to .kokoro/samples/python3.13/periodic.cfg
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg
similarity index 100%
copy from .kokoro/samples/python3.6/presubmit.cfg
copy to .kokoro/samples/python3.13/presubmit.cfg
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg
deleted file mode 100644
index 7218af1..0000000
--- a/.kokoro/samples/python3.6/continuous.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "INSTALL_LIBRARY_FROM_SOURCE"
- value: "True"
-}
-
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
index ba3a707..e9d8bd7 100755
--- a/.kokoro/test-samples-against-head.sh
+++ b/.kokoro/test-samples-against-head.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index 8a324c9..53e365b 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2021 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -33,7 +33,8 @@
env | grep KOKORO
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
+# `virtualenv==20.26.6` is added for Python 3.7 compatibility
+python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6
# Use secrets acessor service account to get secrets
if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
@@ -76,7 +77,7 @@
echo "------------------------------------------------------------"
# Use nox to execute the tests for the project.
- python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ python3.9 -m nox -s "$RUN_TESTS_SESSION"
EXIT=$?
# If this is a periodic build, send the test log to the FlakyBot.
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 11c042d..7933d82 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index f39236e..48f7969 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2017 Google Inc.
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
index 4af6cdc..35fa529 100755
--- a/.kokoro/trampoline_v2.sh
+++ b/.kokoro/trampoline_v2.sh
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 62eb5a7..1d74695 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,4 +1,4 @@
-# Copyright 2021 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -22,10 +22,10 @@
- id: end-of-file-fixer
- id: check-yaml
- repo: https://github.com/psf/black
- rev: 19.10b0
+ rev: 23.7.0
hooks:
- id: black
-- repo: https://gitlab.com/pycqa/flake8
- rev: 3.9.2
+- repo: https://github.com/pycqa/flake8
+ rev: 6.1.0
hooks:
- id: flake8
diff --git a/.repo-metadata.json b/.repo-metadata.json
index e16c9d2..0f0abd9 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -2,7 +2,7 @@
"name": "google-api-core",
"name_pretty": "Google API client core library",
"client_documentation": "https://googleapis.dev/python/google-api-core/latest",
- "release_level": "ga",
+ "release_level": "stable",
"language": "python",
"library_type": "CORE",
"repo": "googleapis/python-api-core",
diff --git a/.trampolinerc b/.trampolinerc
index 0eee72a..0080152 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -1,4 +1,4 @@
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# Template for .trampolinerc
-
# Add required env vars here.
required_envvars+=(
)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 04d47da..5717d6e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,369 @@
[1]: https://pypi.org/project/google-api-core/#history
+## [2.24.1](https://github.com/googleapis/python-api-core/compare/v2.24.0...v2.24.1) (2025-01-24)
+
+
+### Bug Fixes
+
+* Memory leak in bidi classes ([#770](https://github.com/googleapis/python-api-core/issues/770)) ([c1b8afa](https://github.com/googleapis/python-api-core/commit/c1b8afa4e2abe256e70651defccdc285f104ed19))
+* Resolve the issue where rpc timeout of 0 is used when timeout expires ([#776](https://github.com/googleapis/python-api-core/issues/776)) ([a5604a5](https://github.com/googleapis/python-api-core/commit/a5604a55070c6d92618d078191bf99f4c168d5f6))
+
+
+### Documentation
+
+* Add warnings regarding consuming externally sourced credentials ([#783](https://github.com/googleapis/python-api-core/issues/783)) ([0ec1825](https://github.com/googleapis/python-api-core/commit/0ec18254b90721684679a98bcacef4615467a227))
+
+## [2.24.0](https://github.com/googleapis/python-api-core/compare/v2.23.0...v2.24.0) (2024-12-06)
+
+
+### Features
+
+* Add automatic logging config to support debug logging ([#754](https://github.com/googleapis/python-api-core/issues/754)) ([d18d9b5](https://github.com/googleapis/python-api-core/commit/d18d9b5131162b44eebcc0859a7aca1198a2ac06))
+* Update recognized logging fields ([#766](https://github.com/googleapis/python-api-core/issues/766)) ([5f80f77](https://github.com/googleapis/python-api-core/commit/5f80f778bc25d878b3187c6138077ad8c6bcd35f))
+
+## [2.23.0](https://github.com/googleapis/python-api-core/compare/v2.22.0...v2.23.0) (2024-11-11)
+
+
+### Features
+
+* Migrate to pyproject.toml ([#736](https://github.com/googleapis/python-api-core/issues/736)) ([159e9a4](https://github.com/googleapis/python-api-core/commit/159e9a49525937f18a55c38136aae32575424d55))
+
+## [2.22.0](https://github.com/googleapis/python-api-core/compare/v2.21.0...v2.22.0) (2024-10-25)
+
+
+### Features
+
+* Add support for python 3.13 ([#696](https://github.com/googleapis/python-api-core/issues/696)) ([46b3d3a](https://github.com/googleapis/python-api-core/commit/46b3d3abaa1bae28e9d788d7c3006224cd6f74d5))
+
+
+### Bug Fixes
+
+* Add type hints to ClientOptions ([#735](https://github.com/googleapis/python-api-core/issues/735)) ([b91ed19](https://github.com/googleapis/python-api-core/commit/b91ed19210148dfa49ec790c4dd5f4a7bff80954))
+* Improve `Any` decode error ([#712](https://github.com/googleapis/python-api-core/issues/712)) ([0d5ed37](https://github.com/googleapis/python-api-core/commit/0d5ed37c96f9b40bccae98e228163a88abeb1763))
+* Require proto-plus >= 1.25.0 for Python 3.13 ([#740](https://github.com/googleapis/python-api-core/issues/740)) ([a26313e](https://github.com/googleapis/python-api-core/commit/a26313e1cb12e44aa498f12622edccc0c83ba0c3))
+* Switch to unittest.mock from mock ([#713](https://github.com/googleapis/python-api-core/issues/713)) ([8c53381](https://github.com/googleapis/python-api-core/commit/8c533819b7e212aa2f1d695a7ce08629f4fb2daf))
+
+## [2.21.0](https://github.com/googleapis/python-api-core/compare/v2.20.0...v2.21.0) (2024-10-07)
+
+
+### Features
+
+* Add support for asynchronous long running operations ([#724](https://github.com/googleapis/python-api-core/issues/724)) ([aaed69b](https://github.com/googleapis/python-api-core/commit/aaed69b6f1d694cd7e561e2aa03fdd8d6cfb369a))
+
+
+### Bug Fixes
+
+* Set chunk size for async stream content ([#702](https://github.com/googleapis/python-api-core/issues/702)) ([45b8a6d](https://github.com/googleapis/python-api-core/commit/45b8a6db5a5c75acdd8be896d0152f11608c7e51))
+
+## [2.20.0](https://github.com/googleapis/python-api-core/compare/v2.19.2...v2.20.0) (2024-09-18)
+
+
+### Features
+
+* Add async unsupported paramater exception ([#694](https://github.com/googleapis/python-api-core/issues/694)) ([8c137fe](https://github.com/googleapis/python-api-core/commit/8c137feb6e880fdd93d1248d9b6c10002dc3c096))
+* Add support for asynchronous rest streaming ([#686](https://github.com/googleapis/python-api-core/issues/686)) ([1b7bb6d](https://github.com/googleapis/python-api-core/commit/1b7bb6d1b721e4ee1561e8e4a347846d7fdd7c27))
+* Add support for creating exceptions from an asynchronous response ([#688](https://github.com/googleapis/python-api-core/issues/688)) ([1c4b0d0](https://github.com/googleapis/python-api-core/commit/1c4b0d079f2103a7b5562371a7bd1ada92528de3))
+
+## [2.19.2](https://github.com/googleapis/python-api-core/compare/v2.19.1...v2.19.2) (2024-08-16)
+
+
+### Bug Fixes
+
+* Fail gracefully if could not import `rpc_status` module ([#680](https://github.com/googleapis/python-api-core/issues/680)) ([7ccbf57](https://github.com/googleapis/python-api-core/commit/7ccbf5738fa236649f9a155055c71789362b5c4c))
+
+## [2.19.1](https://github.com/googleapis/python-api-core/compare/v2.19.0...v2.19.1) (2024-06-19)
+
+
+### Bug Fixes
+
+* Add support for protobuf 5.x ([#644](https://github.com/googleapis/python-api-core/issues/644)) ([fda0ca6](https://github.com/googleapis/python-api-core/commit/fda0ca6f0664ac5044671591ed62618175a7393f))
+* Ignore unknown fields in rest streaming. ([#651](https://github.com/googleapis/python-api-core/issues/651)) ([1203fb9](https://github.com/googleapis/python-api-core/commit/1203fb97d2685535f89113e944c4764c1deb595e))
+
+## [2.19.0](https://github.com/googleapis/python-api-core/compare/v2.18.0...v2.19.0) (2024-04-29)
+
+
+### Features
+
+* Add google.api_core.version_header ([#638](https://github.com/googleapis/python-api-core/issues/638)) ([a7b53e9](https://github.com/googleapis/python-api-core/commit/a7b53e9e9a7deb88baf92a2827958429e3677069))
+
+## [2.18.0](https://github.com/googleapis/python-api-core/compare/v2.17.1...v2.18.0) (2024-03-20)
+
+
+### Features
+
+* Add common logic for supporting universe domain ([#621](https://github.com/googleapis/python-api-core/issues/621)) ([94f2ca3](https://github.com/googleapis/python-api-core/commit/94f2ca3b4d094e6e10154634d3463d07ebea2035))
+
+
+### Bug Fixes
+
+* Add _registered_method to grpc ChannelStub ([#614](https://github.com/googleapis/python-api-core/issues/614)) ([5eaaea8](https://github.com/googleapis/python-api-core/commit/5eaaea8a989f8bdbdb5fbc95a155a20837c87f42))
+* **deps:** Require proto-plus >= 1.22.3 ([#626](https://github.com/googleapis/python-api-core/issues/626)) ([4fed37c](https://github.com/googleapis/python-api-core/commit/4fed37cbc32122f156e38250b5fa8b2b08a787a1))
+
+## [2.17.1](https://github.com/googleapis/python-api-core/compare/v2.17.0...v2.17.1) (2024-02-13)
+
+
+### Bug Fixes
+
+* Resolve issue handling protobuf responses in rest streaming ([#604](https://github.com/googleapis/python-api-core/issues/604)) ([bcebc92](https://github.com/googleapis/python-api-core/commit/bcebc92eca69dae81c5e546d526c92b164a6b3b4))
+
+## [2.17.0](https://github.com/googleapis/python-api-core/compare/v2.16.2...v2.17.0) (2024-02-06)
+
+
+### Features
+
+* Add attempt_direct_path argument to create_channel ([#583](https://github.com/googleapis/python-api-core/issues/583)) ([94726e7](https://github.com/googleapis/python-api-core/commit/94726e739698035b00667983f854c600252abd28))
+
+
+### Bug Fixes
+
+* Retry constructors methods support None ([#592](https://github.com/googleapis/python-api-core/issues/592)) ([416203c](https://github.com/googleapis/python-api-core/commit/416203c1888934670bfeccafe5f5469f87314512))
+
+## [2.16.2](https://github.com/googleapis/python-api-core/compare/v2.16.1...v2.16.2) (2024-02-02)
+
+
+### Bug Fixes
+
+* Spelling error `a,out` -> `amount` ([#596](https://github.com/googleapis/python-api-core/issues/596)) ([88688b1](https://github.com/googleapis/python-api-core/commit/88688b1625c4dab0df6124a0560f550eb322500f))
+
+## [2.16.1](https://github.com/googleapis/python-api-core/compare/v2.16.0...v2.16.1) (2024-01-30)
+
+
+### Bug Fixes
+
+* Fix broken import for google.api_core.retry_async.AsyncRetry ([#587](https://github.com/googleapis/python-api-core/issues/587)) ([ac012c0](https://github.com/googleapis/python-api-core/commit/ac012c04c69b8bbe72962f0d0d9e9536c0b4a524))
+
+## [2.16.0](https://github.com/googleapis/python-api-core/compare/v2.15.0...v2.16.0) (2024-01-29)
+
+
+### Features
+
+* Retry and retry_async support streaming rpcs ([#495](https://github.com/googleapis/python-api-core/issues/495)) ([17ff5f1](https://github.com/googleapis/python-api-core/commit/17ff5f1d83a9a6f50a0226fb0e794634bd584f17))
+
+## [2.15.0](https://github.com/googleapis/python-api-core/compare/v2.14.0...v2.15.0) (2023-12-07)
+
+
+### Features
+
+* Add support for Python 3.12 ([#557](https://github.com/googleapis/python-api-core/issues/557)) ([091b4f1](https://github.com/googleapis/python-api-core/commit/091b4f1c7fcc59c3f2a02ee44fd3c30b78423f12))
+* Add type annotations to wrapped grpc calls ([#554](https://github.com/googleapis/python-api-core/issues/554)) ([fc12b40](https://github.com/googleapis/python-api-core/commit/fc12b40bfc6e0c4bb313196e2e3a9c9374ce1c45))
+* Add universe_domain argument to ClientOptions ([3069ef4](https://github.com/googleapis/python-api-core/commit/3069ef4b9123ddb64841cbb7bbb183b53d502e0a))
+* Introduce compatibility with native namespace packages ([#561](https://github.com/googleapis/python-api-core/issues/561)) ([bd82827](https://github.com/googleapis/python-api-core/commit/bd82827108f1eeb6c05cfacf6c044b2afacc18a2))
+
+
+### Bug Fixes
+
+* Fix regression in `bidi` causing `Thread-ConsumeBidirectionalStream caught unexpected exception and will exit` ([#562](https://github.com/googleapis/python-api-core/issues/562)) ([40c8ae0](https://github.com/googleapis/python-api-core/commit/40c8ae0cf1f797e31e106461164e22db4fb2d3d9))
+* Replace deprecated `datetime.datetime.utcnow()` ([#552](https://github.com/googleapis/python-api-core/issues/552)) ([448923a](https://github.com/googleapis/python-api-core/commit/448923acf277a70e8704c949311bf4feaef8cab6)), closes [#540](https://github.com/googleapis/python-api-core/issues/540)
+
+## [2.14.0](https://github.com/googleapis/python-api-core/compare/v2.13.1...v2.14.0) (2023-11-09)
+
+
+### Features
+
+* Support with_call for wrapped rpcs ([#550](https://github.com/googleapis/python-api-core/issues/550)) ([01a57a7](https://github.com/googleapis/python-api-core/commit/01a57a745f4c8345c9c93412c27dd416b49f5953))
+
+## [2.13.1](https://github.com/googleapis/python-api-core/compare/v2.13.0...v2.13.1) (2023-11-09)
+
+
+### Bug Fixes
+
+* Update async client to use async retry ([#544](https://github.com/googleapis/python-api-core/issues/544)) ([f21bb32](https://github.com/googleapis/python-api-core/commit/f21bb32b8e6310116a642a6e6b6dd8e44e30e656))
+
+## [2.13.0](https://github.com/googleapis/python-api-core/compare/v2.12.0...v2.13.0) (2023-11-03)
+
+
+### Features
+
+* Add caching to routing header calculation ([#526](https://github.com/googleapis/python-api-core/issues/526)) ([6251eab](https://github.com/googleapis/python-api-core/commit/6251eab3fca5f7e509cb9b6e476ce1184094b711))
+
+
+### Bug Fixes
+
+* Add warning to retry target to avoid incorrect usage ([#543](https://github.com/googleapis/python-api-core/issues/543)) ([bfb40e6](https://github.com/googleapis/python-api-core/commit/bfb40e6929ef47be7a6464d2f1e0d06595736b8d))
+* Drop usage of distutils ([#541](https://github.com/googleapis/python-api-core/issues/541)) ([4bd9e10](https://github.com/googleapis/python-api-core/commit/4bd9e10f20eea227c88e3e1496010cca6dd8a270))
+* Ensure exception is available when BackgroundConsumer open stream fails ([#357](https://github.com/googleapis/python-api-core/issues/357)) ([405272c](https://github.com/googleapis/python-api-core/commit/405272c05f8c6d20e242c6172b01f78f0fd3bf32))
+
+## [2.12.0](https://github.com/googleapis/python-api-core/compare/v2.11.1...v2.12.0) (2023-09-07)
+
+
+### Features
+
+* Add a little bit of typing to google.api_core.retry ([#453](https://github.com/googleapis/python-api-core/issues/453)) ([2477ab9](https://github.com/googleapis/python-api-core/commit/2477ab9ea5c2e863a493fb7ebebaa429a44ea096))
+* Add grpc Compression argument to channels and methods ([#451](https://github.com/googleapis/python-api-core/issues/451)) ([bdebd63](https://github.com/googleapis/python-api-core/commit/bdebd6331f9c0d3d1a8ceaf274f07d2ed75bfe92))
+
+
+### Documentation
+
+* Fix a typo in google/api_core/page_iterator.py ([#511](https://github.com/googleapis/python-api-core/issues/511)) ([c0ce73c](https://github.com/googleapis/python-api-core/commit/c0ce73c4de53ad694fe36d17408998aa1230398f))
+
+## [2.11.1](https://github.com/googleapis/python-api-core/compare/v2.11.0...v2.11.1) (2023-06-12)
+
+
+### Bug Fixes
+
+* Add actionable errors for GCE long running operations ([#498](https://github.com/googleapis/python-api-core/issues/498)) ([7dfc3a7](https://github.com/googleapis/python-api-core/commit/7dfc3a7a439243f05238a11b68a31720fde1769e))
+* Invalid `dev` version identifiers in `setup.py` ([#505](https://github.com/googleapis/python-api-core/issues/505)) ([8844edb](https://github.com/googleapis/python-api-core/commit/8844edb1e802040810918a12bc9ff89104da38d4))
+
+## [2.11.0](https://github.com/googleapis/python-api-core/compare/v2.10.2...v2.11.0) (2022-11-10)
+
+
+### Features
+
+* Add support for Python 3.11 ([#466](https://github.com/googleapis/python-api-core/issues/466)) ([ff379e3](https://github.com/googleapis/python-api-core/commit/ff379e304c353bcab734e1c4706b74b356a1e932))
+* Allow representing enums with their unqualified symbolic names in headers ([#465](https://github.com/googleapis/python-api-core/issues/465)) ([522b98e](https://github.com/googleapis/python-api-core/commit/522b98ecc1ebd1c2280d3d7c73a02f6e4fb528d4))
+
+
+### Bug Fixes
+
+* Major refactoring of Polling, Retry and Timeout logic ([#462](https://github.com/googleapis/python-api-core/issues/462)) ([434253d](https://github.com/googleapis/python-api-core/commit/434253de16d9efdf984ddb64c409706cda1d5f82))
+* Require google-auth >= 2.14.1 ([#463](https://github.com/googleapis/python-api-core/issues/463)) ([7cc329f](https://github.com/googleapis/python-api-core/commit/7cc329fe1498b0a4285123448e4ea80c6a780d47))
+
+## [2.10.2](https://github.com/googleapis/python-api-core/compare/v2.10.1...v2.10.2) (2022-10-08)
+
+
+### Bug Fixes
+
+* **deps:** Allow protobuf 3.19.5 ([#459](https://github.com/googleapis/python-api-core/issues/459)) ([e949364](https://github.com/googleapis/python-api-core/commit/e949364ce3a2c4c3cdb2658054d4793aa942d999))
+
+## [2.10.1](https://github.com/googleapis/python-api-core/compare/v2.10.0...v2.10.1) (2022-09-14)
+
+
+### Bug Fixes
+
+* Improve transcoding error message ([#442](https://github.com/googleapis/python-api-core/issues/442)) ([538df80](https://github.com/googleapis/python-api-core/commit/538df80ed6d21f43b512a73853935f7a7b9bdf52))
+
+## [2.10.0](https://github.com/googleapis/python-api-core/compare/v2.9.0...v2.10.0) (2022-09-02)
+
+
+### Features
+
+* Add 'strict' to flatten_query_params to lower-case bools ([#433](https://github.com/googleapis/python-api-core/issues/433)) ([83678e9](https://github.com/googleapis/python-api-core/commit/83678e94e1081f9087b19c43f26fad4774184d66))
+
+## [2.9.0](https://github.com/googleapis/python-api-core/compare/v2.8.2...v2.9.0) (2022-09-01)
+
+
+### Features
+
+* Make grpc transcode logic work in terms of protobuf python objects ([#428](https://github.com/googleapis/python-api-core/issues/428)) ([c3ad8ea](https://github.com/googleapis/python-api-core/commit/c3ad8ea67447e3d8a1154d7a9221e116f60d425a))
+
+
+### Bug Fixes
+
+* Require python 3.7+ ([#410](https://github.com/googleapis/python-api-core/issues/410)) ([7ddb8c0](https://github.com/googleapis/python-api-core/commit/7ddb8c00e6be7ab6905a9a802ad1c3063fbfa46c))
+* Restore support for grpcio-gcp ([#418](https://github.com/googleapis/python-api-core/issues/418)) ([8c19609](https://github.com/googleapis/python-api-core/commit/8c19609d6244930bd91fd5f40ef9b5b65584c4a5))
+
+## [2.8.2](https://github.com/googleapis/python-api-core/compare/v2.8.1...v2.8.2) (2022-06-13)
+
+
+### Bug Fixes
+
+* **deps:** allow protobuf < 5.0.0 ([#400](https://github.com/googleapis/python-api-core/issues/400)) ([8f73d2e](https://github.com/googleapis/python-api-core/commit/8f73d2ee2d3af2201f877aa7e2f7361147759dc7))
+* drop support for grpc-gcp ([#401](https://github.com/googleapis/python-api-core/issues/401)) ([5da6733](https://github.com/googleapis/python-api-core/commit/5da6733a475c436efc11b14889af73b3a0e20379))
+
+
+### Documentation
+
+* fix changelog header to consistent size ([#394](https://github.com/googleapis/python-api-core/issues/394)) ([ac266e9](https://github.com/googleapis/python-api-core/commit/ac266e935bc4e7c6dff250384407e7a60d8dba90))
+* Fix typo in the BackgroundConsumer docstring ([#395](https://github.com/googleapis/python-api-core/issues/395)) ([0eb727f](https://github.com/googleapis/python-api-core/commit/0eb727f92314db3c4383754514f75a49ba02e27b))
+
+## [2.8.1](https://github.com/googleapis/python-api-core/compare/v2.8.0...v2.8.1) (2022-05-26)
+
+
+### Bug Fixes
+
+* **deps:** require googleapis-common-protos >= 1.56.2 ([d84d66c](https://github.com/googleapis/python-api-core/commit/d84d66c2a4107f5f9a20c53e870a27fb1250ea3d))
+* **deps:** require protobuf>= 3.15.0, <4.0.0dev ([#385](https://github.com/googleapis/python-api-core/issues/385)) ([d84d66c](https://github.com/googleapis/python-api-core/commit/d84d66c2a4107f5f9a20c53e870a27fb1250ea3d))
+
+## [2.8.0](https://github.com/googleapis/python-api-core/compare/v2.7.3...v2.8.0) (2022-05-18)
+
+
+### Features
+
+* adds support for audience in client_options ([#379](https://github.com/googleapis/python-api-core/issues/379)) ([c97c498](https://github.com/googleapis/python-api-core/commit/c97c4980125a86f384cdf12720df7bb1a2adf9d2))
+* adds support for audience in client_options. ([c97c498](https://github.com/googleapis/python-api-core/commit/c97c4980125a86f384cdf12720df7bb1a2adf9d2))
+
+## [2.7.3](https://github.com/googleapis/python-api-core/compare/v2.7.2...v2.7.3) (2022-04-29)
+
+
+### Bug Fixes
+
+* Avoid AttributeError if grpcio-status is not installed ([#370](https://github.com/googleapis/python-api-core/issues/370)) ([022add1](https://github.com/googleapis/python-api-core/commit/022add16266f9c07f0f88eea13472cc2e0bfc991))
+
+## [2.7.2](https://github.com/googleapis/python-api-core/compare/v2.7.1...v2.7.2) (2022-04-13)
+
+
+### Bug Fixes
+
+* allow grpc without grpcio-status ([#355](https://github.com/googleapis/python-api-core/issues/355)) ([112049e](https://github.com/googleapis/python-api-core/commit/112049e79f5a5b0a989d85d438a1bd29485f46f7))
+* remove dependency on pkg_resources ([#361](https://github.com/googleapis/python-api-core/issues/361)) ([523dbd0](https://github.com/googleapis/python-api-core/commit/523dbd0b10d37ffcf83fa751f0bad313f162abf1))
+
+## [2.7.1](https://github.com/googleapis/python-api-core/compare/v2.7.0...v2.7.1) (2022-03-09)
+
+
+### Bug Fixes
+
+* add more context to error message. ([#340](https://github.com/googleapis/python-api-core/issues/340)) ([0680fb4](https://github.com/googleapis/python-api-core/commit/0680fb4d3e013fe2de27e0a2ae2cd9896479e596))
+
+## [2.7.0](https://github.com/googleapis/python-api-core/compare/v2.6.1...v2.7.0) (2022-03-08)
+
+
+### Features
+
+* expose extra fields in ExtendedOperation ([#351](https://github.com/googleapis/python-api-core/issues/351)) ([9abc6f4](https://github.com/googleapis/python-api-core/commit/9abc6f48f23c87b9771dca3c96b4f6af39620a50))
+
+## [2.6.1](https://github.com/googleapis/python-api-core/compare/v2.6.0...v2.6.1) (2022-03-05)
+
+
+### Bug Fixes
+
+* Remove py2 tag from wheel ([#343](https://github.com/googleapis/python-api-core/issues/343)) ([7e21e9e](https://github.com/googleapis/python-api-core/commit/7e21e9e34892472a34f9b44175fa761f0e3fd9ed))
+
+## [2.6.0](https://github.com/googleapis/python-api-core/compare/v2.5.0...v2.6.0) (2022-03-03)
+
+
+### Features
+
+* initial support for Extended Operations ([#344](https://github.com/googleapis/python-api-core/issues/344)) ([021bb7d](https://github.com/googleapis/python-api-core/commit/021bb7d5bf0a1d8ac58dbf0c738fac309135ba7d))
+
+## [2.5.0](https://github.com/googleapis/python-api-core/compare/v2.4.0...v2.5.0) (2022-02-02)
+
+
+### Features
+
+* add api_key to client options ([#248](https://github.com/googleapis/python-api-core/issues/248)) ([5e5ad37](https://github.com/googleapis/python-api-core/commit/5e5ad37b8161109d65b0fab43636f7424e570fa3))
+
+
+### Bug Fixes
+
+* **deps:** remove setuptools from dependencies ([#339](https://github.com/googleapis/python-api-core/issues/339)) ([c782f29](https://github.com/googleapis/python-api-core/commit/c782f294b50b078f01959627fb82aa4c5efec333))
+
+
+### Documentation
+
+* fix typo in library name ([#332](https://github.com/googleapis/python-api-core/issues/332)) ([f267111](https://github.com/googleapis/python-api-core/commit/f267111823545a6c67ef5f10b85cd8c2fab8a612))
+
+## [2.4.0](https://www.github.com/googleapis/python-api-core/compare/v2.3.2...v2.4.0) (2022-01-11)
+
+
+### Features
+
+* add support for 'error_info' ([#315](https://www.github.com/googleapis/python-api-core/issues/315)) ([cc46aa6](https://www.github.com/googleapis/python-api-core/commit/cc46aa68ec184871330d16a6c767f57a4f0eb633))
+* iterator for processing JSON responses in REST streaming. ([#317](https://www.github.com/googleapis/python-api-core/issues/317)) ([f9f2696](https://www.github.com/googleapis/python-api-core/commit/f9f26969842b456ea372bed941d712b7a9ab7239))
+
+## [2.3.2](https://www.github.com/googleapis/python-api-core/compare/v2.3.1...v2.3.2) (2021-12-16)
+
+
+### Bug Fixes
+
+* address broken wheels in version 2.3.1
+
+## [2.3.1](https://www.github.com/googleapis/python-api-core/compare/v2.3.0...v2.3.1) (2021-12-15)
+
+
+### Bug Fixes
+* exclude function target from retry deadline exceeded exception message ([#318](https://www.github.com/googleapis/python-api-core/issues/318)) ([34ebdcc](https://www.github.com/googleapis/python-api-core/commit/34ebdcc251d4f3d7d496e8e0b78847645a06650b))
+
## [2.3.0](https://www.github.com/googleapis/python-api-core/compare/v2.2.2...v2.3.0) (2021-11-25)
@@ -16,14 +379,14 @@
* handle bare 'grpc.Call' in 'from_grpc_error' ([#298](https://www.github.com/googleapis/python-api-core/issues/298)) ([060b339](https://www.github.com/googleapis/python-api-core/commit/060b339e3af296dd1772bfc1b4a0d2b4264cae1f))
-### [2.2.2](https://www.github.com/googleapis/python-api-core/compare/v2.2.1...v2.2.2) (2021-11-02)
+## [2.2.2](https://www.github.com/googleapis/python-api-core/compare/v2.2.1...v2.2.2) (2021-11-02)
### Bug Fixes
* make 'gapic_v1.method.DEFAULT' a typed object ([#292](https://www.github.com/googleapis/python-api-core/issues/292)) ([ffc51f0](https://www.github.com/googleapis/python-api-core/commit/ffc51f03c7ce5d9f009ba859b8df385d52925578))
-### [2.2.1](https://www.github.com/googleapis/python-api-core/compare/v2.2.0...v2.2.1) (2021-10-26)
+## [2.2.1](https://www.github.com/googleapis/python-api-core/compare/v2.2.0...v2.2.1) (2021-10-26)
### Bug Fixes
@@ -37,7 +400,7 @@
* add 'GoogleAPICallError.error_details' property ([#286](https://www.github.com/googleapis/python-api-core/issues/286)) ([ef6f0fc](https://www.github.com/googleapis/python-api-core/commit/ef6f0fcfdfe771172056e35e3c990998b3b00416))
-### [2.1.1](https://www.github.com/googleapis/python-api-core/compare/v2.1.0...v2.1.1) (2021-10-13)
+## [2.1.1](https://www.github.com/googleapis/python-api-core/compare/v2.1.0...v2.1.1) (2021-10-13)
### Bug Fixes
@@ -53,7 +416,7 @@
* Add helper function to format query_params for rest transport. ([#275](https://www.github.com/googleapis/python-api-core/issues/275)) ([1c5eb4d](https://www.github.com/googleapis/python-api-core/commit/1c5eb4df93d78e791082d9282330ebf0faacd222))
* add support for Python 3.10 ([#284](https://www.github.com/googleapis/python-api-core/issues/284)) ([a422a5d](https://www.github.com/googleapis/python-api-core/commit/a422a5d72cb6f363d57e7a4effe421ba8e049cde))
-### [2.0.1](https://www.github.com/googleapis/python-api-core/compare/v2.0.0...v2.0.1) (2021-08-31)
+## [2.0.1](https://www.github.com/googleapis/python-api-core/compare/v2.0.0...v2.0.1) (2021-08-31)
### Bug Fixes
@@ -82,7 +445,7 @@
* strip trailing _ from field mask paths ([#228](https://www.github.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416))
-### [1.31.1](https://www.github.com/googleapis/python-api-core/compare/v1.31.0...v1.31.1) (2021-07-26)
+## [1.31.1](https://www.github.com/googleapis/python-api-core/compare/v1.31.0...v1.31.1) (2021-07-26)
### Bug Fixes
@@ -145,7 +508,7 @@
* Add support for `rest/` token in `x-goog-api-client` header ([#189](https://www.github.com/googleapis/python-api-core/issues/189)) ([15aca6b](https://www.github.com/googleapis/python-api-core/commit/15aca6b288b2ec5ce0251e442e1dfa7f52e1b124))
* retry google.auth TransportError and requests ConnectionError ([#178](https://www.github.com/googleapis/python-api-core/issues/178)) ([6ae04a8](https://www.github.com/googleapis/python-api-core/commit/6ae04a8d134fffe13f06081e15f9723c1b2ea334))
-### [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25)
+## [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25)
### Bug Fixes
@@ -157,14 +520,14 @@
* update python contributing guide ([#147](https://www.github.com/googleapis/python-api-core/issues/147)) ([1d76b57](https://www.github.com/googleapis/python-api-core/commit/1d76b57d1f218f7885f85dc7c052bad1ad3857ac))
-### [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23)
+## [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23)
### Bug Fixes
* save empty IAM policy bindings ([#155](https://www.github.com/googleapis/python-api-core/issues/155)) ([536c2ca](https://www.github.com/googleapis/python-api-core/commit/536c2cad814b8fa8cd346a3d7bd5f6b9889c4a6f))
-### [1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12)
+## [1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12)
### Bug Fixes
@@ -178,7 +541,7 @@
* allow default_host and default_scopes to be passed to create_channel ([#134](https://www.github.com/googleapis/python-api-core/issues/134)) ([94c76e0](https://www.github.com/googleapis/python-api-core/commit/94c76e0873e5b2f42331d5b1ad286c1e63b61395))
-### [1.25.1](https://www.github.com/googleapis/python-api-core/compare/v1.25.0...v1.25.1) (2021-01-25)
+## [1.25.1](https://www.github.com/googleapis/python-api-core/compare/v1.25.0...v1.25.1) (2021-01-25)
### Bug Fixes
@@ -202,7 +565,7 @@
* **python:** document adding Python 3.9 support, dropping 3.5 support ([#120](https://www.github.com/googleapis/python-api-core/issues/120)) ([b51b7f5](https://www.github.com/googleapis/python-api-core/commit/b51b7f587042fe9340371c1b5c8e9adf8001c43a)), closes [#787](https://www.github.com/googleapis/python-api-core/issues/787)
-### [1.24.1](https://www.github.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1) (2020-12-16)
+## [1.24.1](https://www.github.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1) (2020-12-16)
### Bug Fixes
@@ -235,28 +598,28 @@
* harden install to use full paths, and windows separators on windows ([#88](https://www.github.com/googleapis/python-api-core/issues/88)) ([db8e636](https://www.github.com/googleapis/python-api-core/commit/db8e636f545a8872f959e3f403cfec30ffed6c34))
* update out-of-date comment in exceptions.py ([#93](https://www.github.com/googleapis/python-api-core/issues/93)) ([70ebe42](https://www.github.com/googleapis/python-api-core/commit/70ebe42601b3d088b3421233ef7d8245229b7265))
-### [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05)
+## [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05)
### Bug Fixes
* use version.py instead of pkg_resources.get_distribution ([#80](https://www.github.com/googleapis/python-api-core/issues/80)) ([d480d97](https://www.github.com/googleapis/python-api-core/commit/d480d97e41cd6705325b3b649360553a83c23f47))
-### [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02)
+## [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02)
### Bug Fixes
* **deps:** require six >= 1.13.0 ([#78](https://www.github.com/googleapis/python-api-core/issues/78)) ([a7a8b98](https://www.github.com/googleapis/python-api-core/commit/a7a8b98602a3eb277fdc607ac69f3bcb147f3351)), closes [/github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES#L30-L31](https://www.github.com/googleapis//github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES/issues/L30-L31)
-### [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03)
+## [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03)
### Bug Fixes
* only add quota project id if supported ([#75](https://www.github.com/googleapis/python-api-core/issues/75)) ([8f8ee78](https://www.github.com/googleapis/python-api-core/commit/8f8ee7879e4f834f3c676e535ffc41b5b9b2de62))
-### [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12)
+## [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12)
### Documentation
@@ -287,7 +650,7 @@
* allow credentials files to be passed for channel creation ([#50](https://www.github.com/googleapis/python-api-core/issues/50)) ([ded92d0](https://www.github.com/googleapis/python-api-core/commit/ded92d0acdcde4295d0e5df05fda0d83783a3991))
-### [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16)
+## [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16)
### Bug Fixes
@@ -301,7 +664,7 @@
* allow disabling response stream pre-fetch ([#30](https://www.github.com/googleapis/python-api-core/issues/30)) ([74e0b0f](https://www.github.com/googleapis/python-api-core/commit/74e0b0f8387207933c120af15b2bb5d175dd8f84)), closes [#25](https://www.github.com/googleapis/python-api-core/issues/25)
-### [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06)
+## [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06)
### Bug Fixes
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 6b375f0..1a1f608 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -21,7 +21,7 @@
documentation.
- The feature must work fully on the following CPython versions:
- 3.6, 3.7, 3.8, 3.9, and 3.10 on both UNIX and Windows.
+ 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -71,7 +71,7 @@
- To run a single unit test::
- $ nox -s unit-3.10 -- -k <name of test>
+ $ nox -s unit-3.13 -- -k <name of test>
.. note::
@@ -197,17 +197,21 @@
We support:
-- `Python 3.6`_
- `Python 3.7`_
- `Python 3.8`_
- `Python 3.9`_
- `Python 3.10`_
+- `Python 3.11`_
+- `Python 3.12`_
+- `Python 3.13`_
-.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
.. _Python 3.9: https://docs.python.org/3.9/
.. _Python 3.10: https://docs.python.org/3.10/
+.. _Python 3.11: https://docs.python.org/3.11/
+.. _Python 3.12: https://docs.python.org/3.12/
+.. _Python 3.13: https://docs.python.org/3.13/
Supported versions can be found in our ``noxfile.py`` `config`_.
@@ -215,18 +219,6 @@
.. _config: https://github.com/googleapis/python-api-core/blob/main/noxfile.py
-We also explicitly decided to support Python 3 beginning with version 3.6.
-Reasons for this include:
-
-- Encouraging use of newest versions of Python 3
-- Taking the lead of `prominent`_ open-source `projects`_
-- `Unicode literal support`_ which allows for a cleaner codebase that
- works in both Python 2 and Python 3
-
-.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
-.. _projects: http://flask.pocoo.org/docs/0.10/python3/
-.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
-
**********
Versioning
**********
diff --git a/MANIFEST.in b/MANIFEST.in
index e783f4c..d6814cd 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/METADATA b/METADATA
index cfe802c..559918d 100644
--- a/METADATA
+++ b/METADATA
@@ -1,18 +1,20 @@
-name: "python-api-core"
-description:
- "This library is not meant to stand-alone. Instead it defines common "
- "helpers used by all Google API clients."
+# This project was upgraded with external_updater.
+# Usage: tools/external_updater/updater.sh update external/python/python-api-core
+# For more info, check https://cs.android.com/android/platform/superproject/main/+/main:tools/external_updater/README.md
+name: "python-api-core"
+description: "This library is not meant to stand-alone. Instead it defines common helpers used by all Google API clients."
third_party {
- url {
- type: HOMEPAGE
- value: "https://pypi.org/project/google-api-core/"
- }
- url {
- type: GIT
- value: "https://github.com/googleapis/python-api-core"
- }
- version: "v2.3.0"
- last_upgrade_date { year: 2022 month: 1 day: 4 }
license_type: NOTICE
+ last_upgrade_date {
+ year: 2025
+ month: 1
+ day: 29
+ }
+ homepage: "https://pypi.org/project/google-api-core/"
+ identifier {
+ type: "Git"
+ value: "https://github.com/googleapis/python-api-core"
+ version: "v2.24.1"
+ }
}
diff --git a/README.rst b/README.rst
index d94f3e8..58ae26c 100644
--- a/README.rst
+++ b/README.rst
@@ -16,13 +16,16 @@
Supported Python Versions
-------------------------
-Python >= 3.6
+Python >= 3.7
Unsupported Python Versions
---------------------------
-Python == 2.7, Python == 3.5.
+Python == 2.7, Python == 3.5, Python == 3.6.
The last version of this library compatible with Python 2.7 and 3.5 is
-`google-api_core==1.31.1`.
+`google-api-core==1.31.1`.
+
+The last version of this library compatible with Python 3.6 is
+`google-api-core==2.8.2`.
diff --git a/docs/auth.rst b/docs/auth.rst
index a9b296d..3dcc5fd 100644
--- a/docs/auth.rst
+++ b/docs/auth.rst
@@ -165,7 +165,7 @@
getting started with the ``google-cloud-*`` library.
The simplest way to use credentials from a user account is via
-Application Default Credentials using ``gcloud auth login``
+Application Default Credentials using ``gcloud auth application-default login``
(as mentioned above) and :func:`google.auth.default`:
.. code:: python
diff --git a/docs/conf.py b/docs/conf.py
index 09f0c2b..ad4723c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2021 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -314,7 +314,13 @@
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- (root_doc, "google-api-core", "google-api-core Documentation", [author], 1,)
+ (
+ root_doc,
+ "google-api-core",
+ "google-api-core Documentation",
+ [author],
+ 1,
+ )
]
# If true, show URL addresses after external links.
@@ -355,7 +361,10 @@
intersphinx_mapping = {
"python": ("https://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
+ "google.api_core": (
+ "https://googleapis.dev/python/google-api-core/latest/",
+ None,
+ ),
"grpc": ("https://grpc.github.io/grpc/python/", None),
"proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
"protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
diff --git a/docs/retry.rst b/docs/retry.rst
index 97a7f2c..6e165f5 100644
--- a/docs/retry.rst
+++ b/docs/retry.rst
@@ -10,4 +10,5 @@
.. automodule:: google.api_core.retry_async
:members:
+ :noindex:
:show-inheritance:
diff --git a/google/api_core/__init__.py b/google/api_core/__init__.py
index 605dd8b..b80ea37 100644
--- a/google/api_core/__init__.py
+++ b/google/api_core/__init__.py
@@ -14,7 +14,7 @@
"""Google API Core.
-This package contains common code and utilties used by Google client libraries.
+This package contains common code and utilities used by Google client libraries.
"""
from google.api_core import version as api_core_version
diff --git a/google/api_core/_rest_streaming_base.py b/google/api_core/_rest_streaming_base.py
new file mode 100644
index 0000000..3bc87a9
--- /dev/null
+++ b/google/api_core/_rest_streaming_base.py
@@ -0,0 +1,118 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for server-side streaming in REST."""
+
+from collections import deque
+import string
+from typing import Deque, Union
+import types
+
+import proto
+import google.protobuf.message
+from google.protobuf.json_format import Parse
+
+
+class BaseResponseIterator:
+ """Base Iterator over REST API responses. This class should not be used directly.
+
+ Args:
+ response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
+ class expected to be returned from an API.
+
+ Raises:
+ ValueError: If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
+ """
+
+ def __init__(
+ self,
+ response_message_cls: Union[proto.Message, google.protobuf.message.Message],
+ ):
+ self._response_message_cls = response_message_cls
+ # Contains a list of JSON responses ready to be sent to user.
+ self._ready_objs: Deque[str] = deque()
+ # Current JSON response being built.
+ self._obj = ""
+ # Keeps track of the nesting level within a JSON object.
+ self._level = 0
+ # Keeps track whether HTTP response is currently sending values
+ # inside of a string value.
+ self._in_string = False
+ # Whether an escape symbol "\" was encountered.
+ self._escape_next = False
+
+ self._grab = types.MethodType(self._create_grab(), self)
+
+ def _process_chunk(self, chunk: str):
+ if self._level == 0:
+ if chunk[0] != "[":
+ raise ValueError(
+ "Can only parse array of JSON objects, instead got %s" % chunk
+ )
+ for char in chunk:
+ if char == "{":
+ if self._level == 1:
+ # Level 1 corresponds to the outermost JSON object
+ # (i.e. the one we care about).
+ self._obj = ""
+ if not self._in_string:
+ self._level += 1
+ self._obj += char
+ elif char == "}":
+ self._obj += char
+ if not self._in_string:
+ self._level -= 1
+ if not self._in_string and self._level == 1:
+ self._ready_objs.append(self._obj)
+ elif char == '"':
+ # Helps to deal with escaped quotes inside of a string.
+ if not self._escape_next:
+ self._in_string = not self._in_string
+ self._obj += char
+ elif char in string.whitespace:
+ if self._in_string:
+ self._obj += char
+ elif char == "[":
+ if self._level == 0:
+ self._level += 1
+ else:
+ self._obj += char
+ elif char == "]":
+ if self._level == 1:
+ self._level -= 1
+ else:
+ self._obj += char
+ else:
+ self._obj += char
+ self._escape_next = not self._escape_next if char == "\\" else False
+
+ def _create_grab(self):
+ if issubclass(self._response_message_cls, proto.Message):
+
+ def grab(this):
+ return this._response_message_cls.from_json(
+ this._ready_objs.popleft(), ignore_unknown_fields=True
+ )
+
+ return grab
+ elif issubclass(self._response_message_cls, google.protobuf.message.Message):
+
+ def grab(this):
+ return Parse(this._ready_objs.popleft(), this._response_message_cls())
+
+ return grab
+ else:
+ raise ValueError(
+ "Response message class must be a subclass of proto.Message or google.protobuf.message.Message."
+ )
diff --git a/google/api_core/bidi.py b/google/api_core/bidi.py
index 4b4963f..4e800c8 100644
--- a/google/api_core/bidi.py
+++ b/google/api_core/bidi.py
@@ -91,11 +91,9 @@
def _is_active(self):
# Note: there is a possibility that this starts *before* the call
# property is set. So we have to check if self.call is set before
- # seeing if it's active.
- if self.call is not None and not self.call.is_active():
- return False
- else:
- return True
+ # seeing if it's active. We need to return True if self.call is None.
+ # See https://github.com/googleapis/python-api-core/issues/560.
+ return self.call is None or self.call.is_active()
def __iter__(self):
if self._initial_request is not None:
@@ -265,6 +263,10 @@
self._callbacks.append(callback)
def _on_call_done(self, future):
+ # This occurs when the RPC errors or is successfully terminated.
+ # Note that grpc's "future" here can also be a grpc.RpcError.
+ # See note in https://github.com/grpc/grpc/issues/10885#issuecomment-302651331
+ # that `grpc.RpcError` is also `grpc.call`.
for callback in self._callbacks:
callback(future)
@@ -276,7 +278,13 @@
request_generator = _RequestQueueGenerator(
self._request_queue, initial_request=self._initial_request
)
- call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)
+ try:
+ call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata)
+ except exceptions.GoogleAPICallError as exc:
+ # The original `grpc.RpcError` (which is usually also a `grpc.Call`) is
+ # available from the ``response`` property on the mapped exception.
+ self._on_call_done(exc.response)
+ raise
request_generator.call = call
@@ -298,6 +306,8 @@
self._request_queue.put(None)
self.call.cancel()
self._request_generator = None
+ self._initial_request = None
+ self._callbacks = []
# Don't set self.call to None. Keep it around so that send/recv can
# raise the error.
@@ -364,7 +374,7 @@
def should_recover(exc):
return (
isinstance(exc, grpc.RpcError) and
- exc.code() == grpc.StatusCode.UNVAILABLE)
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
initial_request = example_pb2.StreamingRpcRequest(
setting='example')
@@ -589,7 +599,7 @@
def should_recover(exc):
return (
isinstance(exc, grpc.RpcError) and
- exc.code() == grpc.StatusCode.UNVAILABLE)
+ exc.code() == grpc.StatusCode.UNAVAILABLE)
initial_request = example_pb2.StreamingRpcRequest(
setting='example')
@@ -709,6 +719,7 @@
_LOGGER.warning("Background thread did not exit.")
self._thread = None
+ self._on_response = None
@property
def is_active(self):
diff --git a/google/api_core/client_info.py b/google/api_core/client_info.py
index e093ffd..90926be 100644
--- a/google/api_core/client_info.py
+++ b/google/api_core/client_info.py
@@ -21,8 +21,6 @@
import platform
from typing import Union
-import pkg_resources
-
from google.api_core import version as api_core_version
_PY_VERSION = platform.python_version()
@@ -31,8 +29,10 @@
_GRPC_VERSION: Union[str, None]
try:
- _GRPC_VERSION = pkg_resources.get_distribution("grpcio").version
-except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ import grpc
+
+ _GRPC_VERSION = grpc.__version__
+except ImportError: # pragma: NO COVER
_GRPC_VERSION = None
@@ -48,7 +48,7 @@
``'3.9.6'``.
grpc_version (Optional[str]): The gRPC library version.
api_core_version (str): The google-api-core library version.
- gapic_version (Optional[str]): The sversion of gapic-generated client
+ gapic_version (Optional[str]): The version of gapic-generated client
library, if the library was generated by gapic.
client_library_version (Optional[str]): The version of the client
library, generally used if the client library was not generated
@@ -57,7 +57,8 @@
user_agent (Optional[str]): Prefix to the user agent header. This is
used to supply information such as application name or partner tool.
Recommended format: ``application-or-tool-ID/major.minor.version``.
- rest_version (Optional[str]): The requests library version.
+ rest_version (Optional[str]): A string with labeled versions of the
+ dependencies used for REST transport.
"""
def __init__(
diff --git a/google/api_core/client_logging.py b/google/api_core/client_logging.py
new file mode 100644
index 0000000..837e3e0
--- /dev/null
+++ b/google/api_core/client_logging.py
@@ -0,0 +1,144 @@
+import logging
+import json
+import os
+
+from typing import List, Optional
+
+_LOGGING_INITIALIZED = False
+_BASE_LOGGER_NAME = "google"
+
+# Fields to be included in the StructuredLogFormatter.
+#
+# TODO(https://github.com/googleapis/python-api-core/issues/761): Update this list to support additional logging fields.
+_recognized_logging_fields = [
+ "httpRequest",
+ "rpcName",
+ "serviceName",
+ "credentialsType",
+ "credentialsInfo",
+ "universeDomain",
+ "request",
+ "response",
+ "metadata",
+ "retryAttempt",
+ "httpResponse",
+] # Additional fields to be logged.
+
+
+def logger_configured(logger) -> bool:
+ """Determines whether `logger` has non-default configuration
+
+ Args:
+ logger: The logger to check.
+
+ Returns:
+ bool: Whether the logger has any non-default configuration.
+ """
+ return (
+ logger.handlers != [] or logger.level != logging.NOTSET or not logger.propagate
+ )
+
+
+def initialize_logging():
+ """Initializes "google" loggers, partly based on the environment variable
+
+ Initializes the "google" logger and any loggers (at the "google"
+ level or lower) specified by the environment variable
+ GOOGLE_SDK_PYTHON_LOGGING_SCOPE, as long as none of these loggers
+ were previously configured. If any such loggers (including the
+ "google" logger) are initialized, they are set to NOT propagate
+ log events up to their parent loggers.
+
+ This initialization is executed only once, and hence the
+ environment variable is only processed the first time this
+ function is called.
+ """
+ global _LOGGING_INITIALIZED
+ if _LOGGING_INITIALIZED:
+ return
+ scopes = os.getenv("GOOGLE_SDK_PYTHON_LOGGING_SCOPE", "")
+ setup_logging(scopes)
+ _LOGGING_INITIALIZED = True
+
+
+def parse_logging_scopes(scopes: Optional[str] = None) -> List[str]:
+ """Returns a list of logger names.
+
+ Splits the single string of comma-separated logger names into a list of individual logger name strings.
+
+ Args:
+ scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.)
+
+ Returns:
+ A list of all the logger names in scopes.
+ """
+ if not scopes:
+ return []
+ # TODO(https://github.com/googleapis/python-api-core/issues/759): check if the namespace is a valid namespace.
+ # TODO(b/380481951): Support logging multiple scopes.
+ # TODO(b/380483756): Raise or log a warning for an invalid scope.
+ namespaces = [scopes]
+ return namespaces
+
+
+def configure_defaults(logger):
+ """Configures `logger` to emit structured info to stdout."""
+ if not logger_configured(logger):
+ console_handler = logging.StreamHandler()
+ logger.setLevel("DEBUG")
+ logger.propagate = False
+ formatter = StructuredLogFormatter()
+ console_handler.setFormatter(formatter)
+ logger.addHandler(console_handler)
+
+
+def setup_logging(scopes: str = ""):
+ """Sets up logging for the specified `scopes`.
+
+ If the loggers specified in `scopes` have not been previously
+ configured, this will configure them to emit structured log
+ entries to stdout, and to not propagate their log events to their
+ parent loggers. Additionally, if the "google" logger (whether it
+ was specified in `scopes` or not) was not previously configured,
+ it will also configure it to not propagate log events to the root
+ logger.
+
+ Args:
+ scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.)
+
+ """
+
+ # only returns valid logger scopes (namespaces)
+ # this list has at most one element.
+ logger_names = parse_logging_scopes(scopes)
+
+ for namespace in logger_names:
+ # This will either create a module level logger or get the reference of the base logger instantiated above.
+ logger = logging.getLogger(namespace)
+
+ # Configure default settings.
+ configure_defaults(logger)
+
+ # disable log propagation at base logger level to the root logger only if a base logger is not already configured via code changes.
+ base_logger = logging.getLogger(_BASE_LOGGER_NAME)
+ if not logger_configured(base_logger):
+ base_logger.propagate = False
+
+
+# TODO(https://github.com/googleapis/python-api-core/issues/763): Expand documentation.
+class StructuredLogFormatter(logging.Formatter):
+ # TODO(https://github.com/googleapis/python-api-core/issues/761): ensure that additional fields such as
+ # function name, file name, and line no. appear in a log output.
+ def format(self, record: logging.LogRecord):
+ log_obj = {
+ "timestamp": self.formatTime(record),
+ "severity": record.levelname,
+ "name": record.name,
+ "message": record.getMessage(),
+ }
+
+ for field_name in _recognized_logging_fields:
+ value = getattr(record, field_name, None)
+ if value is not None:
+ log_obj[field_name] = value
+ return json.dumps(log_obj)
diff --git a/google/api_core/client_options.py b/google/api_core/client_options.py
index be5523d..d11665d 100644
--- a/google/api_core/client_options.py
+++ b/google/api_core/client_options.py
@@ -48,6 +48,8 @@
"""
+from typing import Callable, Mapping, Optional, Sequence, Tuple
+
class ClientOptions(object):
"""Client Options used to set options on clients.
@@ -55,49 +57,84 @@
Args:
api_endpoint (Optional[str]): The desired API endpoint, e.g.,
compute.googleapis.com
- client_cert_source (Optional[Callable[[], (bytes, bytes)]]): A callback
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback
which returns client certificate bytes and private key bytes both in
PEM format. ``client_cert_source`` and ``client_encrypted_cert_source``
are mutually exclusive.
- client_encrypted_cert_source (Optional[Callable[[], (str, str, bytes)]]):
+ client_encrypted_cert_source (Optional[Callable[[], Tuple[str, str, bytes]]]):
A callback which returns client certificate file path, encrypted
private key file path, and the passphrase bytes.``client_cert_source``
and ``client_encrypted_cert_source`` are mutually exclusive.
quota_project_id (Optional[str]): A project name that a client's
quota belongs to.
credentials_file (Optional[str]): A path to a file storing credentials.
+ ``credentials_file`` and ``api_key`` are mutually exclusive.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
scopes (Optional[Sequence[str]]): OAuth access token override scopes.
+ api_key (Optional[str]): Google API key. ``credentials_file`` and
+ ``api_key`` are mutually exclusive.
+ api_audience (Optional[str]): The intended audience for the API calls
+ to the service that will be set when using certain 3rd party
+ authentication flows. Audience is typically a resource identifier.
+ If not set, the service endpoint value will be used as a default.
+ An example of a valid ``api_audience`` is: "https://language.googleapis.com".
+ universe_domain (Optional[str]): The desired universe domain. This must match
+ the one in credentials. If not set, the default universe domain is
+ `googleapis.com`. If both `api_endpoint` and `universe_domain` are set,
+ then `api_endpoint` is used as the service endpoint. If `api_endpoint` is
+ not specified, the format will be `{service}.{universe_domain}`.
Raises:
ValueError: If both ``client_cert_source`` and ``client_encrypted_cert_source``
- are provided.
+ are provided, or both ``credentials_file`` and ``api_key`` are provided.
"""
def __init__(
self,
- api_endpoint=None,
- client_cert_source=None,
- client_encrypted_cert_source=None,
- quota_project_id=None,
- credentials_file=None,
- scopes=None,
+ api_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ client_encrypted_cert_source: Optional[
+ Callable[[], Tuple[str, str, bytes]]
+ ] = None,
+ quota_project_id: Optional[str] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ api_key: Optional[str] = None,
+ api_audience: Optional[str] = None,
+ universe_domain: Optional[str] = None,
):
if client_cert_source and client_encrypted_cert_source:
raise ValueError(
"client_cert_source and client_encrypted_cert_source are mutually exclusive"
)
+ if api_key and credentials_file:
+ raise ValueError("api_key and credentials_file are mutually exclusive")
self.api_endpoint = api_endpoint
self.client_cert_source = client_cert_source
self.client_encrypted_cert_source = client_encrypted_cert_source
self.quota_project_id = quota_project_id
self.credentials_file = credentials_file
self.scopes = scopes
+ self.api_key = api_key
+ self.api_audience = api_audience
+ self.universe_domain = universe_domain
- def __repr__(self):
+ def __repr__(self) -> str:
return "ClientOptions: " + repr(self.__dict__)
-def from_dict(options):
+def from_dict(options: Mapping[str, object]) -> ClientOptions:
"""Construct a client options object from a mapping object.
Args:
diff --git a/google/api_core/datetime_helpers.py b/google/api_core/datetime_helpers.py
index 78268ef..c379230 100644
--- a/google/api_core/datetime_helpers.py
+++ b/google/api_core/datetime_helpers.py
@@ -42,7 +42,7 @@
def utcnow():
"""A :meth:`datetime.datetime.utcnow()` alias to allow mocking in tests."""
- return datetime.datetime.utcnow()
+ return datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None)
def to_milliseconds(value):
@@ -151,7 +151,7 @@
micros = 0
else:
scale = 9 - len(fraction)
- nanos = int(fraction) * (10 ** scale)
+ nanos = int(fraction) * (10**scale)
micros = nanos // 1000
return bare_seconds.replace(microsecond=micros, tzinfo=datetime.timezone.utc)
@@ -170,7 +170,7 @@
datetime object is ignored and the datetime is treated as UTC.
Returns:
- str: The RFC3339 formated string representing the datetime.
+ str: The RFC3339 formatted string representing the datetime.
"""
if not ignore_zone and value.tzinfo is not None:
# Convert to UTC and remove the time zone info.
@@ -245,7 +245,7 @@
nanos = 0
else:
scale = 9 - len(fraction)
- nanos = int(fraction) * (10 ** scale)
+ nanos = int(fraction) * (10**scale)
return cls(
bare.year,
bare.month,
diff --git a/google/api_core/exceptions.py b/google/api_core/exceptions.py
index 6b1b6f7..e3eb696 100644
--- a/google/api_core/exceptions.py
+++ b/google/api_core/exceptions.py
@@ -22,17 +22,30 @@
from __future__ import unicode_literals
import http.client
-from typing import Dict
+from typing import Optional, Dict
from typing import Union
+import warnings
from google.rpc import error_details_pb2
+
+def _warn_could_not_import_grpcio_status():
+ warnings.warn(
+ "Please install grpcio-status to obtain helpful grpc error messages.",
+ ImportWarning,
+ ) # pragma: NO COVER
+
+
try:
import grpc
- from grpc_status import rpc_status
+
+ try:
+ from grpc_status import rpc_status
+ except ImportError: # pragma: NO COVER
+ _warn_could_not_import_grpcio_status()
+ rpc_status = None
except ImportError: # pragma: NO COVER
grpc = None
- rpc_status = None
# Lookup tables for mapping exceptions from HTTP and gRPC transports.
# Populated by _GoogleAPICallErrorMeta
@@ -65,7 +78,7 @@
Args:
message (str): The exception message.
- cause (Exception): The last exception raised when retring the
+ cause (Exception): The last exception raised when retrying the
function.
"""
@@ -104,6 +117,8 @@
details (Sequence[Any]): An optional list of objects defined in google.rpc.error_details.
response (Union[requests.Request, grpc.Call]): The response or
gRPC call metadata.
+ error_info (Union[error_details_pb2.ErrorInfo, None]): An optional object containing error info
+ (google.rpc.error_details.ErrorInfo).
"""
code: Union[int, None] = None
@@ -122,19 +137,67 @@
This may be ``None`` if the exception does not match up to a gRPC error.
"""
- def __init__(self, message, errors=(), details=(), response=None):
+ def __init__(self, message, errors=(), details=(), response=None, error_info=None):
super(GoogleAPICallError, self).__init__(message)
self.message = message
"""str: The exception message."""
self._errors = errors
self._details = details
self._response = response
+ self._error_info = error_info
def __str__(self):
+ error_msg = "{} {}".format(self.code, self.message)
if self.details:
- return "{} {} {}".format(self.code, self.message, self.details)
+ error_msg = "{} {}".format(error_msg, self.details)
+ # Note: This else condition can be removed once proposal A from
+ # b/284179390 is implemented.
else:
- return "{} {}".format(self.code, self.message)
+ if self.errors:
+ errors = [
+ f"{error.code}: {error.message}"
+ for error in self.errors
+ if hasattr(error, "code") and hasattr(error, "message")
+ ]
+ if errors:
+ error_msg = "{} {}".format(error_msg, "\n".join(errors))
+ return error_msg
+
+ @property
+ def reason(self):
+ """The reason of the error.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[str, None]: An optional string containing reason of the error.
+ """
+ return self._error_info.reason if self._error_info else None
+
+ @property
+ def domain(self):
+ """The logical grouping to which the "reason" belongs.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[str, None]: An optional string containing a logical grouping to which the "reason" belongs.
+ """
+ return self._error_info.domain if self._error_info else None
+
+ @property
+ def metadata(self):
+ """Additional structured details about this error.
+
+ Reference:
+ https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112
+
+ Returns:
+ Union[Dict[str, str], None]: An optional object containing structured details about the error.
+ """
+ return self._error_info.metadata if self._error_info else None
@property
def errors(self):
@@ -155,7 +218,7 @@
Returns:
Sequence[Any]: A list of structured objects from error_details.proto
- """
+ """
return list(self._details)
@property
@@ -379,6 +442,12 @@
grpc_status_code = grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None
+class AsyncRestUnsupportedParameterError(NotImplementedError):
+ """Raised when an unsupported parameter is configured against async rest transport."""
+
+ pass
+
+
def exception_class_for_http_status(status_code):
"""Return the exception class for a specific HTTP status code.
@@ -413,6 +482,62 @@
return error
+def _format_rest_error_message(error, method, url):
+ method = method.upper() if method else None
+ message = "{method} {url}: {error}".format(
+ method=method,
+ url=url,
+ error=error,
+ )
+ return message
+
+
+# NOTE: We're moving away from `from_http_status` because it expects an aiohttp response compared
+# to `format_http_response_error` which expects a more abstract response from google.auth and is
+# compatible with both sync and async response types.
+# TODO(https://github.com/googleapis/python-api-core/issues/691): Add type hint for response.
+def format_http_response_error(
+ response, method: str, url: str, payload: Optional[Dict] = None
+):
+ """Create a :class:`GoogleAPICallError` from a google auth rest response.
+
+ Args:
+ response Union[google.auth.transport.Response, google.auth.aio.transport.Response]: The HTTP response.
+ method Optional(str): The HTTP request method.
+ url Optional(str): The HTTP request url.
+ payload Optional(dict): The HTTP response payload. If not passed in, it is read from response for a response type of google.auth.transport.Response.
+
+ Returns:
+ GoogleAPICallError: An instance of the appropriate subclass of
+ :class:`GoogleAPICallError`, with the message and errors populated
+ from the response.
+ """
+ payload = {} if not payload else payload
+ error_message = payload.get("error", {}).get("message", "unknown error")
+ errors = payload.get("error", {}).get("errors", ())
+ # In JSON, details are already formatted in developer-friendly way.
+ details = payload.get("error", {}).get("details", ())
+ error_info_list = list(
+ filter(
+ lambda detail: detail.get("@type", "")
+ == "type.googleapis.com/google.rpc.ErrorInfo",
+ details,
+ )
+ )
+ error_info = error_info_list[0] if error_info_list else None
+ message = _format_rest_error_message(error_message, method, url)
+
+ exception = from_http_status(
+ response.status_code,
+ message,
+ errors=errors,
+ details=details,
+ response=response,
+ error_info=error_info,
+ )
+ return exception
+
+
def from_http_response(response):
"""Create a :class:`GoogleAPICallError` from a :class:`requests.Response`.
@@ -428,21 +553,10 @@
payload = response.json()
except ValueError:
payload = {"error": {"message": response.text or "unknown error"}}
-
- error_message = payload.get("error", {}).get("message", "unknown error")
- errors = payload.get("error", {}).get("errors", ())
- # In JSON, details are already formatted in developer-friendly way.
- details = payload.get("error", {}).get("details", ())
-
- message = "{method} {url}: {error}".format(
- method=response.request.method, url=response.request.url, error=error_message
+ return format_http_response_error(
+ response, response.request.method, response.request.url, payload
)
- exception = from_http_status(
- response.status_code, message, errors=errors, details=details, response=response
- )
- return exception
-
def exception_class_for_grpc_status(status_code):
"""Return the exception class for a specific :class:`grpc.StatusCode`.
@@ -487,13 +601,16 @@
def _parse_grpc_error_details(rpc_exc):
+ if not rpc_status: # pragma: NO COVER
+ _warn_could_not_import_grpcio_status()
+ return [], None
try:
status = rpc_status.from_call(rpc_exc)
except NotImplementedError: # workaround
- return []
+ return [], None
if not status:
- return []
+ return [], None
possible_errors = [
error_details_pb2.BadRequest,
@@ -507,6 +624,7 @@
error_details_pb2.Help,
error_details_pb2.LocalizedMessage,
]
+ error_info = None
error_details = []
for detail in status.details:
matched_detail_cls = list(
@@ -519,7 +637,9 @@
info = matched_detail_cls[0]()
detail.Unpack(info)
error_details.append(info)
- return error_details
+ if isinstance(info, error_details_pb2.ErrorInfo):
+ error_info = info
+ return error_details, error_info
def from_grpc_error(rpc_exc):
@@ -534,13 +654,17 @@
"""
# NOTE(lidiz) All gRPC error shares the parent class grpc.RpcError.
# However, check for grpc.RpcError breaks backward compatibility.
- if isinstance(rpc_exc, grpc.Call) or _is_informative_grpc_error(rpc_exc):
+ if (
+ grpc is not None and isinstance(rpc_exc, grpc.Call)
+ ) or _is_informative_grpc_error(rpc_exc):
+ details, err_info = _parse_grpc_error_details(rpc_exc)
return from_grpc_status(
rpc_exc.code(),
rpc_exc.details(),
errors=(rpc_exc,),
- details=_parse_grpc_error_details(rpc_exc),
+ details=details,
response=rpc_exc,
+ error_info=err_info,
)
else:
return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc)
diff --git a/google/api_core/extended_operation.py b/google/api_core/extended_operation.py
new file mode 100644
index 0000000..d474632
--- /dev/null
+++ b/google/api_core/extended_operation.py
@@ -0,0 +1,225 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Futures for extended long-running operations returned from Google Cloud APIs.
+
+These futures can be used to synchronously wait for the result of a
+long-running operations using :meth:`ExtendedOperation.result`:
+
+.. code-block:: python
+
+ extended_operation = my_api_client.long_running_method()
+
+ extended_operation.result()
+
+Or asynchronously using callbacks and :meth:`Operation.add_done_callback`:
+
+.. code-block:: python
+
+ extended_operation = my_api_client.long_running_method()
+
+ def my_callback(ex_op):
+ print(f"Operation {ex_op.name} completed")
+
+ extended_operation.add_done_callback(my_callback)
+
+"""
+
+import threading
+
+from google.api_core import exceptions
+from google.api_core.future import polling
+
+
+class ExtendedOperation(polling.PollingFuture):
+ """An ExtendedOperation future for interacting with a Google API Long-Running Operation.
+
+ Args:
+ extended_operation (proto.Message): The initial operation.
+ refresh (Callable[[], type(extended_operation)]): A callable that returns
+ the latest state of the operation.
+ cancel (Callable[[], None]): A callable that tries to cancel the operation.
+ polling Optional(google.api_core.retry.Retry): The configuration used
+ for polling. This can be used to control how often :meth:`done`
+ is polled. If the ``timeout`` argument to :meth:`result` is
+ specified it will override the ``polling.timeout`` property.
+ retry Optional(google.api_core.retry.Retry): DEPRECATED use ``polling``
+ instead. If specified it will override ``polling`` parameter to
+ maintain backward compatibility.
+
+ Note: Most long-running API methods use google.api_core.operation.Operation
+ This class is a wrapper for a subset of methods that use alternative
+ Long-Running Operation (LRO) semantics.
+
+ Note: there is not a concrete type the extended operation must be.
+ It MUST have fields that correspond to the following, POSSIBLY WITH DIFFERENT NAMES:
+ * name: str
+ * status: Union[str, bool, enum.Enum]
+ * error_code: int
+ * error_message: str
+ """
+
+ def __init__(
+ self,
+ extended_operation,
+ refresh,
+ cancel,
+ polling=polling.DEFAULT_POLLING,
+ **kwargs,
+ ):
+ super().__init__(polling=polling, **kwargs)
+ self._extended_operation = extended_operation
+ self._refresh = refresh
+ self._cancel = cancel
+ # Note: the extended operation does not give a good way to indicate cancellation.
+ # We make do with manually tracking cancellation and checking for doneness.
+ self._cancelled = False
+ self._completion_lock = threading.Lock()
+ # Invoke in case the operation came back already complete.
+ self._handle_refreshed_operation()
+
+ # Note: the following four properties MUST be overridden in a subclass
+ # if, and only if, the fields in the corresponding extended operation message
+ # have different names.
+ #
+ # E.g. we have an extended operation class that looks like
+ #
+ # class MyOperation(proto.Message):
+ # moniker = proto.Field(proto.STRING, number=1)
+ # status_msg = proto.Field(proto.STRING, number=2)
+ # optional http_error_code = proto.Field(proto.INT32, number=3)
+ # optional http_error_msg = proto.Field(proto.STRING, number=4)
+ #
+ # the ExtendedOperation subclass would provide property overrides that map
+ # to these (poorly named) fields.
+ @property
+ def name(self):
+ return self._extended_operation.name
+
+ @property
+ def status(self):
+ return self._extended_operation.status
+
+ @property
+ def error_code(self):
+ return self._extended_operation.error_code
+
+ @property
+ def error_message(self):
+ return self._extended_operation.error_message
+
+ def __getattr__(self, name):
+ return getattr(self._extended_operation, name)
+
+ def done(self, retry=None):
+ self._refresh_and_update(retry)
+ return self._extended_operation.done
+
+ def cancel(self):
+ if self.done():
+ return False
+
+ self._cancel()
+ self._cancelled = True
+ return True
+
+ def cancelled(self):
+ # TODO(dovs): there is not currently a good way to determine whether the
+ # operation has been cancelled.
+ # The best we can do is manually keep track of cancellation
+ # and check for doneness.
+ if not self._cancelled:
+ return False
+
+ self._refresh_and_update()
+ return self._extended_operation.done
+
+ def _refresh_and_update(self, retry=None):
+ if not self._extended_operation.done:
+ self._extended_operation = (
+ self._refresh(retry=retry) if retry else self._refresh()
+ )
+ self._handle_refreshed_operation()
+
+ def _handle_refreshed_operation(self):
+ with self._completion_lock:
+ if not self._extended_operation.done:
+ return
+
+ if self.error_code and self.error_message:
+ # Note: `errors` can be removed once proposal A from
+ # b/284179390 is implemented.
+ errors = []
+ if hasattr(self, "error") and hasattr(self.error, "errors"):
+ errors = self.error.errors
+ exception = exceptions.from_http_status(
+ status_code=self.error_code,
+ message=self.error_message,
+ response=self._extended_operation,
+ errors=errors,
+ )
+ self.set_exception(exception)
+ elif self.error_code or self.error_message:
+ exception = exceptions.GoogleAPICallError(
+ f"Unexpected error {self.error_code}: {self.error_message}"
+ )
+ self.set_exception(exception)
+ else:
+ # Extended operations have no payload.
+ self.set_result(None)
+
+ @classmethod
+ def make(cls, refresh, cancel, extended_operation, **kwargs):
+ """
+ Return an instantiated ExtendedOperation (or child) that wraps
+ * a refresh callable
+ * a cancel callable (can be a no-op)
+ * an initial result
+
+ .. note::
+ It is the caller's responsibility to set up refresh and cancel
+ with their correct request argument.
+ The reason for this is that the services that use Extended Operations
+ have rpcs that look something like the following:
+
+ // service.proto
+ service MyLongService {
+ rpc StartLongTask(StartLongTaskRequest) returns (ExtendedOperation) {
+ option (google.cloud.operation_service) = "CustomOperationService";
+ }
+ }
+
+ service CustomOperationService {
+ rpc Get(GetOperationRequest) returns (ExtendedOperation) {
+ option (google.cloud.operation_polling_method) = true;
+ }
+ }
+
+ Any info needed for the poll, e.g. a name, path params, etc.
+ is held in the request, which the initial client method is in a much
+ better position to make because the caller made the initial request.
+
+ TL;DR: the caller sets up closures for refresh and cancel that carry
+ the properly configured requests.
+
+ Args:
+ refresh (Callable[Optional[Retry]][type(extended_operation)]): A callable that
+ returns the latest state of the operation.
+ cancel (Callable[][Any]): A callable that tries to cancel the operation
+ on a best effort basis.
+ extended_operation (Any): The initial response of the long running method.
+ See the docstring for ExtendedOperation.__init__ for requirements on
+ the type and fields of extended_operation
+ """
+ return cls(extended_operation, refresh, cancel, **kwargs)
diff --git a/google/api_core/future/async_future.py b/google/api_core/future/async_future.py
index 88c183f..325ee9c 100644
--- a/google/api_core/future/async_future.py
+++ b/google/api_core/future/async_future.py
@@ -95,7 +95,7 @@
if self._future.done():
return
- retry_ = self._retry.with_deadline(timeout)
+ retry_ = self._retry.with_timeout(timeout)
try:
await retry_(self._done_or_raise)()
diff --git a/google/api_core/future/polling.py b/google/api_core/future/polling.py
index 02e680f..f1e2a18 100644
--- a/google/api_core/future/polling.py
+++ b/google/api_core/future/polling.py
@@ -18,7 +18,7 @@
import concurrent.futures
from google.api_core import exceptions
-from google.api_core import retry
+from google.api_core import retry as retries
from google.api_core.future import _helpers
from google.api_core.future import base
@@ -29,14 +29,37 @@
pass
-RETRY_PREDICATE = retry.if_exception_type(
+# DEPRECATED as it conflates RPC retry and polling concepts into one.
+# Use POLLING_PREDICATE instead to configure polling.
+RETRY_PREDICATE = retries.if_exception_type(
_OperationNotComplete,
exceptions.TooManyRequests,
exceptions.InternalServerError,
exceptions.BadGateway,
exceptions.ServiceUnavailable,
)
-DEFAULT_RETRY = retry.Retry(predicate=RETRY_PREDICATE)
+
+# DEPRECATED: use DEFAULT_POLLING to configure LRO polling logic. Construct
+# Retry object using its default values as a baseline for any custom retry logic
+# (not to be confused with polling logic).
+DEFAULT_RETRY = retries.Retry(predicate=RETRY_PREDICATE)
+
+# POLLING_PREDICATE is supposed to poll only on _OperationNotComplete.
+# Any RPC-specific errors (like ServiceUnavailable) will be handled
+# by retry logic (not to be confused with polling logic) which is triggered for
+# every polling RPC independently of polling logic but within its context.
+POLLING_PREDICATE = retries.if_exception_type(
+ _OperationNotComplete,
+)
+
+# Default polling configuration
+DEFAULT_POLLING = retries.Retry(
+ predicate=POLLING_PREDICATE,
+ initial=1.0, # seconds
+ maximum=20.0, # seconds
+ multiplier=1.5,
+ timeout=900, # seconds
+)
class PollingFuture(base.Future):
@@ -45,21 +68,29 @@
The :meth:`done` method should be implemented by subclasses. The polling
behavior will repeatedly call ``done`` until it returns True.
+ The actual polling logic is encapsulated in :meth:`result` method. See
+ documentation for that method for details on how polling works.
+
.. note::
Privacy here is intended to prevent the final class from
overexposing, not to prevent subclasses from accessing methods.
Args:
- retry (google.api_core.retry.Retry): The retry configuration used
- when polling. This can be used to control how often :meth:`done`
- is polled. Regardless of the retry's ``deadline``, it will be
- overridden by the ``timeout`` argument to :meth:`result`.
+ polling (google.api_core.retry.Retry): The configuration used for polling.
+ This parameter controls how often :meth:`done` is polled. If the
+ ``timeout`` argument is specified in :meth:`result` method it will
+ override the ``polling.timeout`` property.
+ retry (google.api_core.retry.Retry): DEPRECATED use ``polling`` instead.
+ If set, it will override ``polling`` parameter for backward
+ compatibility.
"""
- def __init__(self, retry=DEFAULT_RETRY):
+ _DEFAULT_VALUE = object()
+
+ def __init__(self, polling=DEFAULT_POLLING, **kwargs):
super(PollingFuture, self).__init__()
- self._retry = retry
+ self._polling = kwargs.get("retry", polling)
self._result = None
self._exception = None
self._result_set = False
@@ -69,11 +100,13 @@
self._done_callbacks = []
@abc.abstractmethod
- def done(self, retry=DEFAULT_RETRY):
+ def done(self, retry=None):
"""Checks to see if the operation is complete.
Args:
- retry (google.api_core.retry.Retry): (Optional) How to retry the RPC.
+ retry (google.api_core.retry.Retry): (Optional) How to retry the
+ polling RPC (to not be confused with polling configuration. See
+ the documentation for :meth:`result` for details).
Returns:
bool: True if the operation is complete, False otherwise.
@@ -81,45 +114,136 @@
# pylint: disable=redundant-returns-doc, missing-raises-doc
raise NotImplementedError()
- def _done_or_raise(self, retry=DEFAULT_RETRY):
+ def _done_or_raise(self, retry=None):
"""Check if the future is done and raise if it's not."""
- kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry}
-
- if not self.done(**kwargs):
+ if not self.done(retry=retry):
raise _OperationNotComplete()
def running(self):
"""True if the operation is currently running."""
return not self.done()
- def _blocking_poll(self, timeout=None, retry=DEFAULT_RETRY):
- """Poll and wait for the Future to be resolved.
+ def _blocking_poll(self, timeout=_DEFAULT_VALUE, retry=None, polling=None):
+ """Poll and wait for the Future to be resolved."""
- Args:
- timeout (int):
- How long (in seconds) to wait for the operation to complete.
- If None, wait indefinitely.
- """
if self._result_set:
return
- retry_ = self._retry.with_deadline(timeout)
+ polling = polling or self._polling
+ if timeout is not PollingFuture._DEFAULT_VALUE:
+ polling = polling.with_timeout(timeout)
try:
- kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry}
- retry_(self._done_or_raise)(**kwargs)
+ polling(self._done_or_raise)(retry=retry)
except exceptions.RetryError:
raise concurrent.futures.TimeoutError(
- "Operation did not complete within the designated " "timeout."
+ f"Operation did not complete within the designated timeout of "
+ f"{polling.timeout} seconds."
)
- def result(self, timeout=None, retry=DEFAULT_RETRY):
- """Get the result of the operation, blocking if necessary.
+ def result(self, timeout=_DEFAULT_VALUE, retry=None, polling=None):
+ """Get the result of the operation.
+
+ This method will poll for operation status periodically, blocking if
+ necessary. If you just want to make sure that this method does not block
+ for more than X seconds and you do not care about the nitty-gritty of
+ how this method operates, just call it with ``result(timeout=X)``. The
+ other parameters are for advanced use only.
+
+ Every call to this method is controlled by the following three
+ parameters, each of which has a specific, distinct role, even though all three
+ may look very similar: ``timeout``, ``retry`` and ``polling``. In most
+ cases users do not need to specify any custom values for any of these
+ parameters and may simply rely on default ones instead.
+
+ If you choose to specify custom parameters, please make sure you've
+ read the documentation below carefully.
+
+ First, please check :class:`google.api_core.retry.Retry`
+ class documentation for the proper definition of timeout and deadline
+ terms and for the definition of the three different types of timeouts.
+ This class operates in terms of Retry Timeout and Polling Timeout. It
+ does not let customizing RPC timeout and the user is expected to rely on
+ default behavior for it.
+
+ The roles of each argument of this method are as follows:
+
+ ``timeout`` (int): (Optional) The Polling Timeout as defined in
+ :class:`google.api_core.retry.Retry`. If the operation does not complete
+ within this timeout an exception will be thrown. This parameter affects
+ neither Retry Timeout nor RPC Timeout.
+
+ ``retry`` (google.api_core.retry.Retry): (Optional) How to retry the
+ polling RPC. The ``retry.timeout`` property of this parameter is the
+ Retry Timeout as defined in :class:`google.api_core.retry.Retry`.
+ This parameter defines ONLY how the polling RPC call is retried
+ (i.e. what to do if the RPC we used for polling returned an error). It
+ does NOT define how the polling is done (i.e. how frequently and for
+ how long to call the polling RPC); use the ``polling`` parameter for that.
+ If a polling RPC throws an error and retrying it fails, the whole
+ future fails with the corresponding exception. If you want to tune which
+ server response error codes are not fatal for operation polling, use this
+ parameter to control that (``retry.predicate`` in particular).
+
+ ``polling`` (google.api_core.retry.Retry): (Optional) How often and
+ for how long to call the polling RPC periodically (i.e. what to do if
+ a polling rpc returned successfully but its returned result indicates
+ that the long running operation is not completed yet, so we need to
+ check it again at some point in future). This parameter does NOT define
+ how to retry each individual polling RPC in case of an error; use the
+ ``retry`` parameter for that. The ``polling.timeout`` of this parameter
+ is Polling Timeout as defined in
+ :class:`google.api_core.retry.Retry`.
+
+ For each of the arguments, there are also default values in place, which
+ will be used if a user does not specify their own. The default values
+ for the three parameters are not to be confused with the default values
+ for the corresponding arguments in this method (those serve as "not set"
+ markers for the resolution logic).
+
+ If ``timeout`` is provided (i.e. ``timeout is not _DEFAULT_VALUE``; note
+ the ``None`` value means "infinite timeout"), it will be used to control
+ the actual Polling Timeout. Otherwise, the ``polling.timeout`` value
+ will be used instead (see below for how the ``polling`` config itself
+ gets resolved). In other words, this parameter effectively overrides
+ the ``polling.timeout`` value if specified. This is so to preserve
+ backward compatibility.
+
+ If ``retry`` is provided (i.e. ``retry is not None``) it will be used to
+ control retry behavior for the polling RPC and the ``retry.timeout``
+ will determine the Retry Timeout. If not provided, the
+ polling RPC will be called with whichever default retry config was
+ specified for the polling RPC at the moment of the construction of the
+ polling RPC's client. For example, if the polling RPC is
+ ``operations_client.get_operation()``, the ``retry`` parameter will be
+ controlling its retry behavior (not polling behavior) and, if not
+ specified, that specific method (``operations_client.get_operation()``)
+ will be retried according to the default retry config provided during
+ creation of ``operations_client`` client instead. This argument exists
+ mainly for backward compatibility; users are very unlikely to ever need
+ to set this parameter explicitly.
+
+ If ``polling`` is provided (i.e. ``polling is not None``), it will be used
+ to control the overall polling behavior and ``polling.timeout`` will
+ control Polling Timeout unless it is overridden by ``timeout`` parameter
+ as described above. If not provided, the ``polling`` parameter specified
+ during construction of this future (the ``polling`` argument in the
+ constructor) will be used instead. Note: since the ``timeout`` argument may
+ override ``polling.timeout`` value, this parameter should be viewed as
+ coupled with the ``timeout`` parameter as described above.
Args:
- timeout (int):
- How long (in seconds) to wait for the operation to complete.
- If None, wait indefinitely.
+ timeout (int): (Optional) How long (in seconds) to wait for the
+ operation to complete. If None, wait indefinitely.
+ retry (google.api_core.retry.Retry): (Optional) How to retry the
+ polling RPC. This defines ONLY how the polling RPC call is
+ retried (i.e. what to do if the RPC we used for polling returned
+ an error). It does NOT define how the polling is done (i.e. how
+ frequently and for how long to call the polling RPC).
+ polling (google.api_core.retry.Retry): (Optional) How often and
+ for how long to call polling RPC periodically. This parameter
+ does NOT define how to retry each individual polling RPC call
+ (use the ``retry`` parameter for that).
Returns:
google.protobuf.Message: The Operation's result.
@@ -128,8 +252,8 @@
google.api_core.GoogleAPICallError: If the operation errors or if
the timeout is reached before the operation completes.
"""
- kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry}
- self._blocking_poll(timeout=timeout, **kwargs)
+
+ self._blocking_poll(timeout=timeout, retry=retry, polling=polling)
if self._exception is not None:
# pylint: disable=raising-bad-type
@@ -138,12 +262,18 @@
return self._result
- def exception(self, timeout=None):
+ def exception(self, timeout=_DEFAULT_VALUE):
"""Get the exception from the operation, blocking if necessary.
+ See the documentation for the :meth:`result` method for details on how
+ this method operates, as both ``result`` and this method rely on the
+ exact same polling logic. The only difference is that this method does
+ not accept ``retry`` and ``polling`` arguments but relies on the default ones
+ instead.
+
Args:
timeout (int): How long to wait for the operation to complete.
- If None, wait indefinitely.
+ If None, wait indefinitely.
Returns:
Optional[google.api_core.GoogleAPICallError]: The operation's
diff --git a/google/api_core/gapic_v1/client_info.py b/google/api_core/gapic_v1/client_info.py
index fab0f54..4516f33 100644
--- a/google/api_core/gapic_v1/client_info.py
+++ b/google/api_core/gapic_v1/client_info.py
@@ -36,7 +36,7 @@
``'3.9.6'``.
grpc_version (Optional[str]): The gRPC library version.
api_core_version (str): The google-api-core library version.
- gapic_version (Optional[str]): The sversion of gapic-generated client
+ gapic_version (Optional[str]): The version of gapic-generated client
library, if the library was generated by gapic.
client_library_version (Optional[str]): The version of the client
library, generally used if the client library was not generated
@@ -45,6 +45,8 @@
user_agent (Optional[str]): Prefix to the user agent header. This is
used to supply information such as application name or partner tool.
Recommended format: ``application-or-tool-ID/major.minor.version``.
+ rest_version (Optional[str]): A string with labeled versions of the
+ dependencies used for REST transport.
"""
def to_grpc_metadata(self):
diff --git a/google/api_core/gapic_v1/config.py b/google/api_core/gapic_v1/config.py
index 9c72287..36b50d9 100644
--- a/google/api_core/gapic_v1/config.py
+++ b/google/api_core/gapic_v1/config.py
@@ -33,6 +33,9 @@
def _exception_class_for_grpc_status_name(name):
"""Returns the Google API exception class for a gRPC error code name.
+ DEPRECATED: use ``exceptions.exception_class_for_grpc_status`` method
+ directly instead.
+
Args:
name (str): The name of the gRPC status code, for example,
``UNAVAILABLE``.
@@ -47,6 +50,8 @@
def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry):
"""Creates a Retry object given a gapic retry configuration.
+ DEPRECATED: instantiate retry and timeout classes directly instead.
+
Args:
retry_params (dict): The retry parameter values, for example::
@@ -81,6 +86,8 @@
def _timeout_from_retry_config(retry_params):
"""Creates a ExponentialTimeout object given a gapic retry configuration.
+ DEPRECATED: instantiate retry and timeout classes directly instead.
+
Args:
retry_params (dict): The retry parameter values, for example::
@@ -113,6 +120,8 @@
"""Creates default retry and timeout objects for each method in a gapic
interface config.
+ DEPRECATED: instantiate retry and timeout classes directly instead.
+
Args:
interface_config (Mapping): The interface config section of the full
gapic library config. For example, If the full configuration has
diff --git a/google/api_core/gapic_v1/method.py b/google/api_core/gapic_v1/method.py
index 73c8d4b..0f14ea9 100644
--- a/google/api_core/gapic_v1/method.py
+++ b/google/api_core/gapic_v1/method.py
@@ -15,15 +15,15 @@
"""Helpers for wrapping low-level gRPC methods with common functionality.
This is used by gapic clients to provide common error mapping, retry, timeout,
-pagination, and long-running operations to gRPC methods.
+compression, pagination, and long-running operations to gRPC methods.
"""
import enum
import functools
from google.api_core import grpc_helpers
-from google.api_core import timeout
from google.api_core.gapic_v1 import client_info
+from google.api_core.timeout import TimeToDeadlineTimeout
USE_DEFAULT_METADATA = object()
@@ -38,7 +38,7 @@
DEFAULT = _MethodDefault._DEFAULT_VALUE
-"""Sentinel value indicating that a retry or timeout argument was unspecified,
+"""Sentinel value indicating that a retry, timeout, or compression argument was unspecified,
so the default should be used."""
@@ -52,55 +52,14 @@
``decorators`` may contain items that are ``None`` or ``False`` which will
be ignored.
"""
- decorators = filter(_is_not_none_or_false, reversed(decorators))
+ filtered_decorators = filter(_is_not_none_or_false, reversed(decorators))
- for decorator in decorators:
+ for decorator in filtered_decorators:
func = decorator(func)
return func
-def _determine_timeout(default_timeout, specified_timeout, retry):
- """Determines how timeout should be applied to a wrapped method.
-
- Args:
- default_timeout (Optional[Timeout]): The default timeout specified
- at method creation time.
- specified_timeout (Optional[Timeout]): The timeout specified at
- invocation time. If :attr:`DEFAULT`, this will be set to
- the ``default_timeout``.
- retry (Optional[Retry]): The retry specified at invocation time.
-
- Returns:
- Optional[Timeout]: The timeout to apply to the method or ``None``.
- """
- # If timeout is specified as a number instead of a Timeout instance,
- # convert it to a ConstantTimeout.
- if isinstance(specified_timeout, (int, float)):
- specified_timeout = timeout.ConstantTimeout(specified_timeout)
- if isinstance(default_timeout, (int, float)):
- default_timeout = timeout.ConstantTimeout(default_timeout)
-
- if specified_timeout is DEFAULT:
- specified_timeout = default_timeout
-
- if specified_timeout is default_timeout:
- # If timeout is the default and the default timeout is exponential and
- # a non-default retry is specified, make sure the timeout's deadline
- # matches the retry's. This handles the case where the user leaves
- # the timeout default but specifies a lower deadline via the retry.
- if (
- retry
- and retry is not DEFAULT
- and isinstance(default_timeout, timeout.ExponentialTimeout)
- ):
- return default_timeout.with_deadline(retry._deadline)
- else:
- return default_timeout
-
- return specified_timeout
-
-
class _GapicCallable(object):
"""Callable that applies retry, timeout, and metadata logic.
@@ -108,35 +67,51 @@
target (Callable): The low-level RPC method.
retry (google.api_core.retry.Retry): The default retry for the
callable. If ``None``, this callable will not retry by default
- timeout (google.api_core.timeout.Timeout): The default timeout
- for the callable. If ``None``, this callable will not specify
- a timeout argument to the low-level RPC method by default.
+ timeout (google.api_core.timeout.Timeout): The default timeout for the
+ callable (i.e. duration of time within which an RPC must terminate
+ after its start, not to be confused with deadline). If ``None``,
+ this callable will not specify a timeout argument to the low-level
+ RPC method.
+ compression (grpc.Compression): The default compression for the callable.
+ If ``None``, this callable will not specify a compression argument
+ to the low-level RPC method.
metadata (Sequence[Tuple[str, str]]): Additional metadata that is
provided to the RPC method on every invocation. This is merged with
any metadata specified during invocation. If ``None``, no
additional metadata will be passed to the RPC method.
"""
- def __init__(self, target, retry, timeout, metadata=None):
+ def __init__(
+ self,
+ target,
+ retry,
+ timeout,
+ compression,
+ metadata=None,
+ ):
self._target = target
self._retry = retry
self._timeout = timeout
+ self._compression = compression
self._metadata = metadata
- def __call__(self, *args, timeout=DEFAULT, retry=DEFAULT, **kwargs):
- """Invoke the low-level RPC with retry, timeout, and metadata."""
- timeout = _determine_timeout(
- self._timeout,
- timeout,
- # Use only the invocation-specified retry only for this, as we only
- # want to adjust the timeout deadline if the *user* specified
- # a different retry.
- retry,
- )
+ def __call__(
+ self, *args, timeout=DEFAULT, retry=DEFAULT, compression=DEFAULT, **kwargs
+ ):
+ """Invoke the low-level RPC with retry, timeout, compression, and metadata."""
if retry is DEFAULT:
retry = self._retry
+ if timeout is DEFAULT:
+ timeout = self._timeout
+
+ if compression is DEFAULT:
+ compression = self._compression
+
+ if isinstance(timeout, (int, float)):
+ timeout = TimeToDeadlineTimeout(timeout=timeout)
+
# Apply all applicable decorators.
wrapped_func = _apply_decorators(self._target, [retry, timeout])
@@ -150,6 +125,8 @@
metadata = list(metadata)
metadata.extend(self._metadata)
kwargs["metadata"] = metadata
+ if self._compression is not None:
+ kwargs["compression"] = compression
return wrapped_func(*args, **kwargs)
@@ -158,12 +135,15 @@
func,
default_retry=None,
default_timeout=None,
+ default_compression=None,
client_info=client_info.DEFAULT_CLIENT_INFO,
+ *,
+ with_call=False,
):
"""Wrap an RPC method with common behavior.
- This applies common error wrapping, retry, and timeout behavior a function.
- The wrapped function will take optional ``retry`` and ``timeout``
+ This applies common error wrapping, retry, timeout, and compression behavior to a function.
+ The wrapped function will take optional ``retry``, ``timeout``, and ``compression``
arguments.
For example::
@@ -171,6 +151,7 @@
import google.api_core.gapic_v1.method
from google.api_core import retry
from google.api_core import timeout
+ from grpc import Compression
# The original RPC method.
def get_topic(name, timeout=None):
@@ -179,6 +160,7 @@
default_retry = retry.Retry(deadline=60)
default_timeout = timeout.Timeout(deadline=60)
+ default_compression = Compression.NoCompression
wrapped_get_topic = google.api_core.gapic_v1.method.wrap_method(
get_topic, default_retry)
@@ -227,20 +209,34 @@
default_timeout (Optional[google.api_core.Timeout]): The default
timeout strategy. Can also be specified as an int or float. If
``None``, the method will not have timeout specified by default.
+ default_compression (Optional[grpc.Compression]): The default
+ grpc.Compression. If ``None``, the method will not have
+ compression specified by default.
client_info
(Optional[google.api_core.gapic_v1.client_info.ClientInfo]):
Client information used to create a user-agent string that's
passed as gRPC metadata to the method. If unspecified, then
a sane default will be used. If ``None``, then no user agent
metadata will be provided to the RPC method.
+ with_call (bool): If True, wrapped grpc.UnaryUnaryMulticallables will
+ return a tuple of (response, grpc.Call) instead of just the response.
+ This is useful for extracting trailing metadata from unary calls.
+ Defaults to False.
Returns:
- Callable: A new callable that takes optional ``retry`` and ``timeout``
- arguments and applies the common error mapping, retry, timeout,
+ Callable: A new callable that takes optional ``retry``, ``timeout``,
+ and ``compression``
+ arguments and applies the common error mapping, retry, timeout, compression,
and metadata behavior to the low-level RPC method.
"""
+ if with_call:
+ try:
+ func = func.with_call
+ except AttributeError as exc:
+ raise ValueError(
+ "with_call=True is only supported for unary calls."
+ ) from exc
func = grpc_helpers.wrap_errors(func)
-
if client_info is not None:
user_agent_metadata = [client_info.to_grpc_metadata()]
else:
@@ -248,6 +244,10 @@
return functools.wraps(func)(
_GapicCallable(
- func, default_retry, default_timeout, metadata=user_agent_metadata
+ func,
+ default_retry,
+ default_timeout,
+ default_compression,
+ metadata=user_agent_metadata,
)
)
diff --git a/google/api_core/gapic_v1/method_async.py b/google/api_core/gapic_v1/method_async.py
index 84c99aa..c0f38c0 100644
--- a/google/api_core/gapic_v1/method_async.py
+++ b/google/api_core/gapic_v1/method_async.py
@@ -14,7 +14,7 @@
"""AsyncIO helpers for wrapping gRPC methods with common functionality.
This is used by gapic clients to provide common error mapping, retry, timeout,
-pagination, and long-running operations to gRPC methods.
+compression, pagination, and long-running operations to gRPC methods.
"""
import functools
@@ -25,24 +25,35 @@
from google.api_core.gapic_v1.method import DEFAULT # noqa: F401
from google.api_core.gapic_v1.method import USE_DEFAULT_METADATA # noqa: F401
+_DEFAULT_ASYNC_TRANSPORT_KIND = "grpc_asyncio"
+
def wrap_method(
func,
default_retry=None,
default_timeout=None,
+ default_compression=None,
client_info=client_info.DEFAULT_CLIENT_INFO,
+ kind=_DEFAULT_ASYNC_TRANSPORT_KIND,
):
"""Wrap an async RPC method with common behavior.
Returns:
- Callable: A new callable that takes optional ``retry`` and ``timeout``
- arguments and applies the common error mapping, retry, timeout,
- and metadata behavior to the low-level RPC method.
+ Callable: A new callable that takes optional ``retry``, ``timeout``,
+ and ``compression`` arguments and applies the common error mapping,
+ retry, timeout, metadata, and compression behavior to the low-level RPC method.
"""
- func = grpc_helpers_async.wrap_errors(func)
+ if kind == _DEFAULT_ASYNC_TRANSPORT_KIND:
+ func = grpc_helpers_async.wrap_errors(func)
metadata = [client_info.to_grpc_metadata()] if client_info is not None else None
return functools.wraps(func)(
- _GapicCallable(func, default_retry, default_timeout, metadata=metadata)
+ _GapicCallable(
+ func,
+ default_retry,
+ default_timeout,
+ default_compression,
+ metadata=metadata,
+ )
)
diff --git a/google/api_core/gapic_v1/routing_header.py b/google/api_core/gapic_v1/routing_header.py
index a7bcb5a..c0c6f64 100644
--- a/google/api_core/gapic_v1/routing_header.py
+++ b/google/api_core/gapic_v1/routing_header.py
@@ -20,38 +20,68 @@
Generally, these headers are specified as gRPC metadata.
"""
+import functools
+from enum import Enum
from urllib.parse import urlencode
ROUTING_METADATA_KEY = "x-goog-request-params"
+# This is the value for the `maxsize` argument of @functools.lru_cache
+# https://docs.python.org/3/library/functools.html#functools.lru_cache
+# This represents the number of recent function calls to store.
+ROUTING_PARAM_CACHE_SIZE = 32
-def to_routing_header(params):
+def to_routing_header(params, qualified_enums=True):
"""Returns a routing header string for the given request parameters.
Args:
- params (Mapping[str, Any]): A dictionary containing the request
+ params (Mapping[str, str | bytes | Enum]): A dictionary containing the request
parameters used for routing.
+ qualified_enums (bool): Whether to represent enum values
+ as their type-qualified symbol names instead of as their
+ unqualified symbol names.
Returns:
str: The routing header string.
"""
- return urlencode(
- params,
- # Per Google API policy (go/api-url-encoding), / is not encoded.
- safe="/",
- )
+ tuples = params.items() if isinstance(params, dict) else params
+ if not qualified_enums:
+ tuples = [(x[0], x[1].name) if isinstance(x[1], Enum) else x for x in tuples]
+ return "&".join([_urlencode_param(*t) for t in tuples])
-def to_grpc_metadata(params):
+def to_grpc_metadata(params, qualified_enums=True):
"""Returns the gRPC metadata containing the routing headers for the given
request parameters.
Args:
- params (Mapping[str, Any]): A dictionary containing the request
+ params (Mapping[str, str | bytes | Enum]): A dictionary containing the request
parameters used for routing.
+ qualified_enums (bool): Whether to represent enum values
+ as their type-qualified symbol names instead of as their
+ unqualified symbol names.
Returns:
Tuple(str, str): The gRPC metadata containing the routing header key
and value.
"""
- return (ROUTING_METADATA_KEY, to_routing_header(params))
+ return (ROUTING_METADATA_KEY, to_routing_header(params, qualified_enums))
+
+
+# use caching to avoid repeated computation
[email protected]_cache(maxsize=ROUTING_PARAM_CACHE_SIZE)
+def _urlencode_param(key, value):
+ """Cacheable wrapper over urlencode
+
+ Args:
+ key (str): The key of the parameter to encode.
+ value (str | bytes | Enum): The value of the parameter to encode.
+
+ Returns:
+ str: The encoded parameter.
+ """
+ return urlencode(
+ {key: value},
+ # Per Google API policy (go/api-url-encoding), / is not encoded.
+ safe="/",
+ )
diff --git a/google/api_core/general_helpers.py b/google/api_core/general_helpers.py
index fba7802..a6af45b 100644
--- a/google/api_core/general_helpers.py
+++ b/google/api_core/general_helpers.py
@@ -12,5 +12,5 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# This import for backward compatibiltiy only.
+# This import for backward compatibility only.
from functools import wraps # noqa: F401 pragma: NO COVER
diff --git a/google/api_core/grpc_helpers.py b/google/api_core/grpc_helpers.py
index 594df98..0796302 100644
--- a/google/api_core/grpc_helpers.py
+++ b/google/api_core/grpc_helpers.py
@@ -13,38 +13,48 @@
# limitations under the License.
"""Helpers for :mod:`grpc`."""
+from typing import Generic, Iterator, Optional, TypeVar
import collections
import functools
+import warnings
import grpc
-import pkg_resources
from google.api_core import exceptions
import google.auth
import google.auth.credentials
import google.auth.transport.grpc
import google.auth.transport.requests
+import google.protobuf
-try:
- import grpc_gcp
+PROTOBUF_VERSION = google.protobuf.__version__
- HAS_GRPC_GCP = True
-except ImportError:
+# The grpcio-gcp package only has support for protobuf < 4
+if PROTOBUF_VERSION[0:2] == "3.": # pragma: NO COVER
+ try:
+ import grpc_gcp
+
+ warnings.warn(
+ """Support for grpcio-gcp is deprecated. This feature will be
+ removed from `google-api-core` after January 1, 2024. If you need to
+ continue to use this feature, please pin to a specific version of
+ `google-api-core`.""",
+ DeprecationWarning,
+ )
+ HAS_GRPC_GCP = True
+ except ImportError:
+ HAS_GRPC_GCP = False
+else:
HAS_GRPC_GCP = False
-try:
- # google.auth.__version__ was added in 1.26.0
- _GOOGLE_AUTH_VERSION = google.auth.__version__
-except AttributeError:
- try: # try pkg_resources if it is available
- _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
- except pkg_resources.DistributionNotFound: # pragma: NO COVER
- _GOOGLE_AUTH_VERSION = None
# The list of gRPC Callable interfaces that return iterators.
_STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable)
+# denotes the proto response type for grpc calls
+P = TypeVar("P")
+
def _patch_callable_name(callable_):
"""Fix-up gRPC callable attributes.
@@ -70,7 +80,7 @@
return error_remapped_callable
-class _StreamingResponseIterator(grpc.Call):
+class _StreamingResponseIterator(Generic[P], grpc.Call):
def __init__(self, wrapped, prefetch_first_result=True):
self._wrapped = wrapped
@@ -88,11 +98,11 @@
# ignore stop iteration at this time. This should be handled outside of retry.
pass
- def __iter__(self):
+ def __iter__(self) -> Iterator[P]:
"""This iterator is also an iterable that returns itself."""
return self
- def __next__(self):
+ def __next__(self) -> P:
"""Get the next response from the stream.
Returns:
@@ -135,6 +145,10 @@
return self._wrapped.trailing_metadata()
+# public type alias denoting the return type of streaming gapic calls
+GrpcStream = _StreamingResponseIterator[P]
+
+
def _wrap_stream_errors(callable_):
"""Wrap errors for Unary-Stream and Stream-Stream gRPC callables.
@@ -202,6 +216,18 @@
credentials_file (str): A file with credentials that can be loaded with
:func:`google.auth.load_credentials_from_file`. This argument is
mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
default_scopes (Sequence[str]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -246,17 +272,32 @@
# Create the metadata plugin for inserting the authorization header.
metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin(
- credentials, request, default_host=default_host,
+ credentials,
+ request,
+ default_host=default_host,
)
# Create a set of grpc.CallCredentials using the metadata plugin.
google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin)
- if ssl_credentials is None:
- ssl_credentials = grpc.ssl_channel_credentials()
-
- # Combine the ssl credentials and the authorization credentials.
- return grpc.composite_channel_credentials(ssl_credentials, google_auth_credentials)
+ # if `ssl_credentials` is set, use `grpc.composite_channel_credentials` instead of
+ # `grpc.compute_engine_channel_credentials` as the former supports passing
+ # `ssl_credentials` via `channel_credentials` which is needed for mTLS.
+ if ssl_credentials:
+ # Combine the ssl credentials and the authorization credentials.
+ # See https://grpc.github.io/grpc/python/grpc.html#grpc.composite_channel_credentials
+ return grpc.composite_channel_credentials(
+ ssl_credentials, google_auth_credentials
+ )
+ else:
+ # Use grpc.compute_engine_channel_credentials in order to support Direct Path.
+ # See https://grpc.github.io/grpc/python/grpc.html#grpc.compute_engine_channel_credentials
+ # TODO(https://github.com/googleapis/python-api-core/issues/598):
+ # Although `grpc.compute_engine_channel_credentials` returns channel credentials
+ # outside of a Google Compute Engine environment (GCE), we should determine if
+ # there is a way to reliably detect a GCE environment so that
+ # `grpc.compute_engine_channel_credentials` is not called outside of GCE.
+ return grpc.compute_engine_channel_credentials(google_auth_credentials)
def create_channel(
@@ -268,7 +309,9 @@
quota_project_id=None,
default_scopes=None,
default_host=None,
- **kwargs
+ compression=None,
+ attempt_direct_path: Optional[bool] = False,
+ **kwargs,
):
"""Create a secure channel with credentials.
@@ -285,20 +328,58 @@
credentials_file (str): A file with credentials that can be loaded with
:func:`google.auth.load_credentials_from_file`. This argument is
mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
quota_project_id (str): An optional project to use for billing and quota.
default_scopes (Sequence[str]): Default scopes passed by a Google client
library. Use 'scopes' for user-defined scopes.
default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
+ compression (grpc.Compression): An optional value indicating the
+ compression method to be used over the lifetime of the channel.
+ attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted
+ when the request is made. Direct Path is only available within a Google
+ Compute Engine (GCE) environment and provides a proxyless connection
+ which increases the available throughput, reduces latency, and increases
+ reliability. Note:
+
+ - This argument should only be set in a GCE environment and for Services
+ that are known to support Direct Path.
+ - If this argument is set outside of GCE, then this request will fail
+ unless the back-end service happens to have configured fall-back to DNS.
+ - If the request causes a `ServiceUnavailable` response, it is recommended
+ that the client repeat the request with `attempt_direct_path` set to
+ `False` as the Service may not support Direct Path.
+ - Using `ssl_credentials` with `attempt_direct_path` set to `True` will
+ result in `ValueError` as this combination is not yet supported.
+
kwargs: Additional key-word args passed to
:func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`.
+ Note: `grpc_gcp` is only supported in environments with protobuf < 4.0.0.
Returns:
grpc.Channel: The created channel.
Raises:
google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`.
"""
+ # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`,
+ # raise ValueError as this is not yet supported.
+ # See https://github.com/googleapis/python-api-core/issues/590
+ if ssl_credentials and attempt_direct_path:
+ raise ValueError("Using ssl_credentials with Direct Path is not supported")
+
composite_credentials = _create_composite_credentials(
credentials=credentials,
credentials_file=credentials_file,
@@ -309,16 +390,60 @@
default_host=default_host,
)
- if HAS_GRPC_GCP:
- # If grpc_gcp module is available use grpc_gcp.secure_channel,
- # otherwise, use grpc.secure_channel to create grpc channel.
+ # Note that grpcio-gcp is deprecated
+ if HAS_GRPC_GCP: # pragma: NO COVER
+ if compression is not None and compression != grpc.Compression.NoCompression:
+ warnings.warn(
+ "The `compression` argument is ignored for grpc_gcp.secure_channel creation.",
+ DeprecationWarning,
+ )
+ if attempt_direct_path:
+ warnings.warn(
+ """The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation.""",
+ DeprecationWarning,
+ )
return grpc_gcp.secure_channel(target, composite_credentials, **kwargs)
- else:
- return grpc.secure_channel(target, composite_credentials, **kwargs)
+
+ if attempt_direct_path:
+ target = _modify_target_for_direct_path(target)
+
+ return grpc.secure_channel(
+ target, composite_credentials, compression=compression, **kwargs
+ )
+
+
+def _modify_target_for_direct_path(target: str) -> str:
+ """
+ Given a target, return a modified version which is compatible with Direct Path.
+
+ Args:
+ target (str): The target service address in the format 'hostname[:port]' or
+ 'dns://hostname[:port]'.
+
+ Returns:
+ target (str): The target service address which is converted into a format compatible with Direct Path.
+ If the target contains `dns:///` or does not contain `:///`, the target will be converted in
+ a format compatible with Direct Path; otherwise the original target will be returned as the
+ original target may already denote Direct Path.
+ """
+
+ # A DNS prefix may be included with the target to indicate the endpoint is living in the Internet,
+ # outside of Google Cloud Platform.
+ dns_prefix = "dns:///"
+ # Remove "dns:///" if `attempt_direct_path` is set to True as
+ # the Direct Path prefix `google-c2p:///` will be used instead.
+ target = target.replace(dns_prefix, "")
+
+ direct_path_separator = ":///"
+ if direct_path_separator not in target:
+ target_without_port = target.split(":")[0]
+ # Modify the target to use Direct Path by adding the `google-c2p:///` prefix
+ target = f"google-c2p{direct_path_separator}{target_without_port}"
+ return target
_MethodCall = collections.namedtuple(
- "_MethodCall", ("request", "timeout", "metadata", "credentials")
+ "_MethodCall", ("request", "timeout", "metadata", "credentials", "compression")
)
_ChannelRequest = collections.namedtuple("_ChannelRequest", ("method", "request"))
@@ -345,11 +470,15 @@
"""List[protobuf.Message]: All requests sent to this callable."""
self.calls = []
"""List[Tuple]: All invocations of this callable. Each tuple is the
- request, timeout, metadata, and credentials."""
+ request, timeout, metadata, credentials, and compression."""
- def __call__(self, request, timeout=None, metadata=None, credentials=None):
+ def __call__(
+ self, request, timeout=None, metadata=None, credentials=None, compression=None
+ ):
self._channel.requests.append(_ChannelRequest(self._method, request))
- self.calls.append(_MethodCall(request, timeout, metadata, credentials))
+ self.calls.append(
+ _MethodCall(request, timeout, metadata, credentials, compression)
+ )
self.requests.append(request)
response = self.response
@@ -464,20 +593,42 @@
except KeyError:
raise AttributeError
- def unary_unary(self, method, request_serializer=None, response_deserializer=None):
+ def unary_unary(
+ self,
+ method,
+ request_serializer=None,
+ response_deserializer=None,
+ _registered_method=False,
+ ):
"""grpc.Channel.unary_unary implementation."""
return self._stub_for_method(method)
- def unary_stream(self, method, request_serializer=None, response_deserializer=None):
+ def unary_stream(
+ self,
+ method,
+ request_serializer=None,
+ response_deserializer=None,
+ _registered_method=False,
+ ):
"""grpc.Channel.unary_stream implementation."""
return self._stub_for_method(method)
- def stream_unary(self, method, request_serializer=None, response_deserializer=None):
+ def stream_unary(
+ self,
+ method,
+ request_serializer=None,
+ response_deserializer=None,
+ _registered_method=False,
+ ):
"""grpc.Channel.stream_unary implementation."""
return self._stub_for_method(method)
def stream_stream(
- self, method, request_serializer=None, response_deserializer=None
+ self,
+ method,
+ request_serializer=None,
+ response_deserializer=None,
+ _registered_method=False,
):
"""grpc.Channel.stream_stream implementation."""
return self._stub_for_method(method)
diff --git a/google/api_core/grpc_helpers_async.py b/google/api_core/grpc_helpers_async.py
index 452e787..af66143 100644
--- a/google/api_core/grpc_helpers_async.py
+++ b/google/api_core/grpc_helpers_async.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""AsyncIO helpers for :mod:`grpc` supporting 3.6+.
+"""AsyncIO helpers for :mod:`grpc` supporting 3.7+.
Please combine more detailed docstring in grpc_helpers.py to use following
functions. This module is implementing the same surface with AsyncIO semantics.
@@ -21,14 +21,15 @@
import asyncio
import functools
+from typing import AsyncGenerator, Generic, Iterator, Optional, TypeVar
+
import grpc
from grpc import aio
from google.api_core import exceptions, grpc_helpers
-
-# TODO(lidiz) Support gRPC GCP wrapper
-HAS_GRPC_GCP = False
+# denotes the proto response type for grpc calls
+P = TypeVar("P")
# NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform
# automatic patching for us. But that means the overhead of creating an
@@ -78,8 +79,8 @@
raise exceptions.from_grpc_error(rpc_error) from rpc_error
-class _WrappedUnaryResponseMixin(_WrappedCall):
- def __await__(self):
+class _WrappedUnaryResponseMixin(Generic[P], _WrappedCall):
+ def __await__(self) -> Iterator[P]:
try:
response = yield from self._call.__await__()
return response
@@ -87,17 +88,17 @@
raise exceptions.from_grpc_error(rpc_error) from rpc_error
-class _WrappedStreamResponseMixin(_WrappedCall):
+class _WrappedStreamResponseMixin(Generic[P], _WrappedCall):
def __init__(self):
self._wrapped_async_generator = None
- async def read(self):
+ async def read(self) -> P:
try:
return await self._call.read()
except grpc.RpcError as rpc_error:
raise exceptions.from_grpc_error(rpc_error) from rpc_error
- async def _wrapped_aiter(self):
+ async def _wrapped_aiter(self) -> AsyncGenerator[P, None]:
try:
# NOTE(lidiz) coverage doesn't understand the exception raised from
# __anext__ method. It is covered by test case:
@@ -107,7 +108,7 @@
except grpc.RpcError as rpc_error:
raise exceptions.from_grpc_error(rpc_error) from rpc_error
- def __aiter__(self):
+ def __aiter__(self) -> AsyncGenerator[P, None]:
if not self._wrapped_async_generator:
self._wrapped_async_generator = self._wrapped_aiter()
return self._wrapped_async_generator
@@ -130,29 +131,34 @@
# NOTE(lidiz) Implementing each individual class separately, so we don't
# expose any API that should not be seen. E.g., __aiter__ in unary-unary
# RPC, or __await__ in stream-stream RPC.
-class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin, aio.UnaryUnaryCall):
+class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin[P], aio.UnaryUnaryCall):
"""Wrapped UnaryUnaryCall to map exceptions."""
-class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin, aio.UnaryStreamCall):
+class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin[P], aio.UnaryStreamCall):
"""Wrapped UnaryStreamCall to map exceptions."""
class _WrappedStreamUnaryCall(
- _WrappedUnaryResponseMixin, _WrappedStreamRequestMixin, aio.StreamUnaryCall
+ _WrappedUnaryResponseMixin[P], _WrappedStreamRequestMixin, aio.StreamUnaryCall
):
"""Wrapped StreamUnaryCall to map exceptions."""
class _WrappedStreamStreamCall(
- _WrappedStreamRequestMixin, _WrappedStreamResponseMixin, aio.StreamStreamCall
+ _WrappedStreamRequestMixin, _WrappedStreamResponseMixin[P], aio.StreamStreamCall
):
"""Wrapped StreamStreamCall to map exceptions."""
+# public type alias denoting the return type of async streaming gapic calls
+GrpcAsyncStream = _WrappedStreamResponseMixin
+# public type alias denoting the return type of unary gapic calls
+AwaitableGrpcCall = _WrappedUnaryResponseMixin
+
+
def _wrap_unary_errors(callable_):
"""Map errors for Unary-Unary async callables."""
- grpc_helpers._patch_callable_name(callable_)
@functools.wraps(callable_)
def error_remapped_callable(*args, **kwargs):
@@ -162,23 +168,13 @@
return error_remapped_callable
-def _wrap_stream_errors(callable_):
+def _wrap_stream_errors(callable_, wrapper_type):
"""Map errors for streaming RPC async callables."""
- grpc_helpers._patch_callable_name(callable_)
@functools.wraps(callable_)
async def error_remapped_callable(*args, **kwargs):
call = callable_(*args, **kwargs)
-
- if isinstance(call, aio.UnaryStreamCall):
- call = _WrappedUnaryStreamCall().with_call(call)
- elif isinstance(call, aio.StreamUnaryCall):
- call = _WrappedStreamUnaryCall().with_call(call)
- elif isinstance(call, aio.StreamStreamCall):
- call = _WrappedStreamStreamCall().with_call(call)
- else:
- raise TypeError("Unexpected type of call %s" % type(call))
-
+ call = wrapper_type().with_call(call)
await call.wait_for_connection()
return call
@@ -200,10 +196,16 @@
Returns: Callable: The wrapped gRPC callable.
"""
- if isinstance(callable_, aio.UnaryUnaryMultiCallable):
- return _wrap_unary_errors(callable_)
+ grpc_helpers._patch_callable_name(callable_)
+
+ if isinstance(callable_, aio.UnaryStreamMultiCallable):
+ return _wrap_stream_errors(callable_, _WrappedUnaryStreamCall)
+ elif isinstance(callable_, aio.StreamUnaryMultiCallable):
+ return _wrap_stream_errors(callable_, _WrappedStreamUnaryCall)
+ elif isinstance(callable_, aio.StreamStreamMultiCallable):
+ return _wrap_stream_errors(callable_, _WrappedStreamStreamCall)
else:
- return _wrap_stream_errors(callable_)
+ return _wrap_unary_errors(callable_)
def create_channel(
@@ -215,6 +217,8 @@
quota_project_id=None,
default_scopes=None,
default_host=None,
+ compression=None,
+ attempt_direct_path: Optional[bool] = False,
**kwargs
):
"""Create an AsyncIO secure channel with credentials.
@@ -232,10 +236,40 @@
credentials_file (str): A file with credentials that can be loaded with
:func:`google.auth.load_credentials_from_file`. This argument is
mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
quota_project_id (str): An optional project to use for billing and quota.
default_scopes (Sequence[str]): Default scopes passed by a Google client
library. Use 'scopes' for user-defined scopes.
default_host (str): The default endpoint. e.g., "pubsub.googleapis.com".
+ compression (grpc.Compression): An optional value indicating the
+ compression method to be used over the lifetime of the channel.
+ attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted
+ when the request is made. Direct Path is only available within a Google
+ Compute Engine (GCE) environment and provides a proxyless connection
+ which increases the available throughput, reduces latency, and increases
+ reliability. Note:
+
+ - This argument should only be set in a GCE environment and for Services
+ that are known to support Direct Path.
+ - If this argument is set outside of GCE, then this request will fail
+ unless the back-end service happens to have configured fall-back to DNS.
+ - If the request causes a `ServiceUnavailable` response, it is recommended
+ that the client repeat the request with `attempt_direct_path` set to
+ `False` as the Service may not support Direct Path.
+ - Using `ssl_credentials` with `attempt_direct_path` set to `True` will
+ result in `ValueError` as this combination is not yet supported.
+
kwargs: Additional key-word args passed to :func:`aio.secure_channel`.
Returns:
@@ -243,8 +277,15 @@
Raises:
google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed.
+ ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`.
"""
+ # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`,
+ # raise ValueError as this is not yet supported.
+ # See https://github.com/googleapis/python-api-core/issues/590
+ if ssl_credentials and attempt_direct_path:
+ raise ValueError("Using ssl_credentials with Direct Path is not supported")
+
composite_credentials = grpc_helpers._create_composite_credentials(
credentials=credentials,
credentials_file=credentials_file,
@@ -255,7 +296,12 @@
default_host=default_host,
)
- return aio.secure_channel(target, composite_credentials, **kwargs)
+ if attempt_direct_path:
+ target = grpc_helpers._modify_target_for_direct_path(target)
+
+ return aio.secure_channel(
+ target, composite_credentials, compression=compression, **kwargs
+ )
class FakeUnaryUnaryCall(_WrappedUnaryUnaryCall):
diff --git a/google/api_core/operation.py b/google/api_core/operation.py
index b17f753..4b9c9a5 100644
--- a/google/api_core/operation.py
+++ b/google/api_core/operation.py
@@ -61,10 +61,13 @@
result.
metadata_type (func:`type`): The protobuf type for the operation's
metadata.
- retry (google.api_core.retry.Retry): The retry configuration used
- when polling. This can be used to control how often :meth:`done`
- is polled. Regardless of the retry's ``deadline``, it will be
- overridden by the ``timeout`` argument to :meth:`result`.
+ polling (google.api_core.retry.Retry): The configuration used for polling.
+ This parameter controls how often :meth:`done` is polled. If the
+ ``timeout`` argument is specified in the :meth:`result` method, it will
+ override the ``polling.timeout`` property.
+ retry (google.api_core.retry.Retry): DEPRECATED: use ``polling`` instead.
+ If specified it will override ``polling`` parameter to maintain
+ backward compatibility.
"""
def __init__(
@@ -74,9 +77,10 @@
cancel,
result_type,
metadata_type=None,
- retry=polling.DEFAULT_RETRY,
+ polling=polling.DEFAULT_POLLING,
+ **kwargs
):
- super(Operation, self).__init__(retry=retry)
+ super(Operation, self).__init__(polling=polling, **kwargs)
self._operation = operation
self._refresh = refresh
self._cancel = cancel
@@ -146,7 +150,7 @@
)
self.set_exception(exception)
- def _refresh_and_update(self, retry=polling.DEFAULT_RETRY):
+ def _refresh_and_update(self, retry=None):
"""Refresh the operation and update the result if needed.
Args:
@@ -155,10 +159,10 @@
# If the currently cached operation is done, no need to make another
# RPC as it will not change once done.
if not self._operation.done:
- self._operation = self._refresh(retry=retry)
+ self._operation = self._refresh(retry=retry) if retry else self._refresh()
self._set_result_from_operation()
- def done(self, retry=polling.DEFAULT_RETRY):
+ def done(self, retry=None):
"""Checks to see if the operation is complete.
Args:
@@ -311,10 +315,16 @@
operation.
"""
refresh = functools.partial(
- _refresh_grpc, operations_stub, operation.name, metadata=grpc_metadata
+ _refresh_grpc,
+ operations_stub,
+ operation.name,
+ metadata=grpc_metadata,
)
cancel = functools.partial(
- _cancel_grpc, operations_stub, operation.name, metadata=grpc_metadata
+ _cancel_grpc,
+ operations_stub,
+ operation.name,
+ metadata=grpc_metadata,
)
return Operation(operation, refresh, cancel, result_type, **kwargs)
@@ -343,9 +353,13 @@
operation.
"""
refresh = functools.partial(
- operations_client.get_operation, operation.name, metadata=grpc_metadata
+ operations_client.get_operation,
+ operation.name,
+ metadata=grpc_metadata,
)
cancel = functools.partial(
- operations_client.cancel_operation, operation.name, metadata=grpc_metadata
+ operations_client.cancel_operation,
+ operation.name,
+ metadata=grpc_metadata,
)
return Operation(operation, refresh, cancel, result_type, **kwargs)
diff --git a/google/api_core/operation_async.py b/google/api_core/operation_async.py
index 6bae865..2fd341d 100644
--- a/google/api_core/operation_async.py
+++ b/google/api_core/operation_async.py
@@ -213,9 +213,13 @@
operation.
"""
refresh = functools.partial(
- operations_client.get_operation, operation.name, metadata=grpc_metadata
+ operations_client.get_operation,
+ operation.name,
+ metadata=grpc_metadata,
)
cancel = functools.partial(
- operations_client.cancel_operation, operation.name, metadata=grpc_metadata
+ operations_client.cancel_operation,
+ operation.name,
+ metadata=grpc_metadata,
)
return AsyncOperation(operation, refresh, cancel, result_type, **kwargs)
diff --git a/google/api_core/operations_v1/__init__.py b/google/api_core/operations_v1/__init__.py
index 6118645..4db32a4 100644
--- a/google/api_core/operations_v1/__init__.py
+++ b/google/api_core/operations_v1/__init__.py
@@ -25,3 +25,16 @@
"OperationsClient",
"OperationsRestTransport"
]
+
+try:
+ from google.api_core.operations_v1.transports.rest_asyncio import (
+ AsyncOperationsRestTransport,
+ )
+ from google.api_core.operations_v1.operations_rest_client_async import AsyncOperationsRestClient
+
+ __all__ += ["AsyncOperationsRestClient", "AsyncOperationsRestTransport"]
+except ImportError:
+ # This import requires the `async_rest` extra.
+ # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported
+ # as other transports are still available.
+ pass
diff --git a/google/api_core/operations_v1/abstract_operations_base_client.py b/google/api_core/operations_v1/abstract_operations_base_client.py
new file mode 100644
index 0000000..160c2a8
--- /dev/null
+++ b/google/api_core/operations_v1/abstract_operations_base_client.py
@@ -0,0 +1,370 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Optional, Type, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+
+try:
+ from google.api_core.operations_v1.transports.rest_asyncio import (
+ AsyncOperationsRestTransport,
+ )
+
+ HAS_ASYNC_REST_DEPENDENCIES = True
+except ImportError as e:
+ HAS_ASYNC_REST_DEPENDENCIES = False
+ ASYNC_REST_EXCEPTION = e
+
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+
+
+class AbstractOperationsBaseClientMeta(type):
+ """Metaclass for the Operations Base client.
+
+ This provides base class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
+ _transport_registry["rest"] = OperationsRestTransport
+ if HAS_ASYNC_REST_DEPENDENCIES:
+ _transport_registry["rest_asyncio"] = AsyncOperationsRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[OperationsTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if (
+ label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES
+ ): # pragma: NO COVER
+ raise ASYNC_REST_EXCEPTION
+
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class AbstractOperationsBaseClient(metaclass=AbstractOperationsBaseClientMeta):
+ """Manages long-running operations with an API service.
+
+ When an API method normally takes long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "longrunning.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """
+ This class method should be overridden by the subclasses.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Raises:
+ NotImplementedError: If the method is called on the base class.
+ """
+ raise NotImplementedError("`from_service_account_info` is not implemented.")
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """
+ This class method should be overridden by the subclasses.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Raises:
+ NotImplementedError: If the method is called on the base class.
+ """
+ raise NotImplementedError("`from_service_account_file` is not implemented.")
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> OperationsTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ OperationsTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = os.getenv(
+ "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+ ).lower()
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ if is_mtls:
+ client_cert_source_func = mtls.default_client_cert_source()
+ else:
+ client_cert_source_func = None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ if is_mtls:
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = self.DEFAULT_ENDPOINT
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+ "values: never, auto, always"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, OperationsTransport):
+ # transport is a OperationsTransport instance.
+ if credentials or client_options.credentials_file:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ )
+ self._transport = transport
+ else:
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ )
diff --git a/google/api_core/operations_v1/abstract_operations_client.py b/google/api_core/operations_v1/abstract_operations_client.py
index 631094e..fc44536 100644
--- a/google/api_core/operations_v1/abstract_operations_client.py
+++ b/google/api_core/operations_v1/abstract_operations_client.py
@@ -13,11 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from collections import OrderedDict
-from distutils import util
-import os
-import re
-from typing import Dict, Optional, Sequence, Tuple, Type, Union
+from typing import Optional, Sequence, Tuple, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import gapic_v1 # type: ignore
@@ -27,49 +23,18 @@
DEFAULT_CLIENT_INFO,
OperationsTransport,
)
-from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+from google.api_core.operations_v1.abstract_operations_base_client import (
+ AbstractOperationsBaseClient,
+)
from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.exceptions import MutualTLSChannelError # type: ignore
-from google.auth.transport import mtls # type: ignore
from google.longrunning import operations_pb2
from google.oauth2 import service_account # type: ignore
+import grpc
OptionalRetry = Union[retries.Retry, object]
-class AbstractOperationsClientMeta(type):
- """Metaclass for the Operations client.
-
- This provides class-level methods for building and retrieving
- support objects (e.g. transport) without polluting the client instance
- objects.
- """
-
- _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
- _transport_registry["rest"] = OperationsRestTransport
-
- def get_transport_class(
- cls, label: Optional[str] = None,
- ) -> Type[OperationsTransport]:
- """Returns an appropriate transport class.
-
- Args:
- label: The name of the desired transport. If none is
- provided, then the first transport in the registry is used.
-
- Returns:
- The transport class to use.
- """
- # If a specific transport is requested, return that one.
- if label:
- return cls._transport_registry[label]
-
- # No transport is requested; return the default (that is, the first one
- # in the dictionary).
- return next(iter(cls._transport_registry.values()))
-
-
-class AbstractOperationsClient(metaclass=AbstractOperationsClientMeta):
+class AbstractOperationsClient(AbstractOperationsBaseClient):
"""Manages long-running operations with an API service.
When an API method normally takes long time to complete, it can be
@@ -82,147 +47,6 @@
interface so developers can have a consistent client experience.
"""
- @staticmethod
- def _get_default_mtls_endpoint(api_endpoint):
- """Converts api endpoint to mTLS endpoint.
-
- Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
- "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
- Args:
- api_endpoint (Optional[str]): the api endpoint to convert.
- Returns:
- str: converted mTLS api endpoint.
- """
- if not api_endpoint:
- return api_endpoint
-
- mtls_endpoint_re = re.compile(
- r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- )
-
- m = mtls_endpoint_re.match(api_endpoint)
- name, mtls, sandbox, googledomain = m.groups()
- if mtls or not googledomain:
- return api_endpoint
-
- if sandbox:
- return api_endpoint.replace(
- "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
- )
-
- return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
- DEFAULT_ENDPOINT = "longrunning.googleapis.com"
- DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
- DEFAULT_ENDPOINT
- )
-
- @classmethod
- def from_service_account_info(cls, info: dict, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- info.
-
- Args:
- info (dict): The service account private key info.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- AbstractOperationsClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_info(info)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- @classmethod
- def from_service_account_file(cls, filename: str, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- AbstractOperationsClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @property
- def transport(self) -> OperationsTransport:
- """Returns the transport used by the client instance.
-
- Returns:
- OperationsTransport: The transport used by the client
- instance.
- """
- return self._transport
-
- @staticmethod
- def common_billing_account_path(billing_account: str,) -> str:
- """Returns a fully-qualified billing_account string."""
- return "billingAccounts/{billing_account}".format(
- billing_account=billing_account,
- )
-
- @staticmethod
- def parse_common_billing_account_path(path: str) -> Dict[str, str]:
- """Parse a billing_account path into its component segments."""
- m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_folder_path(folder: str,) -> str:
- """Returns a fully-qualified folder string."""
- return "folders/{folder}".format(folder=folder,)
-
- @staticmethod
- def parse_common_folder_path(path: str) -> Dict[str, str]:
- """Parse a folder path into its component segments."""
- m = re.match(r"^folders/(?P<folder>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_organization_path(organization: str,) -> str:
- """Returns a fully-qualified organization string."""
- return "organizations/{organization}".format(organization=organization,)
-
- @staticmethod
- def parse_common_organization_path(path: str) -> Dict[str, str]:
- """Parse a organization path into its component segments."""
- m = re.match(r"^organizations/(?P<organization>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_project_path(project: str,) -> str:
- """Returns a fully-qualified project string."""
- return "projects/{project}".format(project=project,)
-
- @staticmethod
- def parse_common_project_path(path: str) -> Dict[str, str]:
- """Parse a project path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_location_path(project: str, location: str,) -> str:
- """Returns a fully-qualified location string."""
- return "projects/{project}/locations/{location}".format(
- project=project, location=location,
- )
-
- @staticmethod
- def parse_common_location_path(path: str) -> Dict[str, str]:
- """Parse a location path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
- return m.groupdict() if m else {}
-
def __init__(
self,
*,
@@ -268,77 +92,49 @@
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
- if isinstance(client_options, dict):
- client_options = client_options_lib.from_dict(client_options)
- if client_options is None:
- client_options = client_options_lib.ClientOptions()
-
- # Create SSL credentials for mutual TLS if needed.
- use_client_cert = bool(
- util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ super().__init__(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
)
- client_cert_source_func = None
- is_mtls = False
- if use_client_cert:
- if client_options.client_cert_source:
- is_mtls = True
- client_cert_source_func = client_options.client_cert_source
- else:
- is_mtls = mtls.has_default_client_cert_source()
- if is_mtls:
- client_cert_source_func = mtls.default_client_cert_source()
- else:
- client_cert_source_func = None
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
- # Figure out which api endpoint to use.
- if client_options.api_endpoint is not None:
- api_endpoint = client_options.api_endpoint
- else:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
- if use_mtls_env == "never":
- api_endpoint = self.DEFAULT_ENDPOINT
- elif use_mtls_env == "always":
- api_endpoint = self.DEFAULT_MTLS_ENDPOINT
- elif use_mtls_env == "auto":
- if is_mtls:
- api_endpoint = self.DEFAULT_MTLS_ENDPOINT
- else:
- api_endpoint = self.DEFAULT_ENDPOINT
- else:
- raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
- "values: never, auto, always"
- )
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
- # Save or instantiate the transport.
- # Ordinarily, we provide the transport, but allowing a custom transport
- # instance provides an extensibility point for unusual situations.
- if isinstance(transport, OperationsTransport):
- # transport is a OperationsTransport instance.
- if credentials or client_options.credentials_file:
- raise ValueError(
- "When providing a transport instance, "
- "provide its credentials directly."
- )
- if client_options.scopes:
- raise ValueError(
- "When providing a transport instance, provide its scopes "
- "directly."
- )
- self._transport = transport
- else:
- Transport = type(self).get_transport_class(transport)
- self._transport = Transport(
- credentials=credentials,
- credentials_file=client_options.credentials_file,
- host=api_endpoint,
- scopes=client_options.scopes,
- client_cert_source_for_mtls=client_cert_source_func,
- quota_project_id=client_options.quota_project_id,
- client_info=client_info,
- always_use_jwt_access=True,
- )
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
def list_operations(
self,
@@ -349,6 +145,7 @@
page_token: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListOperationsPager:
r"""Lists operations that match the specified filter in the request.
@@ -406,12 +203,21 @@
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListOperationsPager(
- method=rpc, request=request, response=response, metadata=metadata,
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
)
# Done; return the response.
@@ -423,6 +229,7 @@
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
@@ -441,7 +248,7 @@
Returns:
google.longrunning.operations_pb2.Operation:
This resource represents a long-
- unning operation that is the result of a
+ running operation that is the result of a
network API call.
"""
@@ -459,7 +266,13 @@
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
# Done; return the response.
return response
@@ -470,6 +283,7 @@
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes a long-running operation. This method indicates that the
@@ -506,7 +320,11 @@
# Send the request.
rpc(
- request, retry=retry, timeout=timeout, metadata=metadata,
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
)
def cancel_operation(
@@ -515,6 +333,7 @@
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
@@ -560,5 +379,9 @@
# Send the request.
rpc(
- request, retry=retry, timeout=timeout, metadata=metadata,
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
)
diff --git a/google/api_core/operations_v1/operations_async_client.py b/google/api_core/operations_v1/operations_async_client.py
index 5a5e556..a60c717 100644
--- a/google/api_core/operations_v1/operations_async_client.py
+++ b/google/api_core/operations_v1/operations_async_client.py
@@ -24,9 +24,12 @@
import functools
+from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, page_iterator_async
-from google.api_core.operations_v1 import operations_client_config
+from google.api_core import retry_async as retries
+from google.api_core import timeout as timeouts
from google.longrunning import operations_pb2
+from grpc import Compression
class OperationsAsyncClient:
@@ -41,39 +44,50 @@
the default configuration is used.
"""
- def __init__(self, channel, client_config=operations_client_config.config):
+ def __init__(self, channel, client_config=None):
# Create the gRPC client stub with gRPC AsyncIO channel.
self.operations_stub = operations_pb2.OperationsStub(channel)
- # Create all wrapped methods using the interface configuration.
- # The interface config contains all of the default settings for retry
- # and timeout for each RPC method.
- interfaces = client_config["interfaces"]
- interface_config = interfaces["google.longrunning.Operations"]
- method_configs = gapic_v1.config_async.parse_method_configs(interface_config)
+ default_retry = retries.AsyncRetry(
+ initial=0.1, # seconds
+ maximum=60.0, # seconds
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ timeout=600.0, # seconds
+ )
+ default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0)
+
+ default_compression = Compression.NoCompression
self._get_operation = gapic_v1.method_async.wrap_method(
self.operations_stub.GetOperation,
- default_retry=method_configs["GetOperation"].retry,
- default_timeout=method_configs["GetOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._list_operations = gapic_v1.method_async.wrap_method(
self.operations_stub.ListOperations,
- default_retry=method_configs["ListOperations"].retry,
- default_timeout=method_configs["ListOperations"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._cancel_operation = gapic_v1.method_async.wrap_method(
self.operations_stub.CancelOperation,
- default_retry=method_configs["CancelOperation"].retry,
- default_timeout=method_configs["CancelOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._delete_operation = gapic_v1.method_async.wrap_method(
self.operations_stub.DeleteOperation,
- default_retry=method_configs["DeleteOperation"].retry,
- default_timeout=method_configs["DeleteOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
async def get_operation(
@@ -81,6 +95,7 @@
name,
retry=gapic_v1.method_async.DEFAULT,
timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
metadata=None,
):
"""Gets the latest state of a long-running operation.
@@ -107,6 +122,8 @@
unspecified, the the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+ compression (grpc.Compression): An element of grpc.compression
+ e.g. grpc.compression.Gzip.
metadata (Optional[List[Tuple[str, str]]]):
Additional gRPC metadata.
@@ -126,7 +143,11 @@
metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
return await self._get_operation(
- request, retry=retry, timeout=timeout, metadata=metadata
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
)
async def list_operations(
@@ -135,6 +156,7 @@
filter_,
retry=gapic_v1.method_async.DEFAULT,
timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
metadata=None,
):
"""
@@ -171,6 +193,8 @@
unspecified, the the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+ compression (grpc.Compression): An element of grpc.compression
+ e.g. grpc.compression.Gzip.
metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
metadata.
@@ -195,7 +219,11 @@
# Create the method used to fetch pages
method = functools.partial(
- self._list_operations, retry=retry, timeout=timeout, metadata=metadata
+ self._list_operations,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
)
iterator = page_iterator_async.AsyncGRPCIterator(
@@ -214,6 +242,7 @@
name,
retry=gapic_v1.method_async.DEFAULT,
timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
metadata=None,
):
"""Starts asynchronous cancellation on a long-running operation.
@@ -254,6 +283,8 @@
google.api_core.exceptions.GoogleAPICallError: If an error occurred
while invoking the RPC, the appropriate ``GoogleAPICallError``
subclass will be raised.
+ compression (grpc.Compression): An element of grpc.compression
+ e.g. grpc.compression.Gzip.
metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
metadata.
"""
@@ -265,7 +296,11 @@
metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
await self._cancel_operation(
- request, retry=retry, timeout=timeout, metadata=metadata
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
)
async def delete_operation(
@@ -273,6 +308,7 @@
name,
retry=gapic_v1.method_async.DEFAULT,
timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
metadata=None,
):
"""Deletes a long-running operation.
@@ -299,6 +335,8 @@
unspecified, the the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+ compression (grpc.Compression): An element of grpc.compression
+ e.g. grpc.compression.Gzip.
metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
metadata.
@@ -318,5 +356,9 @@
metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
await self._delete_operation(
- request, retry=retry, timeout=timeout, metadata=metadata
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
)
diff --git a/google/api_core/operations_v1/operations_client.py b/google/api_core/operations_v1/operations_client.py
index e48eac0..d1d3fd5 100644
--- a/google/api_core/operations_v1/operations_client.py
+++ b/google/api_core/operations_v1/operations_client.py
@@ -37,10 +37,13 @@
import functools
+from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import page_iterator
-from google.api_core.operations_v1 import operations_client_config
+from google.api_core import retry as retries
+from google.api_core import timeout as timeouts
from google.longrunning import operations_pb2
+from grpc import Compression
class OperationsClient(object):
@@ -54,39 +57,50 @@
the default configuration is used.
"""
- def __init__(self, channel, client_config=operations_client_config.config):
+ def __init__(self, channel, client_config=None):
# Create the gRPC client stub.
self.operations_stub = operations_pb2.OperationsStub(channel)
- # Create all wrapped methods using the interface configuration.
- # The interface config contains all of the default settings for retry
- # and timeout for each RPC method.
- interfaces = client_config["interfaces"]
- interface_config = interfaces["google.longrunning.Operations"]
- method_configs = gapic_v1.config.parse_method_configs(interface_config)
+ default_retry = retries.Retry(
+ initial=0.1, # seconds
+ maximum=60.0, # seconds
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.ServiceUnavailable,
+ ),
+ timeout=600.0, # seconds
+ )
+ default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0)
+
+ default_compression = Compression.NoCompression
self._get_operation = gapic_v1.method.wrap_method(
self.operations_stub.GetOperation,
- default_retry=method_configs["GetOperation"].retry,
- default_timeout=method_configs["GetOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._list_operations = gapic_v1.method.wrap_method(
self.operations_stub.ListOperations,
- default_retry=method_configs["ListOperations"].retry,
- default_timeout=method_configs["ListOperations"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._cancel_operation = gapic_v1.method.wrap_method(
self.operations_stub.CancelOperation,
- default_retry=method_configs["CancelOperation"].retry,
- default_timeout=method_configs["CancelOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
self._delete_operation = gapic_v1.method.wrap_method(
self.operations_stub.DeleteOperation,
- default_retry=method_configs["DeleteOperation"].retry,
- default_timeout=method_configs["DeleteOperation"].timeout,
+ default_retry=default_retry,
+ default_timeout=default_timeout,
+ default_compression=default_compression,
)
# Service calls
@@ -95,6 +109,7 @@
name,
retry=gapic_v1.method.DEFAULT,
timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
metadata=None,
):
"""Gets the latest state of a long-running operation.
@@ -121,6 +136,8 @@
unspecified, the the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+ compression (grpc.Compression): An element of grpc.compression
+ e.g. grpc.compression.Gzip.
metadata (Optional[List[Tuple[str, str]]]):
Additional gRPC metadata.
@@ -140,7 +157,11 @@
metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
return self._get_operation(
- request, retry=retry, timeout=timeout, metadata=metadata
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
)
def list_operations(
@@ -149,6 +170,7 @@
filter_,
retry=gapic_v1.method.DEFAULT,
timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
metadata=None,
):
"""
@@ -185,6 +207,8 @@
unspecified, the the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+ compression (grpc.Compression): An element of grpc.compression
+ e.g. grpc.compression.Gzip.
metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
metadata.
@@ -209,7 +233,11 @@
# Create the method used to fetch pages
method = functools.partial(
- self._list_operations, retry=retry, timeout=timeout, metadata=metadata
+ self._list_operations,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
)
iterator = page_iterator.GRPCIterator(
@@ -228,6 +256,7 @@
name,
retry=gapic_v1.method.DEFAULT,
timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
metadata=None,
):
"""Starts asynchronous cancellation on a long-running operation.
@@ -260,6 +289,8 @@
unspecified, the the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+ compression (grpc.Compression): An element of grpc.compression
+ e.g. grpc.compression.Gzip.
metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
metadata.
@@ -278,13 +309,20 @@
metadata = metadata or []
metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
- self._cancel_operation(request, retry=retry, timeout=timeout, metadata=metadata)
+ self._cancel_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
def delete_operation(
self,
name,
retry=gapic_v1.method.DEFAULT,
timeout=gapic_v1.method.DEFAULT,
+ compression=gapic_v1.method.DEFAULT,
metadata=None,
):
"""Deletes a long-running operation.
@@ -311,6 +349,8 @@
unspecified, the the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
+ compression (grpc.Compression): An element of grpc.compression
+ e.g. grpc.compression.Gzip.
metadata (Optional[List[Tuple[str, str]]]): Additional gRPC
metadata.
@@ -329,4 +369,10 @@
metadata = metadata or []
metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name}))
- self._delete_operation(request, retry=retry, timeout=timeout, metadata=metadata)
+ self._delete_operation(
+ request,
+ retry=retry,
+ timeout=timeout,
+ compression=compression,
+ metadata=metadata,
+ )
diff --git a/google/api_core/operations_v1/operations_client_config.py b/google/api_core/operations_v1/operations_client_config.py
index 6cf9575..3ad3548 100644
--- a/google/api_core/operations_v1/operations_client_config.py
+++ b/google/api_core/operations_v1/operations_client_config.py
@@ -12,8 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-"""gapic configuration for the googe.longrunning.operations client."""
+"""gapic configuration for the google.longrunning.operations client."""
+# DEPRECATED: retry and timeout classes are instantiated directly
config = {
"interfaces": {
"google.longrunning.Operations": {
diff --git a/google/api_core/operations_v1/operations_rest_client_async.py b/google/api_core/operations_v1/operations_rest_client_async.py
new file mode 100644
index 0000000..7ab0cd3
--- /dev/null
+++ b/google/api_core/operations_v1/operations_rest_client_async.py
@@ -0,0 +1,345 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Optional, Sequence, Tuple, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core.operations_v1 import pagers_async as pagers
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.abstract_operations_base_client import (
+ AbstractOperationsBaseClient,
+)
+from google.longrunning import operations_pb2
+
+try:
+ from google.auth.aio import credentials as ga_credentials # type: ignore
+except ImportError as e: # pragma: NO COVER
+ raise ImportError(
+ "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running "
+ "`pip install google-api-core[async_rest]`."
+ ) from e
+
+
+class AsyncOperationsRestClient(AbstractOperationsBaseClient):
+ """Manages long-running operations with a REST API service for the asynchronous client.
+
+ When an API method normally takes long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.aio.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, this defaults to 'rest_asyncio'.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ super().__init__(
+ credentials=credentials, # type: ignore
+ # NOTE: If a transport is not provided, we force the client to use the async
+ # REST transport.
+ transport=transport or "rest_asyncio",
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def get_operation(
+ self,
+ name: str,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Gets the latest state of a long-running operation.
+ Clients can use this method to poll the operation result
+ at intervals as recommended by the API service.
+
+ Args:
+ name (str):
+ The name of the operation resource.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation:
+ This resource represents a long-
+ running operation that is the result of a
+ network API call.
+
+ """
+
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def list_operations(
+ self,
+ name: str,
+ filter_: Optional[str] = None,
+ *,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOperationsAsyncPager:
+ r"""Lists operations that match the specified filter in the request.
+ If the server doesn't support this method, it returns
+ ``UNIMPLEMENTED``.
+
+ NOTE: the ``name`` binding allows API services to override the
+ binding to use different resource name schemes, such as
+ ``users/*/operations``. To override the binding, API services
+ can add a binding such as ``"/v1/{name=users/*}/operations"`` to
+ their service configuration. For backwards compatibility, the
+ default name includes the operations collection id, however
+ overriding users must ensure the name binding is the parent
+ resource, without the operations collection id.
+
+ Args:
+ name (str):
+ The name of the operation's parent
+ resource.
+ filter_ (str):
+ The standard list filter.
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operations_v1.pagers.ListOperationsPager:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create a protobuf request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+ if page_size is not None:
+ request.page_size = page_size
+ if page_token is not None:
+ request.page_token = page_token
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListOperationsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_operation(
+ self,
+ name: str,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a long-running operation. This method indicates that the
+ client is no longer interested in the operation result. It does
+ not cancel the operation. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be deleted.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ async def cancel_operation(
+ self,
+ name: Optional[str] = None,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Starts asynchronous cancellation on a long-running operation.
+ The server makes a best effort to cancel the operation, but
+ success is not guaranteed. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients
+ can use
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]
+ or other methods to check whether the cancellation succeeded or
+ whether the operation completed despite cancellation. On
+ successful cancellation, the operation is not deleted; instead,
+ it becomes an operation with an
+ [Operation.error][google.api_core.operations_v1.Operation.error] value with
+ a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ corresponding to ``Code.CANCELLED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be cancelled.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
diff --git a/google/api_core/operations_v1/pagers.py b/google/api_core/operations_v1/pagers.py
index b8a4775..132f1c6 100644
--- a/google/api_core/operations_v1/pagers.py
+++ b/google/api_core/operations_v1/pagers.py
@@ -14,7 +14,6 @@
# limitations under the License.
#
from typing import (
- Any,
Callable,
Iterator,
Sequence,
@@ -22,9 +21,10 @@
)
from google.longrunning import operations_pb2
+from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase
-class ListOperationsPager:
+class ListOperationsPager(ListOperationsPagerBase):
"""A pager for iterating through ``list_operations`` requests.
This class thinly wraps an initial
@@ -50,25 +50,9 @@
*,
metadata: Sequence[Tuple[str, str]] = ()
):
- """Instantiate the pager.
-
- Args:
- method (Callable): The method that was originally called, and
- which instantiated this pager.
- request (google.longrunning.operations_pb2.ListOperationsRequest):
- The initial request object.
- response (google.longrunning.operations_pb2.ListOperationsResponse):
- The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
- self._method = method
- self._request = request
- self._response = response
- self._metadata = metadata
-
- def __getattr__(self, name: str) -> Any:
- return getattr(self._response, name)
+ super().__init__(
+ method=method, request=request, response=response, metadata=metadata
+ )
@property
def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]:
@@ -81,6 +65,3 @@
def __iter__(self) -> Iterator[operations_pb2.Operation]:
for page in self.pages:
yield from page.operations
-
- def __repr__(self) -> str:
- return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/api_core/operations_v1/pagers_async.py b/google/api_core/operations_v1/pagers_async.py
new file mode 100644
index 0000000..e2909dd
--- /dev/null
+++ b/google/api_core/operations_v1/pagers_async.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Callable,
+ AsyncIterator,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase
+
+
+class ListOperationsAsyncPager(ListOperationsPagerBase):
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``operations`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ super().__init__(
+ method=method, request=request, response=response, metadata=metadata
+ )
+
+ @property
+ async def pages(self) -> AsyncIterator[operations_pb2.ListOperationsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]:
+ async def async_generator():
+ async for page in self.pages:
+ for operation in page.operations:
+ yield operation
+
+ return async_generator()
diff --git a/google/api_core/operations_v1/pagers_base.py b/google/api_core/operations_v1/pagers_base.py
new file mode 100644
index 0000000..24caf74
--- /dev/null
+++ b/google/api_core/operations_v1/pagers_base.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Any,
+ Callable,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+
+
+class ListOperationsPagerBase:
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``operations`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.longrunning.operations_pb2.ListOperationsRequest):
+ The initial request object.
+ response (google.longrunning.operations_pb2.ListOperationsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = request
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/api_core/operations_v1/transports/__init__.py b/google/api_core/operations_v1/transports/__init__.py
index b443c07..8c24ce6 100644
--- a/google/api_core/operations_v1/transports/__init__.py
+++ b/google/api_core/operations_v1/transports/__init__.py
@@ -14,17 +14,26 @@
# limitations under the License.
#
from collections import OrderedDict
-from typing import Dict, Type
+from typing import cast, Dict, Tuple
from .base import OperationsTransport
from .rest import OperationsRestTransport
-
# Compile a registry of transports.
-_transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
-_transport_registry["rest"] = OperationsRestTransport
+_transport_registry: Dict[str, OperationsTransport] = OrderedDict()
+_transport_registry["rest"] = cast(OperationsTransport, OperationsRestTransport)
-__all__ = (
- "OperationsTransport",
- "OperationsRestTransport",
-)
+__all__: Tuple[str, ...] = ("OperationsTransport", "OperationsRestTransport")
+
+try:
+ from .rest_asyncio import AsyncOperationsRestTransport
+
+ __all__ += ("AsyncOperationsRestTransport",)
+ _transport_registry["rest_asyncio"] = cast(
+ OperationsTransport, AsyncOperationsRestTransport
+ )
+except ImportError:
+ # This import requires the `async_rest` extra.
+ # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported
+ # as other transports are still available.
+ pass
diff --git a/google/api_core/operations_v1/transports/base.py b/google/api_core/operations_v1/transports/base.py
index 460e646..71764c1 100644
--- a/google/api_core/operations_v1/transports/base.py
+++ b/google/api_core/operations_v1/transports/base.py
@@ -14,28 +14,28 @@
# limitations under the License.
#
import abc
+import re
from typing import Awaitable, Callable, Optional, Sequence, Union
-import pkg_resources
-
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
+from google.api_core import version
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.longrunning import operations_pb2
from google.oauth2 import service_account # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
+import google.protobuf
+from google.protobuf import empty_pb2, json_format # type: ignore
+from grpc import Compression
-try:
- DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
- gapic_version=pkg_resources.get_distribution(
- "google.api_core.operations_v1",
- ).version,
- )
-except pkg_resources.DistributionNotFound:
- DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+PROTOBUF_VERSION = google.protobuf.__version__
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=version.__version__,
+)
class OperationsTransport(abc.ABC):
@@ -49,12 +49,14 @@
self,
*,
host: str = DEFAULT_HOST,
- credentials: ga_credentials.Credentials = None,
+ # TODO(https://github.com/googleapis/python-api-core/issues/709): update type hint for credentials to include `google.auth.aio.Credentials`.
+ credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
+ url_scheme="https",
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -70,6 +72,18 @@
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
@@ -80,10 +94,23 @@
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
"""
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
- host += ":443"
+ host += ":443" # pragma: NO COVER
self._host = host
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
@@ -134,6 +161,7 @@
deadline=10.0,
),
default_timeout=10.0,
+ default_compression=Compression.NoCompression,
client_info=client_info,
),
self.get_operation: gapic_v1.method.wrap_method(
@@ -148,6 +176,7 @@
deadline=10.0,
),
default_timeout=10.0,
+ default_compression=Compression.NoCompression,
client_info=client_info,
),
self.delete_operation: gapic_v1.method.wrap_method(
@@ -162,6 +191,7 @@
deadline=10.0,
),
default_timeout=10.0,
+ default_compression=Compression.NoCompression,
client_info=client_info,
),
self.cancel_operation: gapic_v1.method.wrap_method(
@@ -176,6 +206,7 @@
deadline=10.0,
),
default_timeout=10.0,
+ default_compression=Compression.NoCompression,
client_info=client_info,
),
}
@@ -183,12 +214,43 @@
def close(self):
"""Closes resources associated with the transport.
- .. warning::
- Only call this method if the transport is NOT shared
- with other clients - this may cause errors in other clients!
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
+ def _convert_protobuf_message_to_dict(
+ self, message: google.protobuf.message.Message
+ ):
+ r"""Converts protobuf message to a dictionary.
+
+ When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
+
+ Args:
+ message(google.protobuf.message.Message): The protocol buffers message
+ instance to serialize.
+
+ Returns:
+ A dict representation of the protocol buffer message.
+ """
+ # TODO(https://github.com/googleapis/python-api-core/issues/643): For backwards compatibility
+ # with protobuf 3.x 4.x, Remove once support for protobuf 3.x and 4.x is dropped.
+ if PROTOBUF_VERSION[0:2] in ["3.", "4."]:
+ result = json_format.MessageToDict(
+ message,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True, # type: ignore # backward compatibility
+ )
+ else:
+ result = json_format.MessageToDict(
+ message,
+ preserving_proto_field_name=True,
+ always_print_fields_with_no_presence=True,
+ )
+
+ return result
+
@property
def list_operations(
self,
diff --git a/google/api_core/operations_v1/transports/rest.py b/google/api_core/operations_v1/transports/rest.py
index 27ed766..0705c51 100644
--- a/google/api_core/operations_v1/transports/rest.py
+++ b/google/api_core/operations_v1/transports/rest.py
@@ -28,14 +28,19 @@
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import json_format # type: ignore
+import google.protobuf
+
+import grpc
from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
+PROTOBUF_VERSION = google.protobuf.__version__
+
OptionalRetry = Union[retries.Retry, object]
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
grpc_version=None,
- rest_version=requests_version,
+ rest_version=f"requests@{requests_version}",
)
@@ -64,7 +69,7 @@
self,
*,
host: str = "longrunning.googleapis.com",
- credentials: ga_credentials.Credentials = None,
+ credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
@@ -73,6 +78,7 @@
always_use_jwt_access: Optional[bool] = False,
url_scheme: str = "https",
http_options: Optional[Dict] = None,
+ path_prefix: str = "v1",
) -> None:
"""Instantiate the transport.
@@ -88,6 +94,18 @@
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
@@ -106,8 +124,10 @@
"https", but for testing or local servers,
"http" can be specified.
http_options: a dictionary of http_options for transcoding, to override
- the defaults from operatons.proto. Each method has an entry
+ the defaults from operations.proto. Each method has an entry
with the corresponding http rules as value.
+ path_prefix: path prefix (usually represents API version). Set to
+ "v1" by default.
"""
# Run the base constructor
@@ -125,15 +145,20 @@
)
if client_cert_source_for_mtls:
self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables.
self._prep_wrapped_messages(client_info)
self._http_options = http_options or {}
+ self._path_prefix = path_prefix
def _list_operations(
self,
request: operations_pb2.ListOperationsRequest,
*,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Call the list operations method over HTTP.
@@ -157,18 +182,17 @@
"""
http_options = [
- {"method": "get", "uri": "/v1/{name=operations}"},
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**}}/operations".format(self._path_prefix),
+ },
]
if "google.longrunning.Operations.ListOperations" in self._http_options:
http_options = self._http_options[
"google.longrunning.Operations.ListOperations"
]
- request_kwargs = json_format.MessageToDict(
- request,
- preserving_proto_field_name=True,
- including_default_value_fields=True,
- )
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
transcoded_request = path_template.transcode(http_options, **request_kwargs)
uri = transcoded_request["uri"]
@@ -179,7 +203,6 @@
json_format.ParseDict(transcoded_request["query_params"], query_params_request)
query_params = json_format.MessageToDict(
query_params_request,
- including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
)
@@ -187,8 +210,9 @@
# Send the request
headers = dict(metadata)
headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
response = getattr(self._session, method)(
- "https://{host}{uri}".format(host=self._host, uri=uri),
+ "{host}{uri}".format(host=self._host, uri=uri),
timeout=timeout,
headers=headers,
params=rest_helpers.flatten_query_params(query_params),
@@ -208,8 +232,11 @@
self,
request: operations_pb2.GetOperationRequest,
*,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> operations_pb2.Operation:
r"""Call the get operation method over HTTP.
@@ -228,24 +255,23 @@
Returns:
~.operations_pb2.Operation:
This resource represents a long-
- unning operation that is the result of a
+ running operation that is the result of a
network API call.
"""
http_options = [
- {"method": "get", "uri": "/v1/{name=operations/**}"},
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
]
if "google.longrunning.Operations.GetOperation" in self._http_options:
http_options = self._http_options[
"google.longrunning.Operations.GetOperation"
]
- request_kwargs = json_format.MessageToDict(
- request,
- preserving_proto_field_name=True,
- including_default_value_fields=True,
- )
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
transcoded_request = path_template.transcode(http_options, **request_kwargs)
uri = transcoded_request["uri"]
@@ -256,7 +282,6 @@
json_format.ParseDict(transcoded_request["query_params"], query_params_request)
query_params = json_format.MessageToDict(
query_params_request,
- including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
)
@@ -264,8 +289,9 @@
# Send the request
headers = dict(metadata)
headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
response = getattr(self._session, method)(
- "https://{host}{uri}".format(host=self._host, uri=uri),
+ "{host}{uri}".format(host=self._host, uri=uri),
timeout=timeout,
headers=headers,
params=rest_helpers.flatten_query_params(query_params),
@@ -285,8 +311,11 @@
self,
request: operations_pb2.DeleteOperationRequest,
*,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> empty_pb2.Empty:
r"""Call the delete operation method over HTTP.
@@ -304,18 +333,17 @@
"""
http_options = [
- {"method": "delete", "uri": "/v1/{name=operations/**}"},
+ {
+ "method": "delete",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
]
if "google.longrunning.Operations.DeleteOperation" in self._http_options:
http_options = self._http_options[
"google.longrunning.Operations.DeleteOperation"
]
- request_kwargs = json_format.MessageToDict(
- request,
- preserving_proto_field_name=True,
- including_default_value_fields=True,
- )
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
transcoded_request = path_template.transcode(http_options, **request_kwargs)
uri = transcoded_request["uri"]
@@ -326,7 +354,6 @@
json_format.ParseDict(transcoded_request["query_params"], query_params_request)
query_params = json_format.MessageToDict(
query_params_request,
- including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
)
@@ -334,8 +361,9 @@
# Send the request
headers = dict(metadata)
headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
response = getattr(self._session, method)(
- "https://{host}{uri}".format(host=self._host, uri=uri),
+ "{host}{uri}".format(host=self._host, uri=uri),
timeout=timeout,
headers=headers,
params=rest_helpers.flatten_query_params(query_params),
@@ -352,8 +380,11 @@
self,
request: operations_pb2.CancelOperationRequest,
*,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
+ compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
metadata: Sequence[Tuple[str, str]] = (),
) -> empty_pb2.Empty:
r"""Call the cancel operation method over HTTP.
@@ -371,18 +402,18 @@
"""
http_options = [
- {"method": "post", "uri": "/v1/{name=operations/**}:cancel", "body": "*"},
+ {
+ "method": "post",
+ "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix),
+ "body": "*",
+ },
]
if "google.longrunning.Operations.CancelOperation" in self._http_options:
http_options = self._http_options[
"google.longrunning.Operations.CancelOperation"
]
- request_kwargs = json_format.MessageToDict(
- request,
- preserving_proto_field_name=True,
- including_default_value_fields=True,
- )
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
transcoded_request = path_template.transcode(http_options, **request_kwargs)
# Jsonify the request body
@@ -390,7 +421,6 @@
json_format.ParseDict(transcoded_request["body"], body_request)
body = json_format.MessageToDict(
body_request,
- including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
)
@@ -402,7 +432,6 @@
json_format.ParseDict(transcoded_request["query_params"], query_params_request)
query_params = json_format.MessageToDict(
query_params_request,
- including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
)
@@ -410,8 +439,9 @@
# Send the request
headers = dict(metadata)
headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
response = getattr(self._session, method)(
- "https://{host}{uri}".format(host=self._host, uri=uri),
+ "{host}{uri}".format(host=self._host, uri=uri),
timeout=timeout,
headers=headers,
params=rest_helpers.flatten_query_params(query_params),
diff --git a/google/api_core/operations_v1/transports/rest_asyncio.py b/google/api_core/operations_v1/transports/rest_asyncio.py
new file mode 100644
index 0000000..71c20eb
--- /dev/null
+++ b/google/api_core/operations_v1/transports/rest_asyncio.py
@@ -0,0 +1,560 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import json
+from typing import Any, Callable, Coroutine, Dict, Optional, Sequence, Tuple
+
+from google.auth import __version__ as auth_version
+
+try:
+ from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore
+except ImportError as e: # pragma: NO COVER
+ raise ImportError(
+ "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running "
+ "`pip install google-api-core[async_rest]`."
+ ) from e
+
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import path_template # type: ignore
+from google.api_core import rest_helpers # type: ignore
+from google.api_core import retry_async as retries_async # type: ignore
+from google.auth.aio import credentials as ga_credentials_async # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import json_format # type: ignore
+
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+ grpc_version=None,
+ rest_version=f"google-auth@{auth_version}",
+)
+
+
+class AsyncOperationsRestTransport(OperationsTransport):
+ """Asynchronous REST backend transport for Operations.
+
+ Manages async long-running operations with an API service.
+
+ When an API method normally takes long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "longrunning.googleapis.com",
+ credentials: Optional[ga_credentials_async.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ http_options: Optional[Dict] = None,
+ path_prefix: str = "v1",
+ # TODO(https://github.com/googleapis/python-api-core/issues/715): Add docstring for `credentials_file` to async REST transport.
+ # TODO(https://github.com/googleapis/python-api-core/issues/716): Add docstring for `scopes` to async REST transport.
+ # TODO(https://github.com/googleapis/python-api-core/issues/717): Add docstring for `quota_project_id` to async REST transport.
+ # TODO(https://github.com/googleapis/python-api-core/issues/718): Add docstring for `client_cert_source` to async REST transport.
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.aio.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ http_options: a dictionary of http_options for transcoding, to override
+ the defaults from operations.proto. Each method has an entry
+ with the corresponding http rules as value.
+ path_prefix: path prefix (usually represents API version). Set to
+ "v1" by default.
+
+ """
+ unsupported_params = {
+ # TODO(https://github.com/googleapis/python-api-core/issues/715): Add support for `credentials_file` to async REST transport.
+ "google.api_core.client_options.ClientOptions.credentials_file": credentials_file,
+ # TODO(https://github.com/googleapis/python-api-core/issues/716): Add support for `scopes` to async REST transport.
+ "google.api_core.client_options.ClientOptions.scopes": scopes,
+ # TODO(https://github.com/googleapis/python-api-core/issues/717): Add support for `quota_project_id` to async REST transport.
+ "google.api_core.client_options.ClientOptions.quota_project_id": quota_project_id,
+ # TODO(https://github.com/googleapis/python-api-core/issues/718): Add support for `client_cert_source` to async REST transport.
+ "google.api_core.client_options.ClientOptions.client_cert_source": client_cert_source_for_mtls,
+ # TODO(https://github.com/googleapis/python-api-core/issues/718): Add support for `client_cert_source` to async REST transport.
+ "google.api_core.client_options.ClientOptions.client_cert_source": client_cert_source_for_mtls,
+ }
+ provided_unsupported_params = [
+ name for name, value in unsupported_params.items() if value is not None
+ ]
+ if provided_unsupported_params:
+ raise core_exceptions.AsyncRestUnsupportedParameterError(
+ f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}"
+ )
+
+ super().__init__(
+ host=host,
+ # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved.
+ credentials=credentials, # type: ignore
+ client_info=client_info,
+ # TODO(https://github.com/googleapis/python-api-core/issues/725): Set always_use_jwt_access token when supported.
+ always_use_jwt_access=False,
+ )
+ # TODO(https://github.com/googleapis/python-api-core/issues/708): add support for
+ # `default_host` in AsyncAuthorizedSession for feature parity with the synchronous
+ # code.
+ # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved.
+ self._session = AsyncAuthorizedSession(self._credentials) # type: ignore
+ # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables.
+ self._prep_wrapped_messages(client_info)
+ self._http_options = http_options or {}
+ self._path_prefix = path_prefix
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_operations: gapic_v1.method_async.wrap_method(
+ self.list_operations,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ self.get_operation: gapic_v1.method_async.wrap_method(
+ self.get_operation,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ self.delete_operation: gapic_v1.method_async.wrap_method(
+ self.delete_operation,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ self.cancel_operation: gapic_v1.method_async.wrap_method(
+ self.cancel_operation,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ }
+
+ async def _list_operations(
+ self,
+ request: operations_pb2.ListOperationsRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.ListOperationsResponse:
+ r"""Asynchronously call the list operations method over HTTP.
+
+ Args:
+ request (~.operations_pb2.ListOperationsRequest):
+ The request object. The request message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.ListOperationsResponse:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**}}/operations".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.ListOperations" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.ListOperations"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.ListOperationsRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+ content = await response.read()
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ # Return the response
+ api_response = operations_pb2.ListOperationsResponse()
+ json_format.Parse(content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ async def _get_operation(
+ self,
+ request: operations_pb2.GetOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Asynchronously call the get operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.GetOperationRequest):
+ The request object. The request message for
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation].
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+ This resource represents a long-
+ running operation that is the result of a
+ network API call.
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.GetOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.GetOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.GetOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+ content = await response.read()
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ # Return the response
+ api_response = operations_pb2.Operation()
+ json_format.Parse(content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ async def _delete_operation(
+ self,
+ request: operations_pb2.DeleteOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Asynchronously call the delete operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.DeleteOperationRequest):
+ The request object. The request message for
+ [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "delete",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.DeleteOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.DeleteOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.DeleteOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ content = await response.read()
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ return empty_pb2.Empty()
+
+ async def _cancel_operation(
+ self,
+ request: operations_pb2.CancelOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Add `retry` parameter
+ # to allow configuring retryable error codes.
+ ) -> empty_pb2.Empty:
+ r"""Asynchronously call the cancel operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.CancelOperationRequest):
+ The request object. The request message for
+ [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "post",
+ "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix),
+ "body": "*",
+ },
+ ]
+ if "google.longrunning.Operations.CancelOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.CancelOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ # Jsonify the request body
+ body_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["body"], body_request)
+ body = json_format.MessageToDict(
+ body_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ content = await response.read()
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ return empty_pb2.Empty()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest],
+ Coroutine[Any, Any, operations_pb2.ListOperationsResponse],
+ ]:
+ return self._list_operations
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.GetOperationRequest],
+ Coroutine[Any, Any, operations_pb2.Operation],
+ ]:
+ return self._get_operation
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.DeleteOperationRequest], Coroutine[Any, Any, empty_pb2.Empty]
+ ]:
+ return self._delete_operation
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.CancelOperationRequest], Coroutine[Any, Any, empty_pb2.Empty]
+ ]:
+ return self._cancel_operation
+
+
+__all__ = ("AsyncOperationsRestTransport",)
diff --git a/google/api_core/page_iterator.py b/google/api_core/page_iterator.py
index 7ddc5cb..23761ec 100644
--- a/google/api_core/page_iterator.py
+++ b/google/api_core/page_iterator.py
@@ -448,7 +448,7 @@
page_iter (google.gax.PageIterator): A GAX page iterator to be wrapped
to conform to the :class:`Iterator` interface.
item_to_value (Callable[Iterator, Any]): Callable to convert an item
- from the the protobuf response into a native object. Will
+ from the protobuf response into a native object. Will
be called with the iterator and a single item.
max_results (int): The maximum number of results to fetch.
diff --git a/google/api_core/path_template.py b/google/api_core/path_template.py
index 41fbd4f..b8ebb2a 100644
--- a/google/api_core/path_template.py
+++ b/google/api_core/path_template.py
@@ -176,7 +176,7 @@
"""Get the value of a field from a given dictionary.
Args:
- request (dict): A dictionary object.
+ request (dict | Message): A dictionary or a Message object.
field (str): The key to the request in dot notation.
Returns:
@@ -184,10 +184,12 @@
"""
parts = field.split(".")
value = request
+
for part in parts:
if not isinstance(value, dict):
- return
- value = value.get(part)
+ value = getattr(value, part, None)
+ else:
+ value = value.get(part)
if isinstance(value, dict):
return
return value
@@ -197,19 +199,27 @@
"""Delete the value of a field from a given dictionary.
Args:
- request (dict): A dictionary object.
+ request (dict | Message): A dictionary object or a Message.
field (str): The key to the request in dot notation.
"""
parts = deque(field.split("."))
while len(parts) > 1:
- if not isinstance(request, dict):
- return
part = parts.popleft()
- request = request.get(part)
+ if not isinstance(request, dict):
+ if hasattr(request, part):
+ request = getattr(request, part, None)
+ else:
+ return
+ else:
+ request = request.get(part)
part = parts.popleft()
if not isinstance(request, dict):
- return
- request.pop(part, None)
+ if hasattr(request, part):
+ request.ClearField(part)
+ else:
+ return
+ else:
+ request.pop(part, None)
def validate(tmpl, path):
@@ -237,59 +247,72 @@
return True if re.match(pattern, path) is not None else False
-def transcode(http_options, **request_kwargs):
+def transcode(http_options, message=None, **request_kwargs):
"""Transcodes a grpc request pattern into a proper HTTP request following the rules outlined here,
- https://github.com/googleapis/googleapis/blob/master/google/api/http.proto#L44-L312
+ https://github.com/googleapis/googleapis/blob/master/google/api/http.proto#L44-L312
- Args:
- http_options (list(dict)): A list of dicts which consist of these keys,
- 'method' (str): The http method
- 'uri' (str): The path template
- 'body' (str): The body field name (optional)
- (This is a simplified representation of the proto option `google.api.http`)
+ Args:
+ http_options (list(dict)): A list of dicts which consist of these keys,
+ 'method' (str): The http method
+ 'uri' (str): The path template
+ 'body' (str): The body field name (optional)
+ (This is a simplified representation of the proto option `google.api.http`)
- request_kwargs (dict) : A dict representing the request object
+ message (Message) : A request object (optional)
+ request_kwargs (dict) : A dict representing the request object
- Returns:
- dict: The transcoded request with these keys,
- 'method' (str) : The http method
- 'uri' (str) : The expanded uri
- 'body' (dict) : A dict representing the body (optional)
- 'query_params' (dict) : A dict mapping query parameter variables and values
+ Returns:
+ dict: The transcoded request with these keys,
+ 'method' (str) : The http method
+ 'uri' (str) : The expanded uri
+ 'body' (dict | Message) : A dict or a Message representing the body (optional)
+ 'query_params' (dict | Message) : A dict or Message mapping query parameter variables and values
- Raises:
- ValueError: If the request does not match the given template.
+ Raises:
+ ValueError: If the request does not match the given template.
"""
+ transcoded_value = message or request_kwargs
+ bindings = []
for http_option in http_options:
request = {}
# Assign path
uri_template = http_option["uri"]
- path_fields = [
- match.group("name") for match in _VARIABLE_RE.finditer(uri_template)
+ fields = [
+ (m.group("name"), m.group("template"))
+ for m in _VARIABLE_RE.finditer(uri_template)
]
- path_args = {field: get_field(request_kwargs, field) for field in path_fields}
- request["uri"] = expand(uri_template, **path_args)
+ bindings.append((uri_template, fields))
- # Remove fields used in uri path from request
- leftovers = copy.deepcopy(request_kwargs)
- for path_field in path_fields:
- delete_field(leftovers, path_field)
+ path_args = {field: get_field(transcoded_value, field) for field, _ in fields}
+ request["uri"] = expand(uri_template, **path_args)
if not validate(uri_template, request["uri"]) or not all(path_args.values()):
continue
+ # Remove fields used in uri path from request
+ leftovers = copy.deepcopy(transcoded_value)
+ for path_field, _ in fields:
+ delete_field(leftovers, path_field)
+
# Assign body and query params
body = http_option.get("body")
if body:
if body == "*":
request["body"] = leftovers
- request["query_params"] = {}
+ if message:
+ request["query_params"] = message.__class__()
+ else:
+ request["query_params"] = {}
else:
try:
- request["body"] = leftovers.pop(body)
- except KeyError:
+ if message:
+ request["body"] = getattr(leftovers, body)
+ delete_field(leftovers, body)
+ else:
+ request["body"] = leftovers.pop(body)
+ except (KeyError, AttributeError):
continue
request["query_params"] = leftovers
else:
@@ -297,4 +320,27 @@
request["method"] = http_option["method"]
return request
- raise ValueError("Request obj does not match any template")
+ bindings_description = [
+ '\n\tURI: "{}"'
+ "\n\tRequired request fields:\n\t\t{}".format(
+ uri,
+ "\n\t\t".join(
+ [
+ 'field: "{}", pattern: "{}"'.format(n, p if p else "*")
+ for n, p in fields
+ ]
+ ),
+ )
+ for uri, fields in bindings
+ ]
+
+ raise ValueError(
+ "Invalid request."
+ "\nSome of the fields of the request message are either not initialized or "
+ "initialized with an invalid value."
+ "\nPlease make sure your request matches at least one accepted HTTP binding."
+ "\nTo match a binding the request message must have all the required fields "
+ "initialized with values matching their patterns as listed below:{}".format(
+ "\n".join(bindings_description)
+ )
+ )
diff --git a/google/api_core/protobuf_helpers.py b/google/api_core/protobuf_helpers.py
index 896e89c..30cd7c8 100644
--- a/google/api_core/protobuf_helpers.py
+++ b/google/api_core/protobuf_helpers.py
@@ -63,9 +63,7 @@
# Unpack the Any object and populate the protobuf message instance.
if not any_pb.Unpack(msg_pb):
raise TypeError(
- "Could not convert {} to {}".format(
- any_pb.__class__.__name__, pb_type.__name__
- )
+ f"Could not convert `{any_pb.TypeName()}` with underlying type `google.protobuf.any_pb2.Any` to `{msg_pb.DESCRIPTOR.full_name}`"
)
# Done; return the message.
@@ -288,10 +286,10 @@
Args:
original (~google.protobuf.message.Message): the original message.
- If set to None, this field will be interpretted as an empty
+ If set to None, this field will be interpreted as an empty
message.
modified (~google.protobuf.message.Message): the modified message.
- If set to None, this field will be interpretted as an empty
+ If set to None, this field will be interpreted as an empty
message.
Returns:
@@ -313,7 +311,7 @@
modified = copy.deepcopy(original)
modified.Clear()
- if type(original) != type(modified):
+ if not isinstance(original, type(modified)):
raise ValueError(
"expected that both original and modified should be of the "
'same type, received "{!r}" and "{!r}".'.format(
diff --git a/google/api_core/rest_helpers.py b/google/api_core/rest_helpers.py
index 23fb614..a78822f 100644
--- a/google/api_core/rest_helpers.py
+++ b/google/api_core/rest_helpers.py
@@ -18,8 +18,8 @@
import operator
-def flatten_query_params(obj):
- """Flatten a nested dict into a list of (name,value) tuples.
+def flatten_query_params(obj, strict=False):
+ """Flatten a dict into a list of (name,value) tuples.
The result is suitable for setting query params on an http request.
@@ -28,9 +28,10 @@
>>> obj = {'a':
... {'b':
... {'c': ['x', 'y', 'z']} },
- ... 'd': 'uvw', }
- >>> flatten_query_params(obj)
- [('a.b.c', 'x'), ('a.b.c', 'y'), ('a.b.c', 'z'), ('d', 'uvw')]
+ ... 'd': 'uvw',
+ ... 'e': True, }
+ >>> flatten_query_params(obj, strict=True)
+ [('a.b.c', 'x'), ('a.b.c', 'y'), ('a.b.c', 'z'), ('d', 'uvw'), ('e', 'true')]
Note that, as described in
https://github.com/googleapis/googleapis/blob/48d9fb8c8e287c472af500221c6450ecd45d7d39/google/api/http.proto#L117,
@@ -38,7 +39,9 @@
This is enforced in this function.
Args:
- obj: a nested dictionary (from json), or None
+ obj: a possibly nested dictionary (from json), or None
+ strict: a bool, defaulting to False, to enforce that all values in the
+ result tuples be strings and, if boolean, lower-cased.
Returns: a list of tuples, with each tuple having a (possibly) multi-part name
and a scalar value.
@@ -51,17 +54,17 @@
if obj is not None and not isinstance(obj, dict):
raise TypeError("flatten_query_params must be called with dict object")
- return _flatten(obj, key_path=[])
+ return _flatten(obj, key_path=[], strict=strict)
-def _flatten(obj, key_path):
+def _flatten(obj, key_path, strict=False):
if obj is None:
return []
if isinstance(obj, dict):
- return _flatten_dict(obj, key_path=key_path)
+ return _flatten_dict(obj, key_path=key_path, strict=strict)
if isinstance(obj, list):
- return _flatten_list(obj, key_path=key_path)
- return _flatten_value(obj, key_path=key_path)
+ return _flatten_list(obj, key_path=key_path, strict=strict)
+ return _flatten_value(obj, key_path=key_path, strict=strict)
def _is_primitive_value(obj):
@@ -74,21 +77,33 @@
return True
-def _flatten_value(obj, key_path):
- return [(".".join(key_path), obj)]
+def _flatten_value(obj, key_path, strict=False):
+ return [(".".join(key_path), _canonicalize(obj, strict=strict))]
-def _flatten_dict(obj, key_path):
- items = (_flatten(value, key_path=key_path + [key]) for key, value in obj.items())
+def _flatten_dict(obj, key_path, strict=False):
+ items = (
+ _flatten(value, key_path=key_path + [key], strict=strict)
+ for key, value in obj.items()
+ )
return functools.reduce(operator.concat, items, [])
-def _flatten_list(elems, key_path):
+def _flatten_list(elems, key_path, strict=False):
# Only lists of scalar values are supported.
# The name (key_path) is repeated for each value.
items = (
- _flatten_value(elem, key_path=key_path)
+ _flatten_value(elem, key_path=key_path, strict=strict)
for elem in elems
if _is_primitive_value(elem)
)
return functools.reduce(operator.concat, items, [])
+
+
+def _canonicalize(obj, strict=False):
+ if strict:
+ value = str(obj)
+ if isinstance(obj, bool):
+ value = value.lower()
+ return value
+ return obj
diff --git a/google/api_core/rest_streaming.py b/google/api_core/rest_streaming.py
new file mode 100644
index 0000000..84aa270
--- /dev/null
+++ b/google/api_core/rest_streaming.py
@@ -0,0 +1,66 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for server-side streaming in REST."""
+
+from typing import Union
+
+import proto
+import requests
+import google.protobuf.message
+from google.api_core._rest_streaming_base import BaseResponseIterator
+
+
+class ResponseIterator(BaseResponseIterator):
+ """Iterator over REST API responses.
+
+ Args:
+ response (requests.Response): An API response object.
+ response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
+ class expected to be returned from an API.
+
+ Raises:
+ ValueError:
+ - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
+ """
+
+ def __init__(
+ self,
+ response: requests.Response,
+ response_message_cls: Union[proto.Message, google.protobuf.message.Message],
+ ):
+ self._response = response
+ # Inner iterator over HTTP response's content.
+ self._response_itr = self._response.iter_content(decode_unicode=True)
+ super(ResponseIterator, self).__init__(
+ response_message_cls=response_message_cls
+ )
+
+ def cancel(self):
+ """Cancel existing streaming operation."""
+ self._response.close()
+
+ def __next__(self):
+ while not self._ready_objs:
+ try:
+ chunk = next(self._response_itr)
+ self._process_chunk(chunk)
+ except StopIteration as e:
+ if self._level > 0:
+ raise ValueError("Unfinished stream: %s" % self._obj)
+ raise e
+ return self._grab()
+
+ def __iter__(self):
+ return self
diff --git a/google/api_core/rest_streaming_async.py b/google/api_core/rest_streaming_async.py
new file mode 100644
index 0000000..370c2b5
--- /dev/null
+++ b/google/api_core/rest_streaming_async.py
@@ -0,0 +1,89 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for asynchronous server-side streaming in REST."""
+
+from typing import Union
+
+import proto
+
+try:
+ import google.auth.aio.transport
+except ImportError as e: # pragma: NO COVER
+ raise ImportError(
+ "`google-api-core[async_rest]` is required to use asynchronous rest streaming. "
+ "Install the `async_rest` extra of `google-api-core` using "
+ "`pip install google-api-core[async_rest]`."
+ ) from e
+
+import google.protobuf.message
+from google.api_core._rest_streaming_base import BaseResponseIterator
+
+
+class AsyncResponseIterator(BaseResponseIterator):
+ """Asynchronous Iterator over REST API responses.
+
+ Args:
+ response (google.auth.aio.transport.Response): An API response object.
+ response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
+ class expected to be returned from an API.
+
+ Raises:
+ ValueError:
+ - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
+ """
+
+ def __init__(
+ self,
+ response: google.auth.aio.transport.Response,
+ response_message_cls: Union[proto.Message, google.protobuf.message.Message],
+ ):
+ self._response = response
+ self._chunk_size = 1024
+ # TODO(https://github.com/googleapis/python-api-core/issues/703): mypy does not recognize the abstract content
+ # method as an async generator as it looks for the `yield` keyword in the implementation.
+ # Given that the abstract method is not implemented, mypy fails to recognize it as an async generator.
+ # mypy warnings are silenced until the linked issue is resolved.
+ self._response_itr = self._response.content(self._chunk_size).__aiter__() # type: ignore
+ super(AsyncResponseIterator, self).__init__(
+ response_message_cls=response_message_cls
+ )
+
+ async def __aenter__(self):
+ return self
+
+ async def cancel(self):
+ """Cancel existing streaming operation."""
+ await self._response.close()
+
+ async def __anext__(self):
+ while not self._ready_objs:
+ try:
+ chunk = await self._response_itr.__anext__()
+ chunk = chunk.decode("utf-8")
+ self._process_chunk(chunk)
+ except StopAsyncIteration as e:
+ if self._level > 0:
+ raise ValueError("Unfinished stream: %s" % self._obj)
+ raise e
+ except ValueError as e:
+ raise e
+ return self._grab()
+
+ def __aiter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc, tb):
+ """Cancel existing async streaming operation."""
+ await self._response.close()
diff --git a/google/api_core/retry.py b/google/api_core/retry.py
deleted file mode 100644
index bd3a4a6..0000000
--- a/google/api_core/retry.py
+++ /dev/null
@@ -1,366 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for retrying functions with exponential back-off.
-
-The :class:`Retry` decorator can be used to retry functions that raise
-exceptions using exponential backoff. Because a exponential sleep algorithm is
-used, the retry is limited by a `deadline`. The deadline is the maxmimum amount
-of time a method can block. This is used instead of total number of retries
-because it is difficult to ascertain the amount of time a function can block
-when using total number of retries and exponential backoff.
-
-By default, this decorator will retry transient
-API errors (see :func:`if_transient_error`). For example:
-
-.. code-block:: python
-
- @retry.Retry()
- def call_flaky_rpc():
- return client.flaky_rpc()
-
- # Will retry flaky_rpc() if it raises transient API errors.
- result = call_flaky_rpc()
-
-You can pass a custom predicate to retry on different exceptions, such as
-waiting for an eventually consistent item to be available:
-
-.. code-block:: python
-
- @retry.Retry(predicate=if_exception_type(exceptions.NotFound))
- def check_if_exists():
- return client.does_thing_exist()
-
- is_available = check_if_exists()
-
-Some client library methods apply retry automatically. These methods can accept
-a ``retry`` parameter that allows you to configure the behavior:
-
-.. code-block:: python
-
- my_retry = retry.Retry(deadline=60)
- result = client.some_method(retry=my_retry)
-
-"""
-
-from __future__ import unicode_literals
-
-import datetime
-import functools
-import logging
-import random
-import time
-
-import requests.exceptions
-
-from google.api_core import datetime_helpers
-from google.api_core import exceptions
-from google.auth import exceptions as auth_exceptions
-
-_LOGGER = logging.getLogger(__name__)
-_DEFAULT_INITIAL_DELAY = 1.0 # seconds
-_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
-_DEFAULT_DELAY_MULTIPLIER = 2.0
-_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
-
-
-def if_exception_type(*exception_types):
- """Creates a predicate to check if the exception is of a given type.
-
- Args:
- exception_types (Sequence[:func:`type`]): The exception types to check
- for.
-
- Returns:
- Callable[Exception]: A predicate that returns True if the provided
- exception is of the given type(s).
- """
-
- def if_exception_type_predicate(exception):
- """Bound predicate for checking an exception type."""
- return isinstance(exception, exception_types)
-
- return if_exception_type_predicate
-
-
-# pylint: disable=invalid-name
-# Pylint sees this as a constant, but it is also an alias that should be
-# considered a function.
-if_transient_error = if_exception_type(
- exceptions.InternalServerError,
- exceptions.TooManyRequests,
- exceptions.ServiceUnavailable,
- requests.exceptions.ConnectionError,
- requests.exceptions.ChunkedEncodingError,
- auth_exceptions.TransportError,
-)
-"""A predicate that checks if an exception is a transient API error.
-
-The following server errors are considered transient:
-
-- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC
- ``INTERNAL(13)`` and its subclasses.
-- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429
-- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503
-- :class:`requests.exceptions.ConnectionError`
-- :class:`requests.exceptions.ChunkedEncodingError` - The server declared
- chunked encoding but sent an invalid chunk.
-- :class:`google.auth.exceptions.TransportError` - Used to indicate an
- error occurred during an HTTP request.
-"""
-# pylint: enable=invalid-name
-
-
-def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER):
- """Generates sleep intervals based on the exponential back-off algorithm.
-
- This implements the `Truncated Exponential Back-off`_ algorithm.
-
- .. _Truncated Exponential Back-off:
- https://cloud.google.com/storage/docs/exponential-backoff
-
- Args:
- initial (float): The minimum amount of time to delay. This must
- be greater than 0.
- maximum (float): The maximum amount of time to delay.
- multiplier (float): The multiplier applied to the delay.
-
- Yields:
- float: successive sleep intervals.
- """
- delay = initial
- while True:
- # Introduce jitter by yielding a delay that is uniformly distributed
- # to average out to the delay time.
- yield min(random.uniform(0.0, delay * 2.0), maximum)
- delay = delay * multiplier
-
-
-def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
- """Call a function and retry if it fails.
-
- This is the lowest-level retry helper. Generally, you'll use the
- higher-level retry helper :class:`Retry`.
-
- Args:
- target(Callable): The function to call and retry. This must be a
- nullary function - apply arguments with `functools.partial`.
- predicate (Callable[Exception]): A callable used to determine if an
- exception raised by the target should be considered retryable.
- It should return True to retry or False otherwise.
- sleep_generator (Iterable[float]): An infinite iterator that determines
- how long to sleep between retries.
- deadline (float): How long to keep retrying the target. The last sleep
- period is shortened as necessary, so that the last retry runs at
- ``deadline`` (and not considerably beyond it).
- on_error (Callable[Exception]): A function to call while processing a
- retryable exception. Any error raised by this function will *not*
- be caught.
-
- Returns:
- Any: the return value of the target function.
-
- Raises:
- google.api_core.RetryError: If the deadline is exceeded while retrying.
- ValueError: If the sleep generator stops yielding values.
- Exception: If the target raises a method that isn't retryable.
- """
- if deadline is not None:
- deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta(
- seconds=deadline
- )
- else:
- deadline_datetime = None
-
- last_exc = None
-
- for sleep in sleep_generator:
- try:
- return target()
-
- # pylint: disable=broad-except
- # This function explicitly must deal with broad exceptions.
- except Exception as exc:
- if not predicate(exc):
- raise
- last_exc = exc
- if on_error is not None:
- on_error(exc)
-
- now = datetime_helpers.utcnow()
-
- if deadline_datetime is not None:
- if deadline_datetime <= now:
- raise exceptions.RetryError(
- "Deadline of {:.1f}s exceeded while calling {}".format(
- deadline, target
- ),
- last_exc,
- ) from last_exc
- else:
- time_to_deadline = (deadline_datetime - now).total_seconds()
- sleep = min(time_to_deadline, sleep)
-
- _LOGGER.debug(
- "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
- )
- time.sleep(sleep)
-
- raise ValueError("Sleep generator stopped yielding sleep values.")
-
-
-class Retry(object):
- """Exponential retry decorator.
-
- This class is a decorator used to add exponential back-off retry behavior
- to an RPC call.
-
- Although the default behavior is to retry transient API errors, a
- different predicate can be provided to retry other exceptions.
-
- Args:
- predicate (Callable[Exception]): A callable that should return ``True``
- if the given exception is retryable.
- initial (float): The minimum amount of time to delay in seconds. This
- must be greater than 0.
- maximum (float): The maximum amount of time to delay in seconds.
- multiplier (float): The multiplier applied to the delay.
- deadline (float): How long to keep retrying in seconds. The last sleep
- period is shortened as necessary, so that the last retry runs at
- ``deadline`` (and not considerably beyond it).
- """
-
- def __init__(
- self,
- predicate=if_transient_error,
- initial=_DEFAULT_INITIAL_DELAY,
- maximum=_DEFAULT_MAXIMUM_DELAY,
- multiplier=_DEFAULT_DELAY_MULTIPLIER,
- deadline=_DEFAULT_DEADLINE,
- on_error=None,
- ):
- self._predicate = predicate
- self._initial = initial
- self._multiplier = multiplier
- self._maximum = maximum
- self._deadline = deadline
- self._on_error = on_error
-
- def __call__(self, func, on_error=None):
- """Wrap a callable with retry behavior.
-
- Args:
- func (Callable): The callable to add retry behavior to.
- on_error (Callable[Exception]): A function to call while processing
- a retryable exception. Any error raised by this function will
- *not* be caught.
-
- Returns:
- Callable: A callable that will invoke ``func`` with retry
- behavior.
- """
- if self._on_error is not None:
- on_error = self._on_error
-
- @functools.wraps(func)
- def retry_wrapped_func(*args, **kwargs):
- """A wrapper that calls target function with retry."""
- target = functools.partial(func, *args, **kwargs)
- sleep_generator = exponential_sleep_generator(
- self._initial, self._maximum, multiplier=self._multiplier
- )
- return retry_target(
- target,
- self._predicate,
- sleep_generator,
- self._deadline,
- on_error=on_error,
- )
-
- return retry_wrapped_func
-
- @property
- def deadline(self):
- return self._deadline
-
- def with_deadline(self, deadline):
- """Return a copy of this retry with the given deadline.
-
- Args:
- deadline (float): How long to keep retrying.
-
- Returns:
- Retry: A new retry instance with the given deadline.
- """
- return Retry(
- predicate=self._predicate,
- initial=self._initial,
- maximum=self._maximum,
- multiplier=self._multiplier,
- deadline=deadline,
- on_error=self._on_error,
- )
-
- def with_predicate(self, predicate):
- """Return a copy of this retry with the given predicate.
-
- Args:
- predicate (Callable[Exception]): A callable that should return
- ``True`` if the given exception is retryable.
-
- Returns:
- Retry: A new retry instance with the given predicate.
- """
- return Retry(
- predicate=predicate,
- initial=self._initial,
- maximum=self._maximum,
- multiplier=self._multiplier,
- deadline=self._deadline,
- on_error=self._on_error,
- )
-
- def with_delay(self, initial=None, maximum=None, multiplier=None):
- """Return a copy of this retry with the given delay options.
-
- Args:
- initial (float): The minimum amount of time to delay. This must
- be greater than 0.
- maximum (float): The maximum amount of time to delay.
- multiplier (float): The multiplier applied to the delay.
-
- Returns:
- Retry: A new retry instance with the given predicate.
- """
- return Retry(
- predicate=self._predicate,
- initial=initial if initial is not None else self._initial,
- maximum=maximum if maximum is not None else self._maximum,
- multiplier=multiplier if multiplier is not None else self._multiplier,
- deadline=self._deadline,
- on_error=self._on_error,
- )
-
- def __str__(self):
- return (
- "<Retry predicate={}, initial={:.1f}, maximum={:.1f}, "
- "multiplier={:.1f}, deadline={:.1f}, on_error={}>".format(
- self._predicate,
- self._initial,
- self._maximum,
- self._multiplier,
- self._deadline,
- self._on_error,
- )
- )
diff --git a/google/api_core/retry/__init__.py b/google/api_core/retry/__init__.py
new file mode 100644
index 0000000..1724fdb
--- /dev/null
+++ b/google/api_core/retry/__init__.py
@@ -0,0 +1,52 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Retry implementation for Google API client libraries."""
+
+from .retry_base import exponential_sleep_generator
+from .retry_base import if_exception_type
+from .retry_base import if_transient_error
+from .retry_base import build_retry_error
+from .retry_base import RetryFailureReason
+from .retry_unary import Retry
+from .retry_unary import retry_target
+from .retry_unary_async import AsyncRetry
+from .retry_unary_async import retry_target as retry_target_async
+from .retry_streaming import StreamingRetry
+from .retry_streaming import retry_target_stream
+from .retry_streaming_async import AsyncStreamingRetry
+from .retry_streaming_async import retry_target_stream as retry_target_stream_async
+
+# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry.py
+#
+# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576)
+from google.api_core import datetime_helpers # noqa: F401
+from google.api_core import exceptions # noqa: F401
+from google.auth import exceptions as auth_exceptions # noqa: F401
+
+__all__ = (
+ "exponential_sleep_generator",
+ "if_exception_type",
+ "if_transient_error",
+ "build_retry_error",
+ "RetryFailureReason",
+ "Retry",
+ "AsyncRetry",
+ "StreamingRetry",
+ "AsyncStreamingRetry",
+ "retry_target",
+ "retry_target_async",
+ "retry_target_stream",
+ "retry_target_stream_async",
+)
diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py
new file mode 100644
index 0000000..1606e0f
--- /dev/null
+++ b/google/api_core/retry/retry_base.py
@@ -0,0 +1,361 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared classes and functions for retrying requests.
+
+:class:`_BaseRetry` is the base class for :class:`Retry`,
+:class:`AsyncRetry`, :class:`StreamingRetry`, and :class:`AsyncStreamingRetry`.
+"""
+
+from __future__ import annotations
+
+import logging
+import random
+import time
+
+from enum import Enum
+from typing import Any, Callable, Optional, TYPE_CHECKING
+
+import requests.exceptions
+
+from google.api_core import exceptions
+from google.auth import exceptions as auth_exceptions
+
+if TYPE_CHECKING:
+ import sys
+
+ if sys.version_info >= (3, 11):
+ from typing import Self
+ else:
+ from typing_extensions import Self
+
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+
+_LOGGER = logging.getLogger("google.api_core.retry")
+
+
+def if_exception_type(
+ *exception_types: type[Exception],
+) -> Callable[[Exception], bool]:
+ """Creates a predicate to check if the exception is of a given type.
+
+ Args:
+ exception_types (Sequence[:func:`type`]): The exception types to check
+ for.
+
+ Returns:
+ Callable[Exception]: A predicate that returns True if the provided
+ exception is of the given type(s).
+ """
+
+ def if_exception_type_predicate(exception: Exception) -> bool:
+ """Bound predicate for checking an exception type."""
+ return isinstance(exception, exception_types)
+
+ return if_exception_type_predicate
+
+
+# pylint: disable=invalid-name
+# Pylint sees this as a constant, but it is also an alias that should be
+# considered a function.
+if_transient_error = if_exception_type(
+ exceptions.InternalServerError,
+ exceptions.TooManyRequests,
+ exceptions.ServiceUnavailable,
+ requests.exceptions.ConnectionError,
+ requests.exceptions.ChunkedEncodingError,
+ auth_exceptions.TransportError,
+)
+"""A predicate that checks if an exception is a transient API error.
+
+The following server errors are considered transient:
+
+- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC
+ ``INTERNAL(13)`` and its subclasses.
+- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429
+- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503
+- :class:`requests.exceptions.ConnectionError`
+- :class:`requests.exceptions.ChunkedEncodingError` - The server declared
+ chunked encoding but sent an invalid chunk.
+- :class:`google.auth.exceptions.TransportError` - Used to indicate an
+ error occurred during an HTTP request.
+"""
+# pylint: enable=invalid-name
+
+
+def exponential_sleep_generator(
+ initial: float, maximum: float, multiplier: float = _DEFAULT_DELAY_MULTIPLIER
+):
+ """Generates sleep intervals based on the exponential back-off algorithm.
+
+ This implements the `Truncated Exponential Back-off`_ algorithm.
+
+ .. _Truncated Exponential Back-off:
+ https://cloud.google.com/storage/docs/exponential-backoff
+
+ Args:
+ initial (float): The minimum amount of time to delay. This must
+ be greater than 0.
+ maximum (float): The maximum amount of time to delay.
+ multiplier (float): The multiplier applied to the delay.
+
+ Yields:
+ float: successive sleep intervals.
+ """
+ max_delay = min(initial, maximum)
+ while True:
+ yield random.uniform(0.0, max_delay)
+ max_delay = min(max_delay * multiplier, maximum)
+
+
+class RetryFailureReason(Enum):
+ """
+ The cause of a failed retry, used when building exceptions
+ """
+
+ TIMEOUT = 0
+ NON_RETRYABLE_ERROR = 1
+
+
+def build_retry_error(
+ exc_list: list[Exception],
+ reason: RetryFailureReason,
+ timeout_val: float | None,
+ **kwargs: Any,
+) -> tuple[Exception, Exception | None]:
+ """
+ Default exception_factory implementation.
+
+ Returns a RetryError if the failure is due to a timeout, otherwise
+ returns the last exception encountered.
+
+ Args:
+ - exc_list: list of exceptions that occurred during the retry
+ - reason: reason for the retry failure.
+ Can be TIMEOUT or NON_RETRYABLE_ERROR
+ - timeout_val: the original timeout value for the retry (in seconds), for use in the exception message
+
+ Returns:
+ - tuple: a tuple of the exception to be raised, and the cause exception if any
+ """
+ if reason == RetryFailureReason.TIMEOUT:
+ # return RetryError with the most recent exception as the cause
+ src_exc = exc_list[-1] if exc_list else None
+ timeout_val_str = f"of {timeout_val:0.1f}s " if timeout_val is not None else ""
+ return (
+ exceptions.RetryError(
+ f"Timeout {timeout_val_str}exceeded",
+ src_exc,
+ ),
+ src_exc,
+ )
+ elif exc_list:
+ # return most recent exception encountered
+ return exc_list[-1], None
+ else:
+ # no exceptions were given in exc_list. Raise generic RetryError
+ return exceptions.RetryError("Unknown error", None), None
+
+
+def _retry_error_helper(
+ exc: Exception,
+ deadline: float | None,
+ next_sleep: float,
+ error_list: list[Exception],
+ predicate_fn: Callable[[Exception], bool],
+ on_error_fn: Callable[[Exception], None] | None,
+ exc_factory_fn: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ],
+ original_timeout: float | None,
+):
+ """
+ Shared logic for handling an error for all retry implementations
+
+ - Raises an error on timeout or non-retryable error
+ - Calls on_error_fn if provided
+ - Logs the error
+
+ Args:
+ - exc: the exception that was raised
+ - deadline: the deadline for the retry, calculated as a diff from time.monotonic()
+ - next_sleep: the next sleep interval
+ - error_list: the list of exceptions that have been raised so far
+ - predicate_fn: takes `exc` and returns true if the operation should be retried
+ - on_error_fn: callback to execute when a retryable error occurs
+ - exc_factory_fn: callback used to build the exception to be raised on terminal failure
+ - original_timeout_val: the original timeout value for the retry (in seconds),
+ to be passed to the exception factory for building an error message
+ """
+ error_list.append(exc)
+ if not predicate_fn(exc):
+ final_exc, source_exc = exc_factory_fn(
+ error_list,
+ RetryFailureReason.NON_RETRYABLE_ERROR,
+ original_timeout,
+ )
+ raise final_exc from source_exc
+ if on_error_fn is not None:
+ on_error_fn(exc)
+ if deadline is not None and time.monotonic() + next_sleep > deadline:
+ final_exc, source_exc = exc_factory_fn(
+ error_list,
+ RetryFailureReason.TIMEOUT,
+ original_timeout,
+ )
+ raise final_exc from source_exc
+ _LOGGER.debug(
+ "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], next_sleep)
+ )
+
+
+class _BaseRetry(object):
+ """
+ Base class for retry configuration objects. This class is intended to capture retry
+ and backoff configuration that is common to both synchronous and asynchronous retries,
+ for both unary and streaming RPCs. It is not intended to be instantiated directly,
+ but rather to be subclassed by the various retry configuration classes.
+ """
+
+ def __init__(
+ self,
+ predicate: Callable[[Exception], bool] = if_transient_error,
+ initial: float = _DEFAULT_INITIAL_DELAY,
+ maximum: float = _DEFAULT_MAXIMUM_DELAY,
+ multiplier: float = _DEFAULT_DELAY_MULTIPLIER,
+ timeout: Optional[float] = _DEFAULT_DEADLINE,
+ on_error: Optional[Callable[[Exception], Any]] = None,
+ **kwargs: Any,
+ ) -> None:
+ self._predicate = predicate
+ self._initial = initial
+ self._multiplier = multiplier
+ self._maximum = maximum
+ self._timeout = kwargs.get("deadline", timeout)
+ self._deadline = self._timeout
+ self._on_error = on_error
+
+ def __call__(self, *args, **kwargs) -> Any:
+ raise NotImplementedError("Not implemented in base class")
+
+ @property
+ def deadline(self) -> float | None:
+ """
+ DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class
+ documentation for details.
+ """
+ return self._timeout
+
+ @property
+ def timeout(self) -> float | None:
+ return self._timeout
+
+ def with_deadline(self, deadline: float | None) -> Self:
+ """Return a copy of this retry with the given timeout.
+
+ DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class
+ documentation for details.
+
+ Args:
+ deadline (float|None): How long to keep retrying, in seconds. If None,
+ no timeout is enforced.
+
+ Returns:
+ Retry: A new retry instance with the given timeout.
+ """
+ return self.with_timeout(deadline)
+
+ def with_timeout(self, timeout: float | None) -> Self:
+ """Return a copy of this retry with the given timeout.
+
+ Args:
+ timeout (float): How long to keep retrying, in seconds. If None,
+ no timeout will be enforced.
+
+ Returns:
+ Retry: A new retry instance with the given timeout.
+ """
+ return type(self)(
+ predicate=self._predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ timeout=timeout,
+ on_error=self._on_error,
+ )
+
+ def with_predicate(self, predicate: Callable[[Exception], bool]) -> Self:
+ """Return a copy of this retry with the given predicate.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return
+ ``True`` if the given exception is retryable.
+
+ Returns:
+ Retry: A new retry instance with the given predicate.
+ """
+ return type(self)(
+ predicate=predicate,
+ initial=self._initial,
+ maximum=self._maximum,
+ multiplier=self._multiplier,
+ timeout=self._timeout,
+ on_error=self._on_error,
+ )
+
+ def with_delay(
+ self,
+ initial: Optional[float] = None,
+ maximum: Optional[float] = None,
+ multiplier: Optional[float] = None,
+ ) -> Self:
+ """Return a copy of this retry with the given delay options.
+
+ Args:
+ initial (float): The minimum amount of time to delay (in seconds). This must
+ be greater than 0. If None, the current value is used.
+ maximum (float): The maximum amount of time to delay (in seconds). If None, the
+ current value is used.
+ multiplier (float): The multiplier applied to the delay. If None, the current
+ value is used.
+
+ Returns:
+ Retry: A new retry instance with the given delay options.
+ """
+ return type(self)(
+ predicate=self._predicate,
+ initial=initial if initial is not None else self._initial,
+ maximum=maximum if maximum is not None else self._maximum,
+ multiplier=multiplier if multiplier is not None else self._multiplier,
+ timeout=self._timeout,
+ on_error=self._on_error,
+ )
+
+ def __str__(self) -> str:
+ return (
+ "<{} predicate={}, initial={:.1f}, maximum={:.1f}, "
+ "multiplier={:.1f}, timeout={}, on_error={}>".format(
+ type(self).__name__,
+ self._predicate,
+ self._initial,
+ self._maximum,
+ self._multiplier,
+ self._timeout, # timeout can be None, thus no {:.1f}
+ self._on_error,
+ )
+ )
diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py
new file mode 100644
index 0000000..e113323
--- /dev/null
+++ b/google/api_core/retry/retry_streaming.py
@@ -0,0 +1,263 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generator wrapper for retryable streaming RPCs.
+"""
+from __future__ import annotations
+
+from typing import (
+ Callable,
+ Optional,
+ List,
+ Tuple,
+ Iterable,
+ Generator,
+ TypeVar,
+ Any,
+ TYPE_CHECKING,
+)
+
+import sys
+import time
+import functools
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry import exponential_sleep_generator
+from google.api_core.retry import build_retry_error
+from google.api_core.retry import RetryFailureReason
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _Y = TypeVar("_Y") # yielded values
+
+
+def retry_target_stream(
+ target: Callable[_P, Iterable[_Y]],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: Optional[float] = None,
+ on_error: Optional[Callable[[Exception], None]] = None,
+ exception_factory: Callable[
+ [List[Exception], RetryFailureReason, Optional[float]],
+ Tuple[Exception, Optional[Exception]],
+ ] = build_retry_error,
+ init_args: _P.args = (),
+ init_kwargs: _P.kwargs = {},
+ **kwargs,
+) -> Generator[_Y, Any, None]:
+ """Create a generator wrapper that retries the wrapped stream if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target: The generator function to call and retry.
+ predicate: A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator: An infinite iterator that determines
+ how long to sleep between retries.
+ timeout: How long to keep retrying the target.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error: If given, the on_error callback will be called with each
+ retryable exception raised by the target. Any error raised by this
+ function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ init_args: Positional arguments to pass to the target function.
+ init_kwargs: Keyword arguments to pass to the target function.
+
+ Returns:
+ Generator: A retryable generator that wraps the target generator function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
+ """
+
+ timeout = kwargs.get("deadline", timeout)
+ deadline: Optional[float] = (
+ time.monotonic() + timeout if timeout is not None else None
+ )
+ error_list: list[Exception] = []
+
+ for sleep in sleep_generator:
+ # Start a new retry loop
+ try:
+ # Note: in the future, we can add a ResumptionStrategy object
+ # to generate new args between calls. For now, use the same args
+ # for each attempt.
+ subgenerator = target(*init_args, **init_kwargs)
+ return (yield from subgenerator)
+ # handle exceptions raised by the subgenerator
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ _retry_error_helper(
+ exc,
+ deadline,
+ sleep,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ time.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class StreamingRetry(_BaseRetry):
+ """Exponential retry decorator for streaming synchronous RPCs.
+
+ This class returns a Generator when called, which wraps the target
+ stream in retry logic. If any exception is raised by the target, the
+ entire stream will be retried within the wrapper.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Important Note: when a stream encounters a retryable error, it will
+ silently construct a fresh iterator instance in the background
+ and continue yielding (likely duplicate) values as if no error occurred.
+ This is the most general way to retry a stream, but it often is not the
+ desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...]
+
+ There are two ways to build more advanced retry logic for streams:
+
+ 1. Wrap the target
+ Use a ``target`` that maintains state between retries, and creates a
+ different generator on each retry call. For example, you can wrap a
+ network call in a function that modifies the request based on what has
+ already been returned:
+
+ .. code-block:: python
+
+ def attempt_with_modified_request(target, request, seen_items=[]):
+ # remove seen items from request on each attempt
+ new_request = modify_request(request, seen_items)
+ new_generator = target(new_request)
+ for item in new_generator:
+ yield item
+ seen_items.append(item)
+
+ retry_wrapped_fn = StreamingRetry()(attempt_with_modified_request)
+ retryable_generator = retry_wrapped_fn(target, request)
+
+ 2. Wrap the retry generator
+ Alternatively, you can wrap the retryable generator itself before
+ passing it to the end-user to add a filter on the stream. For
+ example, you can keep track of the items that were successfully yielded
+ in previous retry attempts, and only yield new items when the
+ new attempt surpasses the previous ones:
+
+ .. code-block:: python
+
+ def retryable_with_filter(target):
+ stream_idx = 0
+ # reset stream_idx when the stream is retried
+ def on_error(e):
+ nonlocal stream_idx
+ stream_idx = 0
+ # build retryable
+ retryable_gen = StreamingRetry(...)(target)
+ # keep track of what has been yielded out of filter
+ seen_items = []
+ for item in retryable_gen():
+ if stream_idx >= len(seen_items):
+ seen_items.append(item)
+ yield item
+ elif item != seen_items[stream_idx]:
+ raise ValueError("Stream differs from last attempt")
+ stream_idx += 1
+
+ filter_retry_wrapped = retryable_with_filter(target)
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (float): How long to keep retrying, in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ deadline (float): DEPRECATED: use `timeout` instead. For backward
+ compatibility, if specified it will override the ``timeout`` parameter.
+ """
+
+ def __call__(
+ self,
+ func: Callable[_P, Iterable[_Y]],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, Generator[_Y, Any, None]]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ def retry_wrapped_func(
+ *args: _P.args, **kwargs: _P.kwargs
+ ) -> Generator[_Y, Any, None]:
+ """A wrapper that calls target function with retry."""
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target_stream(
+ func,
+ predicate=self._predicate,
+ sleep_generator=sleep_generator,
+ timeout=self._timeout,
+ on_error=on_error,
+ init_args=args,
+ init_kwargs=kwargs,
+ )
+
+ return retry_wrapped_func
diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py
new file mode 100644
index 0000000..2924ba1
--- /dev/null
+++ b/google/api_core/retry/retry_streaming_async.py
@@ -0,0 +1,325 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generator wrapper for retryable async streaming RPCs.
+"""
+from __future__ import annotations
+
+from typing import (
+ cast,
+ Any,
+ Callable,
+ Iterable,
+ AsyncIterator,
+ AsyncIterable,
+ Awaitable,
+ TypeVar,
+ AsyncGenerator,
+ TYPE_CHECKING,
+)
+
+import asyncio
+import time
+import sys
+import functools
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry import exponential_sleep_generator
+from google.api_core.retry import build_retry_error
+from google.api_core.retry import RetryFailureReason
+
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _Y = TypeVar("_Y") # yielded values
+
+
+async def retry_target_stream(
+ target: Callable[_P, AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: float | None = None,
+ on_error: Callable[[Exception], None] | None = None,
+ exception_factory: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ] = build_retry_error,
+ init_args: _P.args = (),
+ init_kwargs: _P.kwargs = {},
+ **kwargs,
+) -> AsyncGenerator[_Y, None]:
+ """Create a generator wrapper that retries the wrapped stream if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`AsyncRetry`.
+
+ Args:
+ target: The generator function to call and retry.
+ predicate: A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator: An infinite iterator that determines
+ how long to sleep between retries.
+ timeout: How long to keep retrying the target.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error: If given, the on_error callback will be called with each
+ retryable exception raised by the target. Any error raised by this
+ function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ init_args: Positional arguments to pass to the target function.
+ init_kwargs: Keyword arguments to pass to the target function.
+
+ Returns:
+ AsyncGenerator: A retryable generator that wraps the target generator function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
+ """
+ target_iterator: AsyncIterator[_Y] | None = None
+ timeout = kwargs.get("deadline", timeout)
+ deadline = time.monotonic() + timeout if timeout else None
+ # keep track of retryable exceptions we encounter to pass in to exception_factory
+ error_list: list[Exception] = []
+ target_is_generator: bool | None = None
+
+ for sleep in sleep_generator:
+ # Start a new retry loop
+ try:
+ # Note: in the future, we can add a ResumptionStrategy object
+ # to generate new args between calls. For now, use the same args
+ # for each attempt.
+ target_output: AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]] = target(
+ *init_args, **init_kwargs
+ )
+ try:
+ # gapic functions return the generator behind an awaitable
+ # unwrap the awaitable so we can work with the generator directly
+ target_output = await target_output # type: ignore
+ except TypeError:
+ # was not awaitable, continue
+ pass
+ target_iterator = cast(AsyncIterable["_Y"], target_output).__aiter__()
+
+ if target_is_generator is None:
+ # Check if target supports generator features (asend, athrow, aclose)
+ target_is_generator = bool(getattr(target_iterator, "asend", None))
+
+ sent_in = None
+ while True:
+ ## Read from target_iterator
+ # If the target is a generator, we will advance it with `asend`
+ # otherwise, we will use `anext`
+ if target_is_generator:
+ next_value = await target_iterator.asend(sent_in) # type: ignore
+ else:
+ next_value = await target_iterator.__anext__()
+ ## Yield from Wrapper to caller
+ try:
+ # yield latest value from target
+ # exceptions from `athrow` and `aclose` are injected here
+ sent_in = yield next_value
+ except GeneratorExit:
+ # if wrapper received `aclose` while waiting on yield,
+ # it will raise GeneratorExit here
+ if target_is_generator:
+ # pass to inner target_iterator for handling
+ await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
+ else:
+ raise
+ return
+ except: # noqa: E722
+ # bare except catches any exception passed to `athrow`
+ if target_is_generator:
+ # delegate error handling to target_iterator
+ await cast(AsyncGenerator["_Y", None], target_iterator).athrow(
+ cast(BaseException, sys.exc_info()[1])
+ )
+ else:
+ raise
+ return
+ except StopAsyncIteration:
+ # if iterator exhausted, return
+ return
+ # handle exceptions raised by the target_iterator
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ _retry_error_helper(
+ exc,
+ deadline,
+ sleep,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ await asyncio.sleep(sleep)
+ finally:
+ if target_is_generator and target_iterator is not None:
+ await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class AsyncStreamingRetry(_BaseRetry):
+ """Exponential retry decorator for async streaming rpcs.
+
+ This class returns an AsyncGenerator when called, which wraps the target
+ stream in retry logic. If any exception is raised by the target, the
+ entire stream will be retried within the wrapper.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Important Note: when a stream encounters a retryable error, it will
+ silently construct a fresh iterator instance in the background
+ and continue yielding (likely duplicate) values as if no error occurred.
+ This is the most general way to retry a stream, but it often is not the
+ desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...]
+
+ There are two ways to build more advanced retry logic for streams:
+
+ 1. Wrap the target
+ Use a ``target`` that maintains state between retries, and creates a
+ different generator on each retry call. For example, you can wrap a
+ grpc call in a function that modifies the request based on what has
+ already been returned:
+
+ .. code-block:: python
+
+ async def attempt_with_modified_request(target, request, seen_items=[]):
+ # remove seen items from request on each attempt
+ new_request = modify_request(request, seen_items)
+ new_generator = await target(new_request)
+ async for item in new_generator:
+ yield item
+ seen_items.append(item)
+
+ retry_wrapped = AsyncRetry(is_stream=True,...)(attempt_with_modified_request, target, request, [])
+
+ 2. Wrap the retry generator
+ Alternatively, you can wrap the retryable generator itself before
+ passing it to the end-user to add a filter on the stream. For
+ example, you can keep track of the items that were successfully yielded
+ in previous retry attempts, and only yield new items when the
+ new attempt surpasses the previous ones:
+
+ .. code-block:: python
+
+ async def retryable_with_filter(target):
+ stream_idx = 0
+ # reset stream_idx when the stream is retried
+ def on_error(e):
+ nonlocal stream_idx
+ stream_idx = 0
+ # build retryable
+ retryable_gen = AsyncRetry(is_stream=True, ...)(target)
+ # keep track of what has been yielded out of filter
+ seen_items = []
+ async for item in retryable_gen:
+ if stream_idx >= len(seen_items):
+ yield item
+ seen_items.append(item)
+ elif item != seen_items[stream_idx]:
+ raise ValueError("Stream differs from last attempt")
+ stream_idx += 1
+
+ filter_retry_wrapped = retryable_with_filter(target)
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (Optional[float]): How long to keep retrying in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ is_stream (bool): Indicates whether the input function
+ should be treated as a stream function (i.e. an AsyncGenerator,
+ or function or coroutine that returns an AsyncIterable).
+ If True, the iterable will be wrapped with retry logic, and any
+ failed outputs will restart the stream. If False, only the input
+ function call itself will be retried. Defaults to False.
+ To avoid duplicate values, retryable streams should typically be
+ wrapped in additional filter logic before use.
+ deadline (float): DEPRECATED use ``timeout`` instead. If set it will
+ override ``timeout`` parameter.
+ """
+
+ def __call__(
+ self,
+ func: Callable[..., AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, Awaitable[AsyncGenerator[_Y, None]]]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable or stream to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ async def retry_wrapped_func(
+ *args: _P.args, **kwargs: _P.kwargs
+ ) -> AsyncGenerator[_Y, None]:
+ """A wrapper that calls target function with retry."""
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target_stream(
+ func,
+ self._predicate,
+ sleep_generator,
+ self._timeout,
+ on_error,
+ init_args=args,
+ init_kwargs=kwargs,
+ )
+
+ return retry_wrapped_func
diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py
new file mode 100644
index 0000000..d5dff66
--- /dev/null
+++ b/google/api_core/retry/retry_unary.py
@@ -0,0 +1,301 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying functions with exponential back-off.
+
+The :class:`Retry` decorator can be used to retry functions that raise
+exceptions using exponential backoff. Because an exponential sleep algorithm is
+used, the retry is limited by a `timeout`. The timeout determines the window
+in which retries will be attempted. This is used instead of total number of retries
+because it is difficult to ascertain the amount of time a function can block
+when using total number of retries and exponential backoff.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry.Retry()
+ def call_flaky_rpc():
+ return client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry.Retry(predicate=if_exception_type(exceptions.NotFound))
+ def check_if_exists():
+ return client.does_thing_exist()
+
+ is_available = check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry.Retry(timeout=60)
+ result = client.some_method(retry=my_retry)
+
+"""
+
+from __future__ import annotations
+
+import functools
+import sys
+import time
+import inspect
+import warnings
+from typing import Any, Callable, Iterable, TypeVar, TYPE_CHECKING
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry.retry_base import exponential_sleep_generator
+from google.api_core.retry.retry_base import build_retry_error
+from google.api_core.retry.retry_base import RetryFailureReason
+
+
+if TYPE_CHECKING:
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _R = TypeVar("_R") # target function returned value
+
+_ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead."
+
+
+def retry_target(
+ target: Callable[[], _R],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: float | None = None,
+ on_error: Callable[[Exception], None] | None = None,
+ exception_factory: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ] = build_retry_error,
+ **kwargs,
+):
+ """Call a function and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target(Callable): The function to call and retry. This must be a
+ nullary function - apply arguments with `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ timeout (Optional[float]): How long to keep retrying the target.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): If given, the on_error
+ callback will be called with each retryable exception raised by the
+ target. Any error raised by this function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ deadline (float): DEPRECATED: use ``timeout`` instead. For backward
+ compatibility, if specified it will override ``timeout`` parameter.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
+ """
+
+ timeout = kwargs.get("deadline", timeout)
+
+ deadline = time.monotonic() + timeout if timeout is not None else None
+ error_list: list[Exception] = []
+
+ for sleep in sleep_generator:
+ try:
+ result = target()
+ if inspect.isawaitable(result):
+ warnings.warn(_ASYNC_RETRY_WARNING)
+ return result
+
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ _retry_error_helper(
+ exc,
+ deadline,
+ sleep,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ time.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class Retry(_BaseRetry):
+ """Exponential retry decorator for unary synchronous RPCs.
+
+ This class is a decorator used to add retry or polling behavior to an RPC
+ call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ There are two important concepts that retry/polling behavior may operate on,
+ Deadline and Timeout, which need to be properly defined for the correct
+ usage of this class and the rest of the library.
+
+ Deadline: a fixed point in time by which a certain operation must
+ terminate. For example, if a certain operation has a deadline
+ "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an
+ error) by that time, regardless of when it was started or whether it
+ was started at all.
+
+ Timeout: the maximum duration of time after which a certain operation
+ must terminate (successfully or with an error). The countdown begins right
+ after an operation was started. For example, if an operation was started at
+ 09:24:00 with timeout of 75 seconds, it must terminate no later than
+ 09:25:15.
+
+ Unfortunately, in the past this class (and the api-core library as a whole) has not
+ been properly distinguishing the concepts of "timeout" and "deadline", and the
+ ``deadline`` parameter has meant ``timeout``. That is why
+ ``deadline`` has been deprecated and ``timeout`` should be used instead. If the
+ ``deadline`` parameter is set, it will override the ``timeout`` parameter.
+ In other words, ``retry.deadline`` should be treated as just a deprecated alias for
+ ``retry.timeout``.
+
+ Said another way, it is safe to assume that this class and the rest of this
+ library operate in terms of timeouts (not deadlines) unless explicitly
+ noted the usage of deadline semantics.
+
+ It is also important to
+ understand the three most common applications of the Timeout concept in the
+ context of this library.
+
+ Usually the generic Timeout term may stand for one of the following actual
+ timeouts: RPC Timeout, Retry Timeout, or Polling Timeout.
+
+ RPC Timeout: a value supplied by the client to the server so
+ that the server side knows the maximum amount of time it is expected to
+ spend handling that specific RPC. For example, in the case of gRPC transport,
+ RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2
+ request. The `timeout` property of this class normally never represents the
+ RPC Timeout as it is handled separately by the ``google.api_core.timeout``
+ module of this library.
+
+ Retry Timeout: this is the most common meaning of the ``timeout`` property
+ of this class, and defines how long a certain RPC may be retried in case
+ the server returns an error.
+
+ Polling Timeout: defines how long the
+ client side is allowed to call the polling RPC repeatedly to check a status of a
+ long-running operation. Each polling RPC is
+ expected to succeed (its errors are supposed to be handled by the retry
+ logic). The decision as to whether a new polling attempt needs to be made is based
+ not on the RPC status code but on the status of the returned
+ status of an operation. In other words: we will poll a long-running operation until
+ the operation is done or the polling timeout expires. Each poll will inform us of
+ the status of the operation. The poll consists of an RPC to the server that may
+ itself be retried as per the poll-specific retry settings in case of errors. The
+ operation-level retry settings do NOT apply to polling-RPC retries.
+
+ With the actual timeout types being defined above, the client libraries
+ often refer to just Timeout without clarifying which type specifically
+ that is. In that case the actual timeout type (sometimes also referred to as
+ Logical Timeout) can be determined from the context. If it is a unary rpc
+ call (i.e. a regular one) Timeout usually stands for the RPC Timeout (if
+ provided directly as a standalone value) or Retry Timeout (if provided as
+ ``retry.timeout`` property of the unary RPC's retry config). For
+ ``Operation`` or ``PollingFuture`` in general Timeout stands for
+ Polling Timeout.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (Optional[float]): How long to keep retrying, in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Callable[Exception]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ deadline (float): DEPRECATED: use `timeout` instead. For backward
+ compatibility, if specified it will override the ``timeout`` parameter.
+ """
+
+ def __call__(
+ self,
+ func: Callable[_P, _R],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, _R]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
+ """A wrapper that calls target function with retry."""
+ target = functools.partial(func, *args, **kwargs)
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return retry_target(
+ target,
+ self._predicate,
+ sleep_generator,
+ timeout=self._timeout,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py
new file mode 100644
index 0000000..e76a37b
--- /dev/null
+++ b/google/api_core/retry/retry_unary_async.py
@@ -0,0 +1,238 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for retrying coroutine functions with exponential back-off.
+
+The :class:`AsyncRetry` decorator shares most functionality and behavior with
+:class:`Retry`, but supports coroutine functions. Please refer to description
+of :class:`Retry` for more details.
+
+By default, this decorator will retry transient
+API errors (see :func:`if_transient_error`). For example:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry()
+ async def call_flaky_rpc():
+ return await client.flaky_rpc()
+
+ # Will retry flaky_rpc() if it raises transient API errors.
+ result = await call_flaky_rpc()
+
+You can pass a custom predicate to retry on different exceptions, such as
+waiting for an eventually consistent item to be available:
+
+.. code-block:: python
+
+ @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound))
+ async def check_if_exists():
+ return await client.does_thing_exist()
+
+ is_available = await check_if_exists()
+
+Some client library methods apply retry automatically. These methods can accept
+a ``retry`` parameter that allows you to configure the behavior:
+
+.. code-block:: python
+
+ my_retry = retry_async.AsyncRetry(timeout=60)
+ result = await client.some_method(retry=my_retry)
+
+"""
+
+from __future__ import annotations
+
+import asyncio
+import time
+import functools
+from typing import (
+ Awaitable,
+ Any,
+ Callable,
+ Iterable,
+ TypeVar,
+ TYPE_CHECKING,
+)
+
+from google.api_core.retry.retry_base import _BaseRetry
+from google.api_core.retry.retry_base import _retry_error_helper
+from google.api_core.retry.retry_base import exponential_sleep_generator
+from google.api_core.retry.retry_base import build_retry_error
+from google.api_core.retry.retry_base import RetryFailureReason
+
+# for backwards compatibility, expose helpers in this module
+from google.api_core.retry.retry_base import if_exception_type # noqa
+from google.api_core.retry.retry_base import if_transient_error # noqa
+
+if TYPE_CHECKING:
+ import sys
+
+ if sys.version_info >= (3, 10):
+ from typing import ParamSpec
+ else:
+ from typing_extensions import ParamSpec
+
+ _P = ParamSpec("_P") # target function call parameters
+ _R = TypeVar("_R") # target function returned value
+
+_DEFAULT_INITIAL_DELAY = 1.0 # seconds
+_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
+_DEFAULT_DELAY_MULTIPLIER = 2.0
+_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
+_DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds
+
+
+async def retry_target(
+ target: Callable[[], Awaitable[_R]],
+ predicate: Callable[[Exception], bool],
+ sleep_generator: Iterable[float],
+ timeout: float | None = None,
+ on_error: Callable[[Exception], None] | None = None,
+ exception_factory: Callable[
+ [list[Exception], RetryFailureReason, float | None],
+ tuple[Exception, Exception | None],
+ ] = build_retry_error,
+ **kwargs,
+):
+ """Await a coroutine and retry if it fails.
+
+ This is the lowest-level retry helper. Generally, you'll use the
+ higher-level retry helper :class:`Retry`.
+
+ Args:
+ target(Callable[[], Any]): The function to call and retry. This must be a
+ nullary function - apply arguments with `functools.partial`.
+ predicate (Callable[Exception]): A callable used to determine if an
+ exception raised by the target should be considered retryable.
+ It should return True to retry or False otherwise.
+ sleep_generator (Iterable[float]): An infinite iterator that determines
+ how long to sleep between retries.
+ timeout (Optional[float]): How long to keep retrying the target, in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): If given, the on_error
+ callback will be called with each retryable exception raised by the
+ target. Any error raised by this function will *not* be caught.
+ exception_factory: A function that is called when the retryable reaches
+ a terminal failure state, used to construct an exception to be raised.
+ It takes a list of all exceptions encountered, a retry.RetryFailureReason
+ enum indicating the failure cause, and the original timeout value
+ as arguments. It should return a tuple of the exception to be raised,
+ along with the cause exception if any. The default implementation will raise
+ a RetryError on timeout, or the last exception encountered otherwise.
+ deadline (float): DEPRECATED: use ``timeout`` instead. For backward
+ compatibility, if set it will override the ``timeout`` parameter.
+
+ Returns:
+ Any: the return value of the target function.
+
+ Raises:
+ ValueError: If the sleep generator stops yielding values.
+ Exception: a custom exception specified by the exception_factory if provided.
+ If no exception_factory is provided:
+ google.api_core.RetryError: If the timeout is exceeded while retrying.
+ Exception: If the target raises an error that isn't retryable.
+ """
+
+ timeout = kwargs.get("deadline", timeout)
+
+ deadline = time.monotonic() + timeout if timeout is not None else None
+ error_list: list[Exception] = []
+
+ for sleep in sleep_generator:
+ try:
+ return await target()
+ # pylint: disable=broad-except
+ # This function explicitly must deal with broad exceptions.
+ except Exception as exc:
+ # defer to shared logic for handling errors
+ _retry_error_helper(
+ exc,
+ deadline,
+ sleep,
+ error_list,
+ predicate,
+ on_error,
+ exception_factory,
+ timeout,
+ )
+ # if exception not raised, sleep before next attempt
+ await asyncio.sleep(sleep)
+
+ raise ValueError("Sleep generator stopped yielding sleep values.")
+
+
+class AsyncRetry(_BaseRetry):
+ """Exponential retry decorator for async coroutines.
+
+ This class is a decorator used to add exponential back-off retry behavior
+ to an RPC call.
+
+ Although the default behavior is to retry transient API errors, a
+ different predicate can be provided to retry other exceptions.
+
+ Args:
+ predicate (Callable[Exception]): A callable that should return ``True``
+ if the given exception is retryable.
+ initial (float): The minimum amount of time to delay in seconds. This
+ must be greater than 0.
+ maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay.
+ timeout (Optional[float]): How long to keep retrying in seconds.
+ Note: timeout is only checked before initiating a retry, so the target may
+ run past the timeout value as long as it is healthy.
+ on_error (Optional[Callable[Exception]]): A function to call while processing
+ a retryable exception. Any error raised by this function will
+ *not* be caught.
+ deadline (float): DEPRECATED: use ``timeout`` instead. If set it will
+ override ``timeout`` parameter.
+ """
+
+ def __call__(
+ self,
+ func: Callable[..., Awaitable[_R]],
+ on_error: Callable[[Exception], Any] | None = None,
+ ) -> Callable[_P, Awaitable[_R]]:
+ """Wrap a callable with retry behavior.
+
+ Args:
+ func (Callable): The callable or stream to add retry behavior to.
+ on_error (Optional[Callable[Exception]]): If given, the
+ on_error callback will be called with each retryable exception
+ raised by the wrapped function. Any error raised by this
+ function will *not* be caught. If on_error was specified in the
+ constructor, this value will be ignored.
+
+ Returns:
+ Callable: A callable that will invoke ``func`` with retry
+ behavior.
+ """
+ if self._on_error is not None:
+ on_error = self._on_error
+
+ @functools.wraps(func)
+ async def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
+ """A wrapper that calls target function with retry."""
+ sleep_generator = exponential_sleep_generator(
+ self._initial, self._maximum, multiplier=self._multiplier
+ )
+ return await retry_target(
+ functools.partial(func, *args, **kwargs),
+ predicate=self._predicate,
+ sleep_generator=sleep_generator,
+ timeout=self._timeout,
+ on_error=on_error,
+ )
+
+ return retry_wrapped_func
diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py
index 2dfa2f6..90a2d5a 100644
--- a/google/api_core/retry_async.py
+++ b/google/api_core/retry_async.py
@@ -1,4 +1,4 @@
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -11,281 +11,24 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-"""Helpers for retrying coroutine functions with exponential back-off.
-
-The :class:`AsyncRetry` decorator shares most functionality and behavior with
-:class:`Retry`, but supports coroutine functions. Please refer to description
-of :class:`Retry` for more details.
-
-By default, this decorator will retry transient
-API errors (see :func:`if_transient_error`). For example:
-
-.. code-block:: python
-
- @retry_async.AsyncRetry()
- async def call_flaky_rpc():
- return await client.flaky_rpc()
-
- # Will retry flaky_rpc() if it raises transient API errors.
- result = await call_flaky_rpc()
-
-You can pass a custom predicate to retry on different exceptions, such as
-waiting for an eventually consistent item to be available:
-
-.. code-block:: python
-
- @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound))
- async def check_if_exists():
- return await client.does_thing_exist()
-
- is_available = await check_if_exists()
-
-Some client library methods apply retry automatically. These methods can accept
-a ``retry`` parameter that allows you to configure the behavior:
-
-.. code-block:: python
-
- my_retry = retry_async.AsyncRetry(deadline=60)
- result = await client.some_method(retry=my_retry)
-
-"""
-
-import asyncio
-import datetime
-import functools
-import logging
-
-from google.api_core import datetime_helpers
-from google.api_core import exceptions
-from google.api_core.retry import exponential_sleep_generator
+#
+# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry_async.py
+#
+# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576)
+from google.api_core import datetime_helpers # noqa: F401
+from google.api_core import exceptions # noqa: F401
+from google.api_core.retry import exponential_sleep_generator # noqa: F401
from google.api_core.retry import if_exception_type # noqa: F401
-from google.api_core.retry import if_transient_error
+from google.api_core.retry import if_transient_error # noqa: F401
+from google.api_core.retry.retry_unary_async import AsyncRetry
+from google.api_core.retry.retry_unary_async import retry_target
-
-_LOGGER = logging.getLogger(__name__)
-_DEFAULT_INITIAL_DELAY = 1.0 # seconds
-_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds
-_DEFAULT_DELAY_MULTIPLIER = 2.0
-_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds
-
-
-async def retry_target(target, predicate, sleep_generator, deadline, on_error=None):
- """Call a function and retry if it fails.
-
- This is the lowest-level retry helper. Generally, you'll use the
- higher-level retry helper :class:`Retry`.
-
- Args:
- target(Callable): The function to call and retry. This must be a
- nullary function - apply arguments with `functools.partial`.
- predicate (Callable[Exception]): A callable used to determine if an
- exception raised by the target should be considered retryable.
- It should return True to retry or False otherwise.
- sleep_generator (Iterable[float]): An infinite iterator that determines
- how long to sleep between retries.
- deadline (float): How long to keep retrying the target. The last sleep
- period is shortened as necessary, so that the last retry runs at
- ``deadline`` (and not considerably beyond it).
- on_error (Callable[Exception]): A function to call while processing a
- retryable exception. Any error raised by this function will *not*
- be caught.
-
- Returns:
- Any: the return value of the target function.
-
- Raises:
- google.api_core.RetryError: If the deadline is exceeded while retrying.
- ValueError: If the sleep generator stops yielding values.
- Exception: If the target raises a method that isn't retryable.
- """
- deadline_dt = (
- (datetime_helpers.utcnow() + datetime.timedelta(seconds=deadline))
- if deadline
- else None
- )
-
- last_exc = None
-
- for sleep in sleep_generator:
- try:
- if not deadline_dt:
- return await target()
- else:
- return await asyncio.wait_for(
- target(),
- timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds(),
- )
- # pylint: disable=broad-except
- # This function explicitly must deal with broad exceptions.
- except Exception as exc:
- if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError):
- raise
- last_exc = exc
- if on_error is not None:
- on_error(exc)
-
- now = datetime_helpers.utcnow()
-
- if deadline_dt:
- if deadline_dt <= now:
- # Chains the raising RetryError with the root cause error,
- # which helps observability and debugability.
- raise exceptions.RetryError(
- "Deadline of {:.1f}s exceeded while calling {}".format(
- deadline, target
- ),
- last_exc,
- ) from last_exc
- else:
- time_to_deadline = (deadline_dt - now).total_seconds()
- sleep = min(time_to_deadline, sleep)
-
- _LOGGER.debug(
- "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep)
- )
- await asyncio.sleep(sleep)
-
- raise ValueError("Sleep generator stopped yielding sleep values.")
-
-
-class AsyncRetry:
- """Exponential retry decorator for async functions.
-
- This class is a decorator used to add exponential back-off retry behavior
- to an RPC call.
-
- Although the default behavior is to retry transient API errors, a
- different predicate can be provided to retry other exceptions.
-
- Args:
- predicate (Callable[Exception]): A callable that should return ``True``
- if the given exception is retryable.
- initial (float): The minimum a,out of time to delay in seconds. This
- must be greater than 0.
- maximum (float): The maximum amout of time to delay in seconds.
- multiplier (float): The multiplier applied to the delay.
- deadline (float): How long to keep retrying in seconds. The last sleep
- period is shortened as necessary, so that the last retry runs at
- ``deadline`` (and not considerably beyond it).
- on_error (Callable[Exception]): A function to call while processing
- a retryable exception. Any error raised by this function will
- *not* be caught.
- """
-
- def __init__(
- self,
- predicate=if_transient_error,
- initial=_DEFAULT_INITIAL_DELAY,
- maximum=_DEFAULT_MAXIMUM_DELAY,
- multiplier=_DEFAULT_DELAY_MULTIPLIER,
- deadline=_DEFAULT_DEADLINE,
- on_error=None,
- ):
- self._predicate = predicate
- self._initial = initial
- self._multiplier = multiplier
- self._maximum = maximum
- self._deadline = deadline
- self._on_error = on_error
-
- def __call__(self, func, on_error=None):
- """Wrap a callable with retry behavior.
-
- Args:
- func (Callable): The callable to add retry behavior to.
- on_error (Callable[Exception]): A function to call while processing
- a retryable exception. Any error raised by this function will
- *not* be caught.
-
- Returns:
- Callable: A callable that will invoke ``func`` with retry
- behavior.
- """
- if self._on_error is not None:
- on_error = self._on_error
-
- @functools.wraps(func)
- async def retry_wrapped_func(*args, **kwargs):
- """A wrapper that calls target function with retry."""
- target = functools.partial(func, *args, **kwargs)
- sleep_generator = exponential_sleep_generator(
- self._initial, self._maximum, multiplier=self._multiplier
- )
- return await retry_target(
- target,
- self._predicate,
- sleep_generator,
- self._deadline,
- on_error=on_error,
- )
-
- return retry_wrapped_func
-
- def _replace(
- self,
- predicate=None,
- initial=None,
- maximum=None,
- multiplier=None,
- deadline=None,
- on_error=None,
- ):
- return AsyncRetry(
- predicate=predicate or self._predicate,
- initial=initial or self._initial,
- maximum=maximum or self._maximum,
- multiplier=multiplier or self._multiplier,
- deadline=deadline or self._deadline,
- on_error=on_error or self._on_error,
- )
-
- def with_deadline(self, deadline):
- """Return a copy of this retry with the given deadline.
-
- Args:
- deadline (float): How long to keep retrying.
-
- Returns:
- AsyncRetry: A new retry instance with the given deadline.
- """
- return self._replace(deadline=deadline)
-
- def with_predicate(self, predicate):
- """Return a copy of this retry with the given predicate.
-
- Args:
- predicate (Callable[Exception]): A callable that should return
- ``True`` if the given exception is retryable.
-
- Returns:
- AsyncRetry: A new retry instance with the given predicate.
- """
- return self._replace(predicate=predicate)
-
- def with_delay(self, initial=None, maximum=None, multiplier=None):
- """Return a copy of this retry with the given delay options.
-
- Args:
- initial (float): The minimum amout of time to delay. This must
- be greater than 0.
- maximum (float): The maximum amout of time to delay.
- multiplier (float): The multiplier applied to the delay.
-
- Returns:
- AsyncRetry: A new retry instance with the given predicate.
- """
- return self._replace(initial=initial, maximum=maximum, multiplier=multiplier)
-
- def __str__(self):
- return (
- "<AsyncRetry predicate={}, initial={:.1f}, maximum={:.1f}, "
- "multiplier={:.1f}, deadline={:.1f}, on_error={}>".format(
- self._predicate,
- self._initial,
- self._maximum,
- self._multiplier,
- self._deadline,
- self._on_error,
- )
- )
+__all__ = (
+ "AsyncRetry",
+ "datetime_helpers",
+ "exceptions",
+ "exponential_sleep_generator",
+ "if_exception_type",
+ "if_transient_error",
+ "retry_target",
+)
diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py
index 7323218..55b195e 100644
--- a/google/api_core/timeout.py
+++ b/google/api_core/timeout.py
@@ -14,8 +14,9 @@
"""Decorators for applying timeout arguments to functions.
-These decorators are used to wrap API methods to apply either a constant
-or exponential timeout argument.
+These decorators are used to wrap API methods to apply either a
+Deadline-dependent (recommended), constant (DEPRECATED) or exponential
+(DEPRECATED) timeout argument.
For example, imagine an API method that can take a while to return results,
such as one that might block until a resource is ready:
@@ -66,9 +67,79 @@
_DEFAULT_DEADLINE = None
+class TimeToDeadlineTimeout(object):
+ """A decorator that decreases timeout set for an RPC based on how much time
+ has left till its deadline. The deadline is calculated as
+ ``now + initial_timeout`` when this decorator is first called for an RPC.
+
+ In other words this decorator implements deadline semantics in terms of a
+ sequence of decreasing timeouts t0 > t1 > t2 ... tn >= 0.
+
+ Args:
+ timeout (Optional[float]): the timeout (in seconds) applied to the
+ wrapped function. If `None`, the target function is expected to
+ never timeout.
+ """
+
+ def __init__(self, timeout=None, clock=datetime_helpers.utcnow):
+ self._timeout = timeout
+ self._clock = clock
+
+ def __call__(self, func):
+ """Apply the timeout decorator.
+
+ Args:
+ func (Callable): The function to apply the timeout argument to.
+ This function must accept a timeout keyword argument.
+
+ Returns:
+ Callable: The wrapped function.
+ """
+
+ first_attempt_timestamp = self._clock().timestamp()
+
+ @functools.wraps(func)
+ def func_with_timeout(*args, **kwargs):
+ """Wrapped function that adds timeout."""
+
+ if self._timeout is not None:
+ # All calculations are in seconds
+ now_timestamp = self._clock().timestamp()
+
+ # To avoid usage of nonlocal but still have round timeout
+ # numbers for first attempt (in most cases the only attempt made
+ # for an RPC).
+ if now_timestamp - first_attempt_timestamp < 0.001:
+ now_timestamp = first_attempt_timestamp
+
+ time_since_first_attempt = now_timestamp - first_attempt_timestamp
+ remaining_timeout = self._timeout - time_since_first_attempt
+
+ # Although the `deadline` parameter in `google.api_core.retry.Retry`
+ # is deprecated, and should be treated the same as the `timeout`,
+ # it is still possible for the `deadline` argument in
+ # `google.api_core.retry.Retry` to be larger than the `timeout`.
+ # See https://github.com/googleapis/python-api-core/issues/654
+ # Only positive non-zero timeouts are supported.
+ # Revert back to the initial timeout for negative or 0 timeout values.
+ if remaining_timeout < 1:
+ remaining_timeout = self._timeout
+
+ kwargs["timeout"] = remaining_timeout
+
+ return func(*args, **kwargs)
+
+ return func_with_timeout
+
+ def __str__(self):
+ return "<TimeToDeadlineTimeout timeout={:.1f}>".format(self._timeout)
+
+
class ConstantTimeout(object):
"""A decorator that adds a constant timeout argument.
+ DEPRECATED: use ``TimeToDeadlineTimeout`` instead.
+
This is effectively equivalent to
``functools.partial(func, timeout=timeout)``.
@@ -140,6 +211,9 @@
class ExponentialTimeout(object):
"""A decorator that adds an exponentially increasing timeout argument.
+ DEPRECATED: the concept of incrementing timeout exponentially has been
+ deprecated. Use ``TimeToDeadlineTimeout`` instead.
+
This is useful if a function is called multiple times. Each time the
function is called this decorator will calculate a new timeout parameter
based on the the number of times the function has been called.
@@ -156,9 +230,9 @@
deadline (Optional[float]): The overall deadline across all
invocations. This is used to prevent a very large calculated
timeout from pushing the overall execution time over the deadline.
- This is especially useful in conjuction with
+ This is especially useful in conjunction with
:mod:`google.api_core.retry`. If ``None``, the timeouts will not
- be adjusted to accomodate an overall deadline.
+ be adjusted to accommodate an overall deadline.
"""
def __init__(
@@ -174,7 +248,7 @@
self._deadline = deadline
def with_deadline(self, deadline):
- """Return a copy of this teimout with the given deadline.
+ """Return a copy of this timeout with the given deadline.
Args:
deadline (float): The overall deadline across all invocations.
diff --git a/google/api_core/universe.py b/google/api_core/universe.py
new file mode 100644
index 0000000..3566964
--- /dev/null
+++ b/google/api_core/universe.py
@@ -0,0 +1,82 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for universe domain."""
+
+from typing import Any, Optional
+
+DEFAULT_UNIVERSE = "googleapis.com"
+
+
+class EmptyUniverseError(ValueError):
+ def __init__(self):
+ message = "Universe Domain cannot be an empty string."
+ super().__init__(message)
+
+
+class UniverseMismatchError(ValueError):
+ def __init__(self, client_universe, credentials_universe):
+ message = (
+ f"The configured universe domain ({client_universe}) does not match the universe domain "
+ f"found in the credentials ({credentials_universe}). "
+ "If you haven't configured the universe domain explicitly, "
+ f"`{DEFAULT_UNIVERSE}` is the default."
+ )
+ super().__init__(message)
+
+
+def determine_domain(
+ client_universe_domain: Optional[str], universe_domain_env: Optional[str]
+) -> str:
+ """Return the universe domain used by the client.
+
+ Args:
+ client_universe_domain (Optional[str]): The universe domain configured via the client options.
+ universe_domain_env (Optional[str]): The universe domain configured via the
+ "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+ Returns:
+ str: The universe domain to be used by the client.
+
+ Raises:
+ ValueError: If the universe domain is an empty string.
+ """
+ universe_domain = DEFAULT_UNIVERSE
+ if client_universe_domain is not None:
+ universe_domain = client_universe_domain
+ elif universe_domain_env is not None:
+ universe_domain = universe_domain_env
+ if len(universe_domain.strip()) == 0:
+ raise EmptyUniverseError
+ return universe_domain
+
+
+def compare_domains(client_universe: str, credentials: Any) -> bool:
+ """Returns True iff the universe domains used by the client and credentials match.
+
+ Args:
+ client_universe (str): The universe domain configured via the client options.
+ credentials (Any): The credentials being used in the client.
+
+ Returns:
+ bool: True iff client_universe matches the universe in credentials.
+
+ Raises:
+ ValueError: when client_universe does not match the universe in credentials.
+ """
+ credentials_universe = getattr(credentials, "universe_domain", DEFAULT_UNIVERSE)
+
+ if client_universe != credentials_universe:
+ raise UniverseMismatchError(client_universe, credentials_universe)
+ return True
diff --git a/google/api_core/version.py b/google/api_core/version.py
index 999199f..96f8443 100644
--- a/google/api_core/version.py
+++ b/google/api_core/version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2.3.0"
+__version__ = "2.24.1"
diff --git a/google/api_core/version_header.py b/google/api_core/version_header.py
new file mode 100644
index 0000000..cf1972a
--- /dev/null
+++ b/google/api_core/version_header.py
@@ -0,0 +1,29 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+API_VERSION_METADATA_KEY = "x-goog-api-version"
+
+
+def to_api_version_header(version_identifier):
+ """Returns data for the API Version header for the given `version_identifier`.
+
+ Args:
+ version_identifier (str): The version identifier to be used in the
+ tuple returned.
+
+ Returns:
+ Tuple(str, str): A tuple containing the API Version metadata key and
+ value.
+ """
+ return (API_VERSION_METADATA_KEY, version_identifier)
diff --git a/mypy.ini b/mypy.ini
deleted file mode 100644
index 5c11157..0000000
--- a/mypy.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[mypy]
-python_version = 3.6
-namespace_packages = True
-ignore_missing_imports = True
diff --git a/noxfile.py b/noxfile.py
index db37c56..cffef97 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -15,18 +15,22 @@
from __future__ import absolute_import
import os
import pathlib
+import re
import shutil
+import unittest
# https://github.com/google/importlab/issues/25
import nox # pytype: disable=import-error
-BLACK_VERSION = "black==19.10b0"
+BLACK_VERSION = "black==22.3.0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
# Black and flake8 clash on the syntax for ignoring flake8's F401 in this file.
BLACK_EXCLUDES = ["--exclude", "^/google/api_core/operations_v1/__init__.py"]
-DEFAULT_PYTHON_VERSION = "3.7"
+PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
+
+DEFAULT_PYTHON_VERSION = "3.10"
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
@@ -34,6 +38,8 @@
"unit",
"unit_grpc_gcp",
"unit_wo_grpc",
+ "unit_w_prerelease_deps",
+ "unit_w_async_rest_extra",
"cover",
"pytype",
"mypy",
@@ -43,15 +49,8 @@
"docs",
]
-
-def _greater_or_equal_than_36(version_string):
- tokens = version_string.split(".")
- for i, token in enumerate(tokens):
- try:
- tokens[i] = int(token)
- except ValueError:
- pass
- return tokens >= [3, 6]
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -61,10 +60,13 @@
Returns a failure if the linters find linting errors or sufficiently
serious code quality issues.
"""
- session.install("flake8", "flake8-import-order", BLACK_VERSION)
+ session.install("flake8", BLACK_VERSION)
session.install(".")
session.run(
- "black", "--check", *BLACK_EXCLUDES, *BLACK_PATHS,
+ "black",
+ "--check",
+ *BLACK_EXCLUDES,
+ *BLACK_PATHS,
)
session.run("flake8", "google", "tests")
@@ -79,7 +81,38 @@
session.run("black", *BLACK_EXCLUDES, *BLACK_PATHS)
-def default(session, install_grpc=True):
+def install_prerelease_dependencies(session, constraints_path):
+ with open(constraints_path, encoding="utf-8") as constraints_file:
+ constraints_text = constraints_file.read()
+ # Ignore leading whitespace and comment lines.
+ constraints_deps = [
+ match.group(1)
+ for match in re.finditer(
+ r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+ )
+ ]
+ session.install(*constraints_deps)
+ prerel_deps = [
+ "google-auth",
+ "googleapis-common-protos",
+ # Exclude grpcio!=1.67.0rc1 which does not support python 3.13
+ "grpcio!=1.67.0rc1",
+ "grpcio-status",
+ "proto-plus",
+ "protobuf",
+ ]
+
+ for dep in prerel_deps:
+ session.install("--pre", "--no-deps", "--upgrade", dep)
+
+ # Remaining dependencies
+ other_deps = [
+ "requests",
+ ]
+ session.install(*other_deps)
+
+
+def default(session, install_grpc=True, prerelease=False, install_async_rest=False):
"""Default unit test session.
This is intended to be run **without** an interpreter set, so
@@ -87,70 +120,144 @@
Python corresponding to the ``nox`` binary the ``PATH`` can
run the tests.
"""
- constraints_path = str(
- CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ if prerelease and not install_grpc:
+ unittest.skip("The pre-release session cannot be run without grpc")
+
+ session.install(
+ "dataclasses",
+ "mock; python_version=='3.7'",
+ "pytest",
+ "pytest-cov",
+ "pytest-xdist",
)
- # Install all test dependencies, then install this package in-place.
- session.install("mock", "pytest", "pytest-cov")
+ install_extras = []
if install_grpc:
- session.install("-e", ".[grpc]", "-c", constraints_path)
+ # Note: The extra is called `grpc` and not `grpcio`.
+ install_extras.append("grpc")
+
+ constraints_dir = str(CURRENT_DIRECTORY / "testing")
+ if install_async_rest:
+ install_extras.append("async_rest")
+ constraints_type = "async-rest-"
else:
- session.install("-e", ".", "-c", constraints_path)
+ constraints_type = ""
+
+ lib_with_extras = f".[{','.join(install_extras)}]" if len(install_extras) else "."
+ if prerelease:
+ install_prerelease_dependencies(
+ session,
+ f"{constraints_dir}/constraints-{constraints_type}{PYTHON_VERSIONS[0]}.txt",
+ )
+ # This *must* be the last install command to get the package from source.
+ session.install("-e", lib_with_extras, "--no-deps")
+ else:
+ constraints_file = (
+ f"{constraints_dir}/constraints-{constraints_type}{session.python}.txt"
+ )
+ # fall back to standard constraints file
+ if not pathlib.Path(constraints_file).exists():
+ constraints_file = f"{constraints_dir}/constraints-{session.python}.txt"
+
+ session.install(
+ "-e",
+ lib_with_extras,
+ "-c",
+ constraints_file,
+ )
+
+ # Print out package versions of dependencies
+ session.run(
+ "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+ )
+ # Support for proto.version was added in v1.23.0
+ # https://github.com/googleapis/proto-plus-python/releases/tag/v1.23.0
+ session.run(
+ "python",
+ "-c",
+ """import proto; hasattr(proto, "version") and print(proto.version.__version__)""",
+ )
+ if install_grpc:
+ session.run("python", "-c", "import grpc; print(grpc.__version__)")
+ session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
pytest_args = [
"python",
"-m",
- "py.test",
- "--quiet",
- "--cov=google.api_core",
- "--cov=tests.unit",
- "--cov-append",
- "--cov-config=.coveragerc",
- "--cov-report=",
- "--cov-fail-under=0",
- os.path.join("tests", "unit"),
+ "pytest",
+ *(
+ # Helpful for running a single test or testfile.
+ session.posargs
+ or [
+ "--quiet",
+ "--cov=google.api_core",
+ "--cov=tests.unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ # Running individual tests with parallelism enabled is usually not helpful.
+ "-n=auto",
+ os.path.join("tests", "unit"),
+ ]
+ ),
]
- pytest_args.extend(session.posargs)
- # Inject AsyncIO content and proto-plus, if version >= 3.6.
- # proto-plus is needed for a field mask test in test_protobuf_helpers.py
- if _greater_or_equal_than_36(session.python):
- session.install("asyncmock", "pytest-asyncio", "proto-plus")
+ session.install("asyncmock", "pytest-asyncio")
+ # Having positional arguments means the user wants to run specific tests.
+ # Best not to add additional tests to that list.
+ if not session.posargs:
pytest_args.append("--cov=tests.asyncio")
pytest_args.append(os.path.join("tests", "asyncio"))
- session.run(*pytest_args)
- else:
- # Run py.test against the unit tests.
- session.run(*pytest_args)
+
+ session.run(*pytest_args)
[email protected](python=["3.6", "3.7", "3.8", "3.9", "3.10"])
[email protected](python=PYTHON_VERSIONS)
def unit(session):
"""Run the unit test suite."""
default(session)
[email protected](python=["3.6", "3.7", "3.8", "3.9"])
[email protected](python=PYTHON_VERSIONS)
+def unit_w_prerelease_deps(session):
+ """Run the unit test suite."""
+ default(session, prerelease=True)
+
+
[email protected](python=PYTHON_VERSIONS)
def unit_grpc_gcp(session):
- """Run the unit test suite with grpcio-gcp installed."""
+ """
+ Run the unit test suite with grpcio-gcp installed.
+ `grpcio-gcp` doesn't support protobuf 4+.
+ Remove extra `grpcgcp` when protobuf 3.x is dropped.
+ https://github.com/googleapis/python-api-core/issues/594
+ """
constraints_path = str(
CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
# Install grpcio-gcp
session.install("-e", ".[grpcgcp]", "-c", constraints_path)
+ # Install protobuf < 4.0.0
+ session.install("protobuf<4.0.0")
default(session)
[email protected](python=["3.6", "3.10"])
[email protected](python=PYTHON_VERSIONS)
def unit_wo_grpc(session):
"""Run the unit test suite w/o grpcio installed"""
default(session, install_grpc=False)
[email protected](python="3.6")
[email protected](python=PYTHON_VERSIONS)
+def unit_w_async_rest_extra(session):
+ """Run the unit test suite with the `async_rest` extra"""
+ default(session, install_async_rest=True)
+
+
[email protected](python=DEFAULT_PYTHON_VERSION)
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
@@ -158,25 +265,28 @@
session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
-# No 3.7 because pytype supports up to 3.6 only.
[email protected](python="3.6")
[email protected](python=DEFAULT_PYTHON_VERSION)
def pytype(session):
"""Run type-checking."""
- session.install(".[grpc, grpcgcp]", "pytype >= 2019.3.21")
+ session.install(".[grpc]", "pytype")
session.run("pytype")
@nox.session(python=DEFAULT_PYTHON_VERSION)
def mypy(session):
"""Run type-checking."""
- session.install(".[grpc, grpcgcp]", "mypy")
+ session.install(".[grpc,async_rest]", "mypy")
session.install(
- "types-setuptools", "types-requests", "types-protobuf", "types-mock"
+ "types-setuptools",
+ "types-requests",
+ "types-protobuf",
+ "types-dataclasses",
+ "types-mock; python_version=='3.7'",
)
session.run("mypy", "google", "tests")
[email protected](python="3.6")
[email protected](python=DEFAULT_PYTHON_VERSION)
def cover(session):
"""Run the final coverage report.
@@ -188,12 +298,25 @@
session.run("coverage", "erase")
[email protected](python="3.8")
[email protected](python="3.10")
def docs(session):
"""Build the docs for this library."""
- session.install("-e", ".[grpc, grpcgcp]")
- session.install("sphinx==4.0.1", "alabaster", "recommonmark")
+ session.install("-e", ".[grpc]")
+ session.install(
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
+ "sphinx==4.5.0",
+ "alabaster",
+ "recommonmark",
+ )
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
@@ -210,13 +333,24 @@
)
[email protected](python="3.8")
[email protected](python="3.10")
def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
session.install(
- "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml"
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
+ "gcp-sphinx-docfx-yaml",
+ "alabaster",
+ "recommonmark",
)
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
diff --git a/owlbot.py b/owlbot.py
index 451f7c4..c8c7654 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -16,6 +16,7 @@
import synthtool as s
from synthtool import gcp
+from synthtool.languages import python
common = gcp.CommonTemplates()
@@ -28,6 +29,9 @@
".flake8", # flake8-import-order, layout
".coveragerc", # layout
"CONTRIBUTING.rst", # no systests
+ ".github/workflows/unittest.yml", # exclude unittest gh action
+ ".github/workflows/lint.yml", # exclude lint gh action
+ "README.rst",
]
templated_files = common.py_library(microgenerator=True, cov_level=100)
s.move(templated_files, excludes=excludes)
@@ -44,4 +48,6 @@
""",
)
+python.configure_previous_major_version_branches()
+
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..fda8f01
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,107 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "google-api-core"
+authors = [{ name = "Google LLC", email = "[email protected]" }]
+license = { text = "Apache 2.0" }
+requires-python = ">=3.7"
+readme = "README.rst"
+description = "Google API client core library"
+classifiers = [
+ # Should be one of:
+ # "Development Status :: 3 - Alpha"
+ # "Development Status :: 4 - Beta"
+ # "Development Status :: 5 - Production/Stable"
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Operating System :: OS Independent",
+ "Topic :: Internet",
+]
+dependencies = [
+ "googleapis-common-protos >= 1.56.2, < 2.0.dev0",
+ "protobuf >= 3.19.5, < 6.0.0.dev0, != 3.20.0, != 3.20.1, != 4.21.0, != 4.21.1, != 4.21.2, != 4.21.3, != 4.21.4, != 4.21.5",
+ "proto-plus >= 1.22.3, < 2.0.0dev",
+ "proto-plus >= 1.25.0, < 2.0.0dev; python_version >= '3.13'",
+ "google-auth >= 2.14.1, < 3.0.dev0",
+ "requests >= 2.18.0, < 3.0.0.dev0",
+]
+dynamic = ["version"]
+
+[project.urls]
+Documentation = "https://googleapis.dev/python/google-api-core/latest/"
+Repository = "https://github.com/googleapis/python-api-core"
+
+[project.optional-dependencies]
+async_rest = ["google-auth[aiohttp] >= 2.35.0, < 3.0.dev0"]
+grpc = [
+ "grpcio >= 1.33.2, < 2.0dev",
+ "grpcio >= 1.49.1, < 2.0dev; python_version >= '3.11'",
+ "grpcio-status >= 1.33.2, < 2.0.dev0",
+ "grpcio-status >= 1.49.1, < 2.0.dev0; python_version >= '3.11'",
+]
+grpcgcp = ["grpcio-gcp >= 0.2.2, < 1.0.dev0"]
+grpcio-gcp = ["grpcio-gcp >= 0.2.2, < 1.0.dev0"]
+
+[tool.setuptools.dynamic]
+version = { attr = "google.api_core.version.__version__" }
+
+[tool.setuptools.packages.find]
+# Only include packages under the 'google' namespace. Do not include tests,
+# benchmarks, etc.
+include = ["google*"]
+
+[tool.mypy]
+python_version = "3.7"
+namespace_packages = true
+ignore_missing_imports = true
+
+[tool.pytest]
+filterwarnings = [
+ # treat all warnings as errors
+ "error",
+ # Remove once https://github.com/pytest-dev/pytest-cov/issues/621 is fixed
+ "ignore:.*The --rsyncdir command line argument and rsyncdirs config variable are deprecated:DeprecationWarning",
+ # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed
+ "ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning",
+ # Remove once support for python 3.7 is dropped
+ # This warning only appears when using python 3.7
+ "ignore:.*Using or importing the ABCs from.*collections:DeprecationWarning",
+ # Remove once support for grpcio-gcp is deprecated
+ # See https://github.com/googleapis/python-api-core/blob/42e8b6e6f426cab749b34906529e8aaf3f133d75/google/api_core/grpc_helpers.py#L39-L45
+ "ignore:.*Support for grpcio-gcp is deprecated:DeprecationWarning",
+ "ignore: The `compression` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning",
+ "ignore:The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning",
+ # Remove once the minimum supported version of googleapis-common-protos is 1.62.0
+ "ignore:.*pkg_resources.declare_namespace:DeprecationWarning",
+ "ignore:.*pkg_resources is deprecated as an API:DeprecationWarning",
+ # Remove once https://github.com/grpc/grpc/issues/35086 is fixed (and version newer than 1.60.0 is published)
+ "ignore:There is no current event loop:DeprecationWarning",
+ # Remove after support for Python 3.7 is dropped
+ "ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning",
+]
diff --git a/renovate.json b/renovate.json
index c21036d..c7875c4 100644
--- a/renovate.json
+++ b/renovate.json
@@ -5,7 +5,7 @@
":preserveSemverRanges",
":disableDependencyDashboard"
],
- "ignorePaths": [".pre-commit-config.yaml"],
+ "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"],
"pip_requirements": {
"fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
}
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index 21f6d2a..120b0dd 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-# Copyright 2015 Google Inc. All rights reserved.
+# Copyright 2024 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
index d309d6e..8f5e248 100644
--- a/scripts/readme-gen/readme_gen.py
+++ b/scripts/readme-gen/readme_gen.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-# Copyright 2016 Google Inc
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -28,19 +28,22 @@
jinja_env = jinja2.Environment(
trim_blocks=True,
loader=jinja2.FileSystemLoader(
- os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+ os.path.abspath(os.path.join(os.path.dirname(__file__), "templates"))
+ ),
+ autoescape=True,
+)
-README_TMPL = jinja_env.get_template('README.tmpl.rst')
+README_TMPL = jinja_env.get_template("README.tmpl.rst")
def get_help(file):
- return subprocess.check_output(['python', file, '--help']).decode()
+ return subprocess.check_output(["python", file, "--help"]).decode()
def main():
parser = argparse.ArgumentParser()
- parser.add_argument('source')
- parser.add_argument('--destination', default='README.rst')
+ parser.add_argument("source")
+ parser.add_argument("--destination", default="README.rst")
args = parser.parse_args()
@@ -48,9 +51,9 @@
root = os.path.dirname(source)
destination = os.path.join(root, args.destination)
- jinja_env.globals['get_help'] = get_help
+ jinja_env.globals["get_help"] = get_help
- with io.open(source, 'r') as f:
+ with io.open(source, "r") as f:
config = yaml.load(f)
# This allows get_help to execute in the right directory.
@@ -58,9 +61,9 @@
output = README_TMPL.render(config)
- with io.open(destination, 'w') as f:
+ with io.open(destination, "w") as f:
f.write(output)
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
index 275d649..6f069c6 100644
--- a/scripts/readme-gen/templates/install_deps.tmpl.rst
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -12,7 +12,7 @@
.. _Python Development Environment Setup Guide:
https://cloud.google.com/python/setup
-#. Create a virtualenv. Samples are compatible with Python 3.6+.
+#. Create a virtualenv. Samples are compatible with Python 3.7+.
.. code-block:: bash
diff --git a/setup.cfg b/setup.cfg
index 0be0b3f..f7b5a3b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,8 +1,5 @@
-[bdist_wheel]
-universal = 1
-
[pytype]
-python_version = 3.6
+python_version = 3.7
inputs =
google/
exclude =
diff --git a/setup.py b/setup.py
index ddc5600..168877f 100644
--- a/setup.py
+++ b/setup.py
@@ -12,91 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import io
-import os
-
import setuptools
-# Package metadata.
-
-name = "google-api-core"
-description = "Google API client core library"
-
-# Should be one of:
-# 'Development Status :: 3 - Alpha'
-# 'Development Status :: 4 - Beta'
-# 'Development Status :: 5 - Production/Stable'
-release_status = "Development Status :: 5 - Production/Stable"
-dependencies = [
- "googleapis-common-protos >= 1.52.0, < 2.0dev",
- "protobuf >= 3.12.0",
- "google-auth >= 1.25.0, < 3.0dev",
- "requests >= 2.18.0, < 3.0.0dev",
- "setuptools >= 40.3.0",
-]
-extras = {
- "grpc": ["grpcio >= 1.33.2, < 2.0dev", "grpcio-status >= 1.33.2, < 2.0dev"],
- "grpcgcp": "grpcio-gcp >= 0.2.2",
- "grpcio-gcp": "grpcio-gcp >= 0.2.2",
-}
-
-
-# Setup boilerplate below this line.
-
-package_root = os.path.abspath(os.path.dirname(__file__))
-
-
-version = {}
-with open(os.path.join(package_root, "google/api_core/version.py")) as fp:
- exec(fp.read(), version)
-version = version["__version__"]
-
-readme_filename = os.path.join(package_root, "README.rst")
-with io.open(readme_filename, encoding="utf-8") as readme_file:
- readme = readme_file.read()
-
-# Only include packages under the 'google' namespace. Do not include tests,
-# benchmarks, etc.
-packages = [
- package for package in setuptools.find_packages() if package.startswith("google")
-]
-
-# Determine which namespaces are needed.
-namespaces = ["google"]
-if "google.cloud" in packages:
- namespaces.append("google.cloud")
-
-
-setuptools.setup(
- name=name,
- version=version,
- description=description,
- long_description=readme,
- author="Google LLC",
- author_email="[email protected]",
- license="Apache 2.0",
- url="https://github.com/googleapis/python-api-core",
- classifiers=[
- release_status,
- "Intended Audience :: Developers",
- "License :: OSI Approved :: Apache Software License",
- "Programming Language :: Python",
- "Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- "Operating System :: OS Independent",
- "Topic :: Internet",
- ],
- platforms="Posix; MacOS X; Windows",
- packages=packages,
- namespace_packages=namespaces,
- install_requires=dependencies,
- extras_require=extras,
- python_requires=">=3.6",
- include_package_data=True,
- zip_safe=False,
-)
+setuptools.setup()
diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/constraints-3.12.txt
diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/testing/constraints-3.13.txt
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
index e69de29..4ce1c89 100644
--- a/testing/constraints-3.7.txt
+++ b/testing/constraints-3.7.txt
@@ -0,0 +1,15 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List *all* library dependencies and extras in this file.
+# Pin the version to the lower bound.
+#
+# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have foo==1.14.0
+googleapis-common-protos==1.56.2
+protobuf==3.19.5
+google-auth==2.14.1
+requests==2.18.0
+grpcio==1.33.2
+grpcio-status==1.33.2
+grpcio-gcp==0.2.2
+proto-plus==1.22.3
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
index e69de29..1b5bb58 100644
--- a/testing/constraints-3.8.txt
+++ b/testing/constraints-3.8.txt
@@ -0,0 +1,2 @@
+googleapis-common-protos==1.56.3
+protobuf==4.21.6
\ No newline at end of file
diff --git a/testing/constraints-3.6.txt b/testing/constraints-async-rest-3.7.txt
similarity index 68%
rename from testing/constraints-3.6.txt
rename to testing/constraints-async-rest-3.7.txt
index 0c2a07b..7aedeb1 100644
--- a/testing/constraints-3.6.txt
+++ b/testing/constraints-async-rest-3.7.txt
@@ -5,13 +5,13 @@
#
# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
# Then this file should have foo==1.14.0
-googleapis-common-protos==1.52.0
-protobuf==3.12.0
-google-auth==1.25.0
-requests==2.18.0
-setuptools==40.3.0
-packaging==14.3
+googleapis-common-protos==1.56.2
+protobuf==3.19.5
+google-auth==2.35.0
+# from google-auth[aiohttp]
+aiohttp==3.6.2
+requests==2.20.0
grpcio==1.33.2
-grpcio-gcp==0.2.2
-grpcio-gcp==0.2.2
grpcio-status==1.33.2
+grpcio-gcp==0.2.2
+proto-plus==1.22.3
diff --git a/tests/asyncio/future/test_async_future.py b/tests/asyncio/future/test_async_future.py
index 1e9ae33..659f41c 100644
--- a/tests/asyncio/future/test_async_future.py
+++ b/tests/asyncio/future/test_async_future.py
@@ -13,8 +13,8 @@
# limitations under the License.
import asyncio
+from unittest import mock
-import mock
import pytest
from google.api_core import exceptions
@@ -47,7 +47,6 @@
@pytest.mark.asyncio
async def test_set_result():
future = AsyncFuture()
- callback = mock.Mock()
future.set_result(1)
diff --git a/tests/asyncio/gapic/test_method_async.py b/tests/asyncio/gapic/test_method_async.py
index 1410747..73f67b8 100644
--- a/tests/asyncio/gapic/test_method_async.py
+++ b/tests/asyncio/gapic/test_method_async.py
@@ -14,11 +14,15 @@
import datetime
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest
try:
- from grpc import aio
+ from grpc import aio, Compression
except ImportError:
pytest.skip("No GRPC", allow_module_level=True)
@@ -93,6 +97,35 @@
@pytest.mark.asyncio
+async def test_wrap_method_with_no_compression():
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+
+ wrapped_method = gapic_v1.method_async.wrap_method(method)
+
+ await wrapped_method(1, 2, meep="moop", compression=None)
+
+ method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY)
+
+
[email protected]
+async def test_wrap_method_with_custom_compression():
+ compression = Compression.Gzip
+ fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
+ method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
+
+ wrapped_method = gapic_v1.method_async.wrap_method(
+ method, default_compression=compression
+ )
+
+ await wrapped_method(1, 2, meep="moop", compression=Compression.Deflate)
+
+ method.assert_called_once_with(
+ 1, 2, meep="moop", metadata=mock.ANY, compression=Compression.Deflate
+ )
+
+
[email protected]
async def test_invoke_wrapped_method_with_metadata():
fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
@@ -126,7 +159,7 @@
@mock.patch("asyncio.sleep")
@pytest.mark.asyncio
-async def test_wrap_method_with_default_retry_and_timeout(unused_sleep):
+async def test_wrap_method_with_default_retry_timeout_and_compression(unused_sleep):
fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
method = mock.Mock(
spec=aio.UnaryUnaryMultiCallable,
@@ -135,15 +168,18 @@
default_retry = retry_async.AsyncRetry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = Compression.Gzip
wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = await wrapped_method()
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=60, metadata=mock.ANY)
+ method.assert_called_with(
+ timeout=60, compression=default_compression, metadata=mock.ANY
+ )
@mock.patch("asyncio.sleep")
@@ -157,22 +193,27 @@
default_retry = retry_async.AsyncRetry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = Compression.Gzip
wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = await wrapped_method(
- retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT,
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout=gapic_v1.method_async.DEFAULT,
+ compression=gapic_v1.method_async.DEFAULT,
)
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=60, metadata=mock.ANY)
+ method.assert_called_with(
+ timeout=60, compression=Compression.Gzip, metadata=mock.ANY
+ )
@mock.patch("asyncio.sleep")
@pytest.mark.asyncio
-async def test_wrap_method_with_overriding_retry_and_timeout(unused_sleep):
+async def test_wrap_method_with_overriding_retry_timeout_and_compression(unused_sleep):
fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
method = mock.Mock(
spec=aio.UnaryUnaryMultiCallable,
@@ -181,8 +222,9 @@
default_retry = retry_async.AsyncRetry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = Compression.Gzip
wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = await wrapped_method(
@@ -190,45 +232,13 @@
retry_async.if_exception_type(exceptions.NotFound)
),
timeout=timeout.ConstantTimeout(22),
+ compression=Compression.Deflate,
)
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=22, metadata=mock.ANY)
-
-
[email protected]("asyncio.sleep")
[email protected](
- "google.api_core.datetime_helpers.utcnow",
- side_effect=_utcnow_monotonic(),
- autospec=True,
-)
[email protected]
-async def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep):
- fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42)
- method = mock.Mock(
- spec=aio.UnaryUnaryMultiCallable,
- side_effect=([exceptions.InternalServerError(None)] * 4) + [fake_call],
- )
-
- default_retry = retry_async.AsyncRetry()
- default_timeout = timeout.ExponentialTimeout(deadline=60)
- wrapped_method = gapic_v1.method_async.wrap_method(
- method, default_retry, default_timeout
- )
-
- # Overriding only the retry's deadline should also override the timeout's
- # deadline.
- result = await wrapped_method(retry=default_retry.with_deadline(30))
-
- assert result == 42
- timeout_args = [call[1]["timeout"] for call in method.call_args_list]
- assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0]
- assert utcnow.call_count == (
- 1
- + 1 # Compute wait_for timeout in retry_async
- + 5 # First to set the deadline.
- + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds)
+ method.assert_called_with(
+ timeout=22, compression=Compression.Deflate, metadata=mock.ANY
)
@@ -246,3 +256,14 @@
assert result == 42
method.assert_called_once_with(timeout=22, metadata=mock.ANY)
+
+
[email protected]
+async def test_wrap_method_without_wrap_errors():
+ fake_call = mock.AsyncMock()
+
+ wrapped_method = gapic_v1.method_async.wrap_method(fake_call, kind="rest")
+ with mock.patch("google.api_core.grpc_helpers_async.wrap_errors") as method:
+ await wrapped_method()
+
+ method.assert_not_called()
diff --git a/tests/asyncio/operations_v1/test_operations_async_client.py b/tests/asyncio/operations_v1/test_operations_async_client.py
index 47c3b4b..e5b20dc 100644
--- a/tests/asyncio/operations_v1/test_operations_async_client.py
+++ b/tests/asyncio/operations_v1/test_operations_async_client.py
@@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+from unittest import mock
+
import pytest
try:
- from grpc import aio
-except ImportError:
+ from grpc import aio, Compression
+except ImportError: # pragma: NO COVER
pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import grpc_helpers_async
@@ -42,10 +43,13 @@
)
client = operations_v1.OperationsAsyncClient(mocked_channel)
- response = await client.get_operation("name", metadata=[("header", "foo")])
+ response = await client.get_operation(
+ "name", metadata=[("header", "foo")], compression=Compression.Gzip
+ )
assert method.call_count == 1
assert tuple(method.call_args_list[0])[0][0].name == "name"
assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
"metadata"
]
@@ -63,7 +67,9 @@
mocked_channel, method, fake_call = _mock_grpc_objects(list_response)
client = operations_v1.OperationsAsyncClient(mocked_channel)
- pager = await client.list_operations("name", "filter", metadata=[("header", "foo")])
+ pager = await client.list_operations(
+ "name", "filter", metadata=[("header", "foo")], compression=Compression.Gzip
+ )
assert isinstance(pager, page_iterator_async.AsyncIterator)
responses = []
@@ -74,6 +80,7 @@
assert method.call_count == 1
assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
"metadata"
]
@@ -88,11 +95,14 @@
mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty())
client = operations_v1.OperationsAsyncClient(mocked_channel)
- await client.delete_operation("name", metadata=[("header", "foo")])
+ await client.delete_operation(
+ "name", metadata=[("header", "foo")], compression=Compression.Gzip
+ )
assert method.call_count == 1
assert tuple(method.call_args_list[0])[0][0].name == "name"
assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
"metadata"
]
@@ -103,11 +113,14 @@
mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty())
client = operations_v1.OperationsAsyncClient(mocked_channel)
- await client.cancel_operation("name", metadata=[("header", "foo")])
+ await client.cancel_operation(
+ "name", metadata=[("header", "foo")], compression=Compression.Gzip
+ )
assert method.call_count == 1
assert tuple(method.call_args_list[0])[0][0].name == "name"
assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"]
+ assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip
assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][
"metadata"
]
diff --git a/tests/asyncio/retry/__init__.py b/tests/asyncio/retry/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/asyncio/retry/__init__.py
diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py
new file mode 100644
index 0000000..d0fd799
--- /dev/null
+++ b/tests/asyncio/retry/test_retry_streaming_async.py
@@ -0,0 +1,572 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import asyncio
+import datetime
+import re
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+import pytest
+
+from google.api_core import exceptions
+from google.api_core import retry_async
+from google.api_core.retry import retry_streaming_async
+
+from ...unit.retry.test_retry_base import Test_BaseRetry
+
+
[email protected]
+async def test_retry_streaming_target_bad_sleep_generator():
+ from google.api_core.retry.retry_streaming_async import retry_target_stream
+
+ with pytest.raises(ValueError, match="Sleep generator"):
+ await retry_target_stream(None, None, [], None).__anext__()
+
+
+class TestAsyncStreamingRetry(Test_BaseRetry):
+ def _make_one(self, *args, **kwargs):
+ return retry_streaming_async.AsyncStreamingRetry(*args, **kwargs)
+
+ def test___str__(self):
+ def if_exception_type(exc):
+ return bool(exc) # pragma: NO COVER
+
+ # Explicitly set all attributes as changed Retry defaults should not
+ # cause this test to start failing.
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ timeout=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r"<AsyncStreamingRetry predicate=<function.*?if_exception_type.*?>, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
+
+ async def _generator_mock(
+ self,
+ num=5,
+ error_on=None,
+ exceptions_seen=None,
+ sleep_time=0,
+ ):
+ """
+ Helper to create a mock generator that yields a number of values
+ Generator can optionally raise an exception on a specific iteration
+
+ Args:
+ - num (int): the number of values to yield
+ - error_on (int): if given, the generator will raise a ValueError on the specified iteration
+ - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising
+ - sleep_time (int): if given, the generator will asyncio.sleep for this many seconds before yielding each value
+ """
+ try:
+ for i in range(num):
+ if sleep_time:
+ await asyncio.sleep(sleep_time)
+ if error_on and i == error_on:
+ raise ValueError("generator mock error")
+ yield i
+ except (Exception, BaseException, GeneratorExit) as e:
+ # keep track of exceptions seen by generator
+ if exceptions_seen is not None:
+ exceptions_seen.append(e)
+ raise
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___generator_success(self, sleep):
+ """
+ Test that a retry-decorated generator yields values as expected
+ This test checks a generator with no issues
+ """
+ from collections.abc import AsyncGenerator
+
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+ decorated = retry_(self._generator_mock)
+
+ num = 10
+ generator = await decorated(num)
+ # check types
+ assert isinstance(generator, AsyncGenerator)
+ assert isinstance(self._generator_mock(num), AsyncGenerator)
+ # check yield contents
+ unpacked = [i async for i in generator]
+ assert len(unpacked) == num
+ expected = [i async for i in self._generator_mock(num)]
+ for a, b in zip(unpacked, expected):
+ assert a == b
+ sleep.assert_not_called()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___generator_retry(self, sleep):
+ """
+ Tests that a retry-decorated generator will retry on errors
+ """
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ on_error=on_error,
+ predicate=retry_async.if_exception_type(ValueError),
+ timeout=None,
+ )
+ generator = await retry_(self._generator_mock)(error_on=3)
+ # error thrown on 3
+ # generator should contain 0, 1, 2 looping
+ unpacked = [await generator.__anext__() for i in range(10)]
+ assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0]
+ assert on_error.call_count == 3
+ await generator.aclose()
+
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.parametrize("use_deadline_arg", [True, False])
+ @pytest.mark.asyncio
+ async def test___call___generator_retry_hitting_timeout(
+ self, sleep, uniform, use_deadline_arg
+ ):
+ """
+ Tests that a retry-decorated generator will throw a RetryError
+ after using the time budget
+ """
+ import time
+
+ timeout_val = 9.9
+ # support "deadline" as an alias for "timeout"
+ timeout_kwarg = (
+ {"timeout": timeout_val}
+ if not use_deadline_arg
+ else {"deadline": timeout_val}
+ )
+
+ on_error = mock.Mock()
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ predicate=retry_async.if_exception_type(ValueError),
+ initial=1.0,
+ maximum=1024.0,
+ multiplier=2.0,
+ **timeout_kwarg,
+ )
+
+ time_now = time.monotonic()
+ now_patcher = mock.patch(
+ "time.monotonic",
+ return_value=time_now,
+ )
+
+ decorated = retry_(self._generator_mock, on_error=on_error)
+ generator = await decorated(error_on=1)
+
+ with now_patcher as patched_now:
+ # Make sure that calls to fake asyncio.sleep() also advance the mocked
+ # time clock.
+ def increase_time(sleep_delay):
+ patched_now.return_value += sleep_delay
+
+ sleep.side_effect = increase_time
+
+ with pytest.raises(exceptions.RetryError):
+ [i async for i in generator]
+
+ assert on_error.call_count == 4
+ # check the delays
+ assert sleep.call_count == 3 # once between each successive target calls
+ last_wait = sleep.call_args.args[0]
+ total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
+ # next wait would have put us over, so ended early
+ assert last_wait == 4
+ assert total_wait == 7
+
+ @pytest.mark.asyncio
+ async def test___call___generator_cancellations(self):
+ """
+ cancel calls should propagate to the generator
+ """
+ # test without cancel as retryable
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+ utcnow = datetime.datetime.now(datetime.timezone.utc)
+ mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow)
+ generator = await retry_(self._generator_mock)(sleep_time=0.2)
+ assert await generator.__anext__() == 0
+ task = asyncio.create_task(generator.__anext__())
+ task.cancel()
+ with pytest.raises(asyncio.CancelledError):
+ await task
+ with pytest.raises(StopAsyncIteration):
+ await generator.__anext__()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_generator_send(self, sleep):
+ """
+ Send should be passed through retry into target generator
+ """
+
+ async def _mock_send_gen():
+ """
+ always yield whatever was sent in
+ """
+ in_ = yield
+ while True:
+ in_ = yield in_
+
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+
+ decorated = retry_(_mock_send_gen)
+
+ generator = await decorated()
+ result = await generator.__anext__()
+ # first yield should be None
+ assert result is None
+ in_messages = ["test_1", "hello", "world"]
+ out_messages = []
+ for msg in in_messages:
+ recv = await generator.asend(msg)
+ out_messages.append(recv)
+ assert in_messages == out_messages
+ await generator.aclose()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___generator_send_retry(self, sleep):
+ """
+ Send should be retried if target generator raises an error
+ """
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ on_error=on_error,
+ predicate=retry_async.if_exception_type(ValueError),
+ timeout=None,
+ )
+ generator = await retry_(self._generator_mock)(error_on=3)
+ with pytest.raises(TypeError) as exc_info:
+ await generator.asend("cannot send to fresh generator")
+ assert exc_info.match("can't send non-None value")
+ await generator.aclose()
+
+ # error thrown on 3
+ # generator should contain 0, 1, 2 looping
+ generator = await retry_(self._generator_mock)(error_on=3)
+ assert await generator.__anext__() == 0
+ unpacked = [await generator.asend(i) for i in range(10)]
+ assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1]
+ assert on_error.call_count == 3
+ await generator.aclose()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_generator_close(self, sleep):
+ """
+ Close should be passed through retry into target generator
+ """
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+ decorated = retry_(self._generator_mock)
+ exception_list = []
+ generator = await decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ await generator.__anext__()
+ await generator.aclose()
+
+ assert isinstance(exception_list[0], GeneratorExit)
+ with pytest.raises(StopAsyncIteration):
+ # calling next on closed generator should raise error
+ await generator.__anext__()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_new_generator_close(self, sleep):
+ """
+ Close should be passed through retry into target generator,
+ even when it hasn't been iterated yet
+ """
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+ decorated = retry_(self._generator_mock)
+ exception_list = []
+ generator = await decorated(10, exceptions_seen=exception_list)
+ await generator.aclose()
+
+ with pytest.raises(StopAsyncIteration):
+ # calling next on closed generator should raise error
+ await generator.__anext__()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_generator_throw(self, sleep):
+ """
+ Throw should be passed through retry into target generator
+ """
+
+ # The generator should not retry when it encounters a non-retryable error
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ predicate=retry_async.if_exception_type(ValueError),
+ )
+ decorated = retry_(self._generator_mock)
+ exception_list = []
+ generator = await decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ await generator.__anext__()
+ with pytest.raises(BufferError):
+ await generator.athrow(BufferError("test"))
+ assert isinstance(exception_list[0], BufferError)
+ with pytest.raises(StopAsyncIteration):
+ # calling next on closed generator should raise error
+ await generator.__anext__()
+
+ # In contrast, the generator should retry if we throw a retryable exception
+ exception_list = []
+ generator = await decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ await generator.__anext__()
+ throw_val = await generator.athrow(ValueError("test"))
+ assert throw_val == 0
+ assert isinstance(exception_list[0], ValueError)
+ # calling next on generator should not raise error, because it was retried
+ assert await generator.__anext__() == 1
+
+ @pytest.mark.parametrize("awaitable_wrapped", [True, False])
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_iterable_send(self, sleep, awaitable_wrapped):
+ """
+ Send should work like next if the wrapped iterable does not support it
+ """
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+
+ def iterable_fn():
+ class CustomIterable:
+ def __init__(self):
+ self.i = -1
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ self.i += 1
+ return self.i
+
+ return CustomIterable()
+
+ if awaitable_wrapped:
+
+ async def wrapper():
+ return iterable_fn()
+
+ decorated = retry_(wrapper)
+ else:
+ decorated = retry_(iterable_fn)
+
+ retryable = await decorated()
+ # initiate the generator by calling next
+ result = await retryable.__anext__()
+ assert result == 0
+ # test sending values
+ assert await retryable.asend("test") == 1
+ assert await retryable.asend("test2") == 2
+ assert await retryable.asend("test3") == 3
+ await retryable.aclose()
+
+ @pytest.mark.parametrize("awaitable_wrapped", [True, False])
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_iterable_close(self, sleep, awaitable_wrapped):
+ """
+ close should be handled by wrapper if wrapped iterable does not support it
+ """
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+
+ def iterable_fn():
+ class CustomIterable:
+ def __init__(self):
+ self.i = -1
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ self.i += 1
+ return self.i
+
+ return CustomIterable()
+
+ if awaitable_wrapped:
+
+ async def wrapper():
+ return iterable_fn()
+
+ decorated = retry_(wrapper)
+ else:
+ decorated = retry_(iterable_fn)
+
+ # try closing active generator
+ retryable = await decorated()
+ assert await retryable.__anext__() == 0
+ await retryable.aclose()
+ with pytest.raises(StopAsyncIteration):
+ await retryable.__anext__()
+ # try closing new generator
+ new_retryable = await decorated()
+ await new_retryable.aclose()
+ with pytest.raises(StopAsyncIteration):
+ await new_retryable.__anext__()
+
+ @pytest.mark.parametrize("awaitable_wrapped", [True, False])
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___with_iterable_throw(self, sleep, awaitable_wrapped):
+ """
+ Throw should work even if the wrapped iterable does not support it
+ """
+
+ predicate = retry_async.if_exception_type(ValueError)
+ retry_ = retry_streaming_async.AsyncStreamingRetry(predicate=predicate)
+
+ def iterable_fn():
+ class CustomIterable:
+ def __init__(self):
+ self.i = -1
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ self.i += 1
+ return self.i
+
+ return CustomIterable()
+
+ if awaitable_wrapped:
+
+ async def wrapper():
+ return iterable_fn()
+
+ decorated = retry_(wrapper)
+ else:
+ decorated = retry_(iterable_fn)
+
+ # try throwing with active generator
+ retryable = await decorated()
+ assert await retryable.__anext__() == 0
+ # should swallow errors in predicate
+ await retryable.athrow(ValueError("test"))
+ # should raise errors not in predicate
+ with pytest.raises(BufferError):
+ await retryable.athrow(BufferError("test"))
+ with pytest.raises(StopAsyncIteration):
+ await retryable.__anext__()
+ # try throwing with new generator
+ new_retryable = await decorated()
+ with pytest.raises(BufferError):
+ await new_retryable.athrow(BufferError("test"))
+ with pytest.raises(StopAsyncIteration):
+ await new_retryable.__anext__()
+
+ @pytest.mark.asyncio
+ async def test_exc_factory_non_retryable_error(self):
+ """
+ generator should give the option to override exception creation logic
+ test when non-retryable error is thrown
+ """
+ from google.api_core.retry import RetryFailureReason
+ from google.api_core.retry.retry_streaming_async import retry_target_stream
+
+ timeout = 6
+ sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")]
+ expected_final_err = RuntimeError("done")
+ expected_source_err = ZeroDivisionError("test4")
+
+ def factory(*args, **kwargs):
+ assert len(kwargs) == 0
+ assert args[0] == sent_errors
+ assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR
+ assert args[2] == timeout
+ return expected_final_err, expected_source_err
+
+ generator = retry_target_stream(
+ self._generator_mock,
+ retry_async.if_exception_type(ValueError),
+ [0] * 3,
+ timeout=timeout,
+ exception_factory=factory,
+ )
+ # initialize the generator
+ await generator.__anext__()
+ # trigger some retryable errors
+ await generator.athrow(sent_errors[0])
+ await generator.athrow(sent_errors[1])
+ # trigger a non-retryable error
+ with pytest.raises(expected_final_err.__class__) as exc_info:
+ await generator.athrow(sent_errors[2])
+ assert exc_info.value == expected_final_err
+ assert exc_info.value.__cause__ == expected_source_err
+
+ @pytest.mark.asyncio
+ async def test_exc_factory_timeout(self):
+ """
+ generator should give the option to override exception creation logic
+ test when timeout is exceeded
+ """
+ import time
+ from google.api_core.retry import RetryFailureReason
+ from google.api_core.retry.retry_streaming_async import retry_target_stream
+
+ timeout = 2
+ time_now = time.monotonic()
+ now_patcher = mock.patch(
+ "time.monotonic",
+ return_value=time_now,
+ )
+
+ with now_patcher as patched_now:
+ timeout = 2
+ sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")]
+ expected_final_err = RuntimeError("done")
+ expected_source_err = ZeroDivisionError("test4")
+
+ def factory(*args, **kwargs):
+ assert len(kwargs) == 0
+ assert args[0] == sent_errors
+ assert args[1] == RetryFailureReason.TIMEOUT
+ assert args[2] == timeout
+ return expected_final_err, expected_source_err
+
+ generator = retry_target_stream(
+ self._generator_mock,
+ retry_async.if_exception_type(ValueError),
+ [0] * 3,
+ timeout=timeout,
+ exception_factory=factory,
+ )
+ # initialize the generator
+ await generator.__anext__()
+ # trigger some retryable errors
+ await generator.athrow(sent_errors[0])
+ await generator.athrow(sent_errors[1])
+ # trigger a timeout
+ patched_now.return_value += timeout + 1
+ with pytest.raises(expected_final_err.__class__) as exc_info:
+ await generator.athrow(sent_errors[2])
+ assert exc_info.value == expected_final_err
+ assert exc_info.value.__cause__ == expected_source_err
diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/retry/test_retry_unary_async.py
similarity index 62%
rename from tests/asyncio/test_retry_async.py
rename to tests/asyncio/retry/test_retry_unary_async.py
index 9e51044..dc64299 100644
--- a/tests/asyncio/test_retry_async.py
+++ b/tests/asyncio/retry/test_retry_unary_async.py
@@ -15,12 +15,18 @@
import datetime
import re
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest
from google.api_core import exceptions
from google.api_core import retry_async
+from ...unit.retry.test_retry_base import Test_BaseRetry
+
@mock.patch("asyncio.sleep", autospec=True)
@mock.patch(
@@ -97,29 +103,35 @@
@mock.patch("asyncio.sleep", autospec=True)
[email protected]("google.api_core.datetime_helpers.utcnow", autospec=True)
[email protected]("time.monotonic", autospec=True)
[email protected]("use_deadline_arg", [True, False])
@pytest.mark.asyncio
-async def test_retry_target_deadline_exceeded(utcnow, sleep):
+async def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg):
predicate = retry_async.if_exception_type(ValueError)
exception = ValueError("meep")
target = mock.Mock(side_effect=exception)
# Setup the timeline so that the first call takes 5 seconds but the second
- # call takes 6, which puts the retry over the deadline.
- utcnow.side_effect = [
- # The first call to utcnow establishes the start of the timeline.
- datetime.datetime.min,
- datetime.datetime.min + datetime.timedelta(seconds=5),
- datetime.datetime.min + datetime.timedelta(seconds=11),
- ]
+ # call takes 6, which puts the retry over the timeout.
+ monotonic.side_effect = [0, 5, 11]
+
+ timeout_val = 10
+ # support "deadline" as an alias for "timeout"
+ timeout_kwarg = (
+ {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val}
+ )
with pytest.raises(exceptions.RetryError) as exc_info:
- await retry_async.retry_target(target, predicate, range(10), deadline=10)
+ await retry_async.retry_target(target, predicate, range(10), **timeout_kwarg)
assert exc_info.value.cause == exception
- assert exc_info.match("Deadline of 10.0s exceeded")
+ assert exc_info.match("Timeout of 10.0s exceeded")
assert exc_info.match("last exception: meep")
assert target.call_count == 2
+ # Ensure the exception message does not include the target fn:
+ # it may be a partial with user data embedded
+ assert str(target) not in exc_info.exconly()
+
@pytest.mark.asyncio
async def test_retry_target_bad_sleep_generator():
@@ -129,108 +141,9 @@
)
-class TestAsyncRetry:
- def test_constructor_defaults(self):
- retry_ = retry_async.AsyncRetry()
- assert retry_._predicate == retry_async.if_transient_error
- assert retry_._initial == 1
- assert retry_._maximum == 60
- assert retry_._multiplier == 2
- assert retry_._deadline == 120
- assert retry_._on_error is None
-
- def test_constructor_options(self):
- _some_function = mock.Mock()
-
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=_some_function,
- )
- assert retry_._predicate == mock.sentinel.predicate
- assert retry_._initial == 1
- assert retry_._maximum == 2
- assert retry_._multiplier == 3
- assert retry_._deadline == 4
- assert retry_._on_error is _some_function
-
- def test_with_deadline(self):
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_deadline(42)
- assert retry_ is not new_retry
- assert new_retry._deadline == 42
-
- # the rest of the attributes should remain the same
- assert new_retry._predicate is retry_._predicate
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_predicate(self):
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_predicate(mock.sentinel.predicate)
- assert retry_ is not new_retry
- assert new_retry._predicate == mock.sentinel.predicate
-
- # the rest of the attributes should remain the same
- assert new_retry._deadline == retry_._deadline
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_delay_noop(self):
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay()
- assert retry_ is not new_retry
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
-
- def test_with_delay(self):
- retry_ = retry_async.AsyncRetry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay(initial=1, maximum=2, multiplier=3)
- assert retry_ is not new_retry
- assert new_retry._initial == 1
- assert new_retry._maximum == 2
- assert new_retry._multiplier == 3
-
- # the rest of the attributes should remain the same
- assert new_retry._deadline == retry_._deadline
- assert new_retry._predicate is retry_._predicate
- assert new_retry._on_error is retry_._on_error
+class TestAsyncRetry(Test_BaseRetry):
+ def _make_one(self, *args, **kwargs):
+ return retry_async.AsyncRetry(*args, **kwargs)
def test___str__(self):
def if_exception_type(exc):
@@ -243,13 +156,13 @@
initial=1.0,
maximum=60.0,
multiplier=2.0,
- deadline=120.0,
+ timeout=120.0,
on_error=None,
)
assert re.match(
(
r"<AsyncRetry predicate=<function.*?if_exception_type.*?>, "
- r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
r"on_error=None>"
),
str(retry_),
@@ -272,12 +185,10 @@
target.assert_called_once_with("meep")
sleep.assert_not_called()
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
async def test___call___and_execute_retry(self, sleep, uniform):
-
on_error = mock.Mock(spec=["__call__"], side_effect=[None])
retry_ = retry_async.AsyncRetry(
predicate=retry_async.if_exception_type(ValueError)
@@ -298,25 +209,20 @@
sleep.assert_called_once_with(retry_._initial)
assert on_error.call_count == 1
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
- async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform):
-
+ async def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform):
on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10)
retry_ = retry_async.AsyncRetry(
predicate=retry_async.if_exception_type(ValueError),
initial=1.0,
maximum=1024.0,
multiplier=2.0,
- deadline=9.9,
+ timeout=30.9,
)
- utcnow = datetime.datetime.utcnow()
- utcnow_patcher = mock.patch(
- "google.api_core.datetime_helpers.utcnow", return_value=utcnow
- )
+ monotonic_patcher = mock.patch("time.monotonic", return_value=0)
target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError()] * 10)
# __name__ is needed by functools.partial.
@@ -325,11 +231,11 @@
decorated = retry_(target, on_error=on_error)
target.assert_not_called()
- with utcnow_patcher as patched_utcnow:
+ with monotonic_patcher as patched_monotonic:
# Make sure that calls to fake asyncio.sleep() also advance the mocked
# time clock.
def increase_time(sleep_delay):
- patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay)
+ patched_monotonic.return_value += sleep_delay
sleep.side_effect = increase_time
@@ -345,8 +251,17 @@
last_wait = sleep.call_args.args[0]
total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
- assert last_wait == 2.9 # and not 8.0, because the last delay was shortened
- assert total_wait == 9.9 # the same as the deadline
+ assert last_wait == 8.0
+ # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus
+ # we do not even wait for it to be scheduled (30.9 is configured timeout).
+ # This changes the previous logic of shortening the last attempt to fit
+ # in the timeout. The previous logic was removed to make Python retry
+ # logic consistent with the other languages and to not disrupt the
+ # randomized retry delays distribution by artificially increasing a
+ # probability of scheduling two (instead of one) last attempts with very
+ # short delay between them, while the second retry having very low chance
+ # of succeeding anyways.
+ assert total_wait == 15.0
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
@@ -372,8 +287,7 @@
sleep.assert_not_called()
_some_function.assert_not_called()
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
async def test___init___when_retry_is_executed(self, sleep, uniform):
diff --git a/tests/asyncio/test_grpc_helpers_async.py b/tests/asyncio/test_grpc_helpers_async.py
index 3681a40..aa8d5d1 100644
--- a/tests/asyncio/test_grpc_helpers_async.py
+++ b/tests/asyncio/test_grpc_helpers_async.py
@@ -12,17 +12,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest # noqa: I202
try:
import grpc
from grpc import aio
-except ImportError:
+except ImportError: # pragma: NO COVER
grpc = aio = None
-if grpc is None:
+if grpc is None: # pragma: NO COVER
pytest.skip("No GRPC", allow_module_level=True)
@@ -98,11 +102,39 @@
@pytest.mark.asyncio
[email protected](
+ "callable_type,expected_wrapper_type",
+ [
+ (grpc.aio.UnaryStreamMultiCallable, grpc_helpers_async._WrappedUnaryStreamCall),
+ (grpc.aio.StreamUnaryMultiCallable, grpc_helpers_async._WrappedStreamUnaryCall),
+ (
+ grpc.aio.StreamStreamMultiCallable,
+ grpc_helpers_async._WrappedStreamStreamCall,
+ ),
+ ],
+)
+async def test_wrap_errors_w_stream_type(callable_type, expected_wrapper_type):
+ class ConcreteMulticallable(callable_type):
+ def __call__(self, *args, **kwargs):
+ raise NotImplementedError("Should not be called")
+
+ with mock.patch.object(
+ grpc_helpers_async, "_wrap_stream_errors"
+ ) as wrap_stream_errors:
+ callable_ = ConcreteMulticallable()
+ grpc_helpers_async.wrap_errors(callable_)
+ assert wrap_stream_errors.call_count == 1
+ wrap_stream_errors.assert_called_once_with(callable_, expected_wrapper_type)
+
+
[email protected]
async def test_wrap_stream_errors_unary_stream():
mock_call = mock.Mock(aio.UnaryStreamCall, autospec=True)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedUnaryStreamCall
+ )
await wrapped_callable(1, 2, three="four")
multicallable.assert_called_once_with(1, 2, three="four")
@@ -114,7 +146,9 @@
mock_call = mock.Mock(aio.StreamUnaryCall, autospec=True)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamUnaryCall
+ )
await wrapped_callable(1, 2, three="four")
multicallable.assert_called_once_with(1, 2, three="four")
@@ -126,7 +160,9 @@
mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
await wrapped_callable(1, 2, three="four")
multicallable.assert_called_once_with(1, 2, three="four")
@@ -134,24 +170,15 @@
@pytest.mark.asyncio
-async def test_wrap_stream_errors_type_error():
- mock_call = mock.Mock()
- multicallable = mock.Mock(return_value=mock_call)
-
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
-
- with pytest.raises(TypeError):
- await wrapped_callable()
-
-
[email protected]
async def test_wrap_stream_errors_raised():
grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT)
mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
mock_call.wait_for_connection = mock.AsyncMock(side_effect=[grpc_error])
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
with pytest.raises(exceptions.InvalidArgument):
await wrapped_callable()
@@ -166,7 +193,9 @@
mock_call.read = mock.AsyncMock(side_effect=grpc_error)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable(1, 2, three="four")
multicallable.assert_called_once_with(1, 2, three="four")
@@ -189,7 +218,9 @@
mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable()
with pytest.raises(exceptions.InvalidArgument) as exc_info:
@@ -210,7 +241,9 @@
mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable()
with pytest.raises(TypeError) as exc_info:
@@ -224,7 +257,9 @@
mock_call = mock.Mock(aio.StreamStreamCall, autospec=True)
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable()
assert wrapped_call.__aiter__() == wrapped_call.__aiter__()
@@ -239,7 +274,9 @@
mock_call.done_writing = mock.AsyncMock(side_effect=[None, grpc_error])
multicallable = mock.Mock(return_value=mock_call)
- wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable)
+ wrapped_callable = grpc_helpers_async._wrap_stream_errors(
+ multicallable, grpc_helpers_async._WrappedStreamStreamCall
+ )
wrapped_call = await wrapped_callable()
@@ -266,6 +303,28 @@
wrap_unary_errors.assert_called_once_with(callable_)
+def test_grpc_async_stream():
+ """
+ GrpcAsyncStream type should be both an AsyncIterator and a grpc.aio.Call.
+ """
+ instance = grpc_helpers_async.GrpcAsyncStream[int]()
+ assert isinstance(instance, grpc.aio.Call)
+ # should implement __aiter__ and __anext__
+ assert hasattr(instance, "__aiter__")
+ it = instance.__aiter__()
+ assert hasattr(it, "__anext__")
+
+
+def test_awaitable_grpc_call():
+ """
+ AwaitableGrpcCall type should be an Awaitable and a grpc.aio.Call.
+ """
+ instance = grpc_helpers_async.AwaitableGrpcCall()
+ assert isinstance(instance, grpc.aio.Call)
+ # should implement __await__
+ assert hasattr(instance, "__await__")
+
+
@mock.patch("google.api_core.grpc_helpers_async._wrap_stream_errors")
def test_wrap_errors_streaming(wrap_stream_errors):
callable_ = mock.create_autospec(aio.UnaryStreamMultiCallable)
@@ -273,85 +332,149 @@
result = grpc_helpers_async.wrap_errors(callable_)
assert result == wrap_stream_errors.return_value
- wrap_stream_errors.assert_called_once_with(callable_)
+ wrap_stream_errors.assert_called_once_with(
+ callable_, grpc_helpers_async._WrappedUnaryStreamCall
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected](
+ "attempt_direct_path,target,expected_target",
+ [
+ (None, "example.com:443", "example.com:443"),
+ (False, "example.com:443", "example.com:443"),
+ (True, "example.com:443", "google-c2p:///example.com"),
+ (True, "dns:///example.com", "google-c2p:///example.com"),
+ (True, "another-c2p:///example.com", "another-c2p:///example.com"),
+ ],
+)
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call):
- target = "example.com:443"
+def test_create_channel_implicit(
+ grpc_secure_channel,
+ google_auth_default,
+ composite_creds_call,
+ attempt_direct_path,
+ target,
+ expected_target,
+):
composite_creds = composite_creds_call.return_value
- channel = grpc_helpers_async.create_channel(target)
+ channel = grpc_helpers_async.create_channel(
+ target, attempt_direct_path=attempt_direct_path
+ )
assert channel is grpc_secure_channel.return_value
- default.assert_called_once_with(scopes=None, default_scopes=None)
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, compression=None
+ )
[email protected](
+ "attempt_direct_path,target, expected_target",
+ [
+ (None, "example.com:443", "example.com:443"),
+ (False, "example.com:443", "example.com:443"),
+ (True, "example.com:443", "google-c2p:///example.com"),
+ (True, "dns:///example.com", "google-c2p:///example.com"),
+ (True, "another-c2p:///example.com", "another-c2p:///example.com"),
+ ],
+)
@mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True)
@mock.patch(
"google.auth.transport.requests.Request",
autospec=True,
return_value=mock.sentinel.Request,
)
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_default_host(
- grpc_secure_channel, default, composite_creds_call, request, auth_metadata_plugin
+ grpc_secure_channel,
+ google_auth_default,
+ composite_creds_call,
+ request,
+ auth_metadata_plugin,
+ attempt_direct_path,
+ target,
+ expected_target,
):
- target = "example.com:443"
default_host = "example.com"
composite_creds = composite_creds_call.return_value
- channel = grpc_helpers_async.create_channel(target, default_host=default_host)
+ channel = grpc_helpers_async.create_channel(
+ target, default_host=default_host, attempt_direct_path=attempt_direct_path
+ )
assert channel is grpc_secure_channel.return_value
- default.assert_called_once_with(scopes=None, default_scopes=None)
+ google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
auth_metadata_plugin.assert_called_once_with(
mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host
)
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, compression=None
+ )
[email protected](
+ "attempt_direct_path",
+ [
+ None,
+ False,
+ ],
+)
@mock.patch("grpc.composite_channel_credentials")
@mock.patch(
"google.auth.default",
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_ssl_creds(
- grpc_secure_channel, default, composite_creds_call
+ grpc_secure_channel, default, composite_creds_call, attempt_direct_path
):
target = "example.com:443"
ssl_creds = grpc.ssl_channel_credentials()
- grpc_helpers_async.create_channel(target, ssl_credentials=ssl_creds)
+ grpc_helpers_async.create_channel(
+ target, ssl_credentials=ssl_creds, attempt_direct_path=attempt_direct_path
+ )
default.assert_called_once_with(scopes=None, default_scopes=None)
composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
composite_creds = composite_creds_call.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
+def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true():
+ target = "example.com:443"
+ ssl_creds = grpc.ssl_channel_credentials()
+ with pytest.raises(
+ ValueError, match="Using ssl_credentials with Direct Path is not supported"
+ ):
+ grpc_helpers_async.create_channel(
+ target, ssl_credentials=ssl_creds, attempt_direct_path=True
+ )
+
+
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_scopes(
@@ -365,14 +488,16 @@
assert channel is grpc_secure_channel.return_value
default.assert_called_once_with(scopes=["one", "two"], default_scopes=None)
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.aio.secure_channel")
def test_create_channel_implicit_with_default_scopes(
@@ -382,13 +507,15 @@
composite_creds = composite_creds_call.return_value
channel = grpc_helpers_async.create_channel(
- target, default_scopes=["three", "four"]
+ target, default_scopes=["three", "four"], compression=grpc.Compression.Gzip
)
assert channel is grpc_secure_channel.return_value
default.assert_called_once_with(scopes=None, default_scopes=["three", "four"])
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=grpc.Compression.Gzip
+ )
def test_create_channel_explicit_with_duplicate_credentials():
@@ -404,7 +531,7 @@
assert "mutually exclusive" in str(excinfo.value)
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True)
@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
@@ -412,17 +539,19 @@
composite_creds = composite_creds_call.return_value
channel = grpc_helpers_async.create_channel(
- target, credentials=mock.sentinel.credentials
+ target, credentials=mock.sentinel.credentials, compression=grpc.Compression.Gzip
)
auth_creds.assert_called_once_with(
mock.sentinel.credentials, scopes=None, default_scopes=None
)
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=grpc.Compression.Gzip
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
target = "example.com:443"
@@ -433,15 +562,20 @@
credentials.requires_scopes = True
channel = grpc_helpers_async.create_channel(
- target, credentials=credentials, scopes=scopes
+ target,
+ credentials=credentials,
+ scopes=scopes,
+ compression=grpc.Compression.Gzip,
)
credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=grpc.Compression.Gzip
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit_default_scopes(
grpc_secure_channel, composite_creds_call
@@ -454,17 +588,22 @@
credentials.requires_scopes = True
channel = grpc_helpers_async.create_channel(
- target, credentials=credentials, default_scopes=default_scopes
+ target,
+ credentials=credentials,
+ default_scopes=default_scopes,
+ compression=grpc.Compression.Gzip,
)
credentials.with_scopes.assert_called_once_with(
scopes=None, default_scopes=default_scopes
)
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=grpc.Compression.Gzip
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.aio.secure_channel")
def test_create_channel_explicit_with_quota_project(
grpc_secure_channel, composite_creds_call
@@ -482,17 +621,19 @@
credentials.with_quota_project.assert_called_once_with("project-foo")
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.aio.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
autospec=True,
return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
-def test_create_channnel_with_credentials_file(
+def test_create_channel_with_credentials_file(
load_credentials_from_file, grpc_secure_channel, composite_creds_call
):
target = "example.com:443"
@@ -508,10 +649,12 @@
credentials_file, scopes=None, default_scopes=None
)
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.aio.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
@@ -535,10 +678,12 @@
credentials_file, scopes=scopes, default_scopes=None
)
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.aio.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
@@ -562,14 +707,13 @@
credentials_file, scopes=None, default_scopes=default_scopes
)
assert channel is grpc_secure_channel.return_value
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected](
- grpc_helpers_async.HAS_GRPC_GCP, reason="grpc_gcp module not available"
-)
@mock.patch("grpc.aio.secure_channel")
-def test_create_channel_without_grpc_gcp(grpc_secure_channel):
+def test_create_channel(grpc_secure_channel):
target = "example.com:443"
scopes = ["test_scope"]
diff --git a/tests/asyncio/test_operation_async.py b/tests/asyncio/test_operation_async.py
index 26ad7ce..939be09 100644
--- a/tests/asyncio/test_operation_async.py
+++ b/tests/asyncio/test_operation_async.py
@@ -13,12 +13,17 @@
# limitations under the License.
-import mock
import pytest
try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+try:
import grpc # noqa: F401
-except ImportError:
+except ImportError: # pragma: NO COVER
pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import exceptions
diff --git a/tests/asyncio/test_page_iterator_async.py b/tests/asyncio/test_page_iterator_async.py
index 75f9e1c..63e26d0 100644
--- a/tests/asyncio/test_page_iterator_async.py
+++ b/tests/asyncio/test_page_iterator_async.py
@@ -14,7 +14,11 @@
import inspect
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest
from google.api_core import page_iterator_async
@@ -106,6 +110,7 @@
await page_aiter.__anext__()
assert iterator.num_results == 1
+ await page_aiter.aclose()
@pytest.mark.asyncio
async def test__page_aiter_no_increment(self):
@@ -118,6 +123,7 @@
# results should still be 0 after fetching a page.
assert iterator.num_results == 0
+ await page_aiter.aclose()
@pytest.mark.asyncio
async def test__items_aiter(self):
diff --git a/tests/asyncio/test_rest_streaming_async.py b/tests/asyncio/test_rest_streaming_async.py
new file mode 100644
index 0000000..c9caa2b
--- /dev/null
+++ b/tests/asyncio/test_rest_streaming_async.py
@@ -0,0 +1,378 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# TODO: set random.seed explicitly in each test function.
+# See related issue: https://github.com/googleapis/python-api-core/issues/689.
+
+import datetime
+import logging
+import random
+import time
+from typing import List, AsyncIterator
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+import pytest # noqa: I202
+
+import proto
+
+try:
+ from google.auth.aio.transport import Response
+except ImportError:
+ pytest.skip(
+ "google-api-core[async_rest] is required to test asynchronous rest streaming.",
+ allow_module_level=True,
+ )
+
+from google.api_core import rest_streaming_async
+from google.api import http_pb2
+from google.api import httpbody_pb2
+
+
+from ..helpers import Composer, Song, EchoResponse, parse_responses
+
+
+__protobuf__ = proto.module(package=__name__)
+SEED = int(time.time())
+logging.info(f"Starting async rest streaming tests with random seed: {SEED}")
+random.seed(SEED)
+
+
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
+class ResponseMock(Response):
+ class _ResponseItr(AsyncIterator[bytes]):
+ def __init__(self, _response_bytes: bytes, random_split=False):
+ self._responses_bytes = _response_bytes
+ self._idx = 0
+ self._random_split = random_split
+
+ def __aiter__(self):
+ return self
+
+ async def __anext__(self):
+ if self._idx >= len(self._responses_bytes):
+ raise StopAsyncIteration
+ if self._random_split:
+ n = random.randint(1, len(self._responses_bytes[self._idx :]))
+ else:
+ n = 1
+ x = self._responses_bytes[self._idx : self._idx + n]
+ self._idx += n
+ return x
+
+ def __init__(
+ self,
+ responses: List[proto.Message],
+ response_cls,
+ random_split=False,
+ ):
+ self._responses = responses
+ self._random_split = random_split
+ self._response_message_cls = response_cls
+
+ def _parse_responses(self):
+ return parse_responses(self._response_message_cls, self._responses)
+
+ @property
+ async def headers(self):
+ raise NotImplementedError()
+
+ @property
+ async def status_code(self):
+ raise NotImplementedError()
+
+ async def close(self):
+ raise NotImplementedError()
+
+ async def content(self, chunk_size=None):
+ itr = self._ResponseItr(
+ self._parse_responses(), random_split=self._random_split
+ )
+ async for chunk in itr:
+ yield chunk
+
+ async def read(self):
+ raise NotImplementedError()
+
+
[email protected]
[email protected](
+ "random_split,resp_message_is_proto_plus",
+ [(False, True), (False, False)],
+)
+async def test_next_simple(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = EchoResponse
+ responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")]
+ else:
+ response_type = httpbody_pb2.HttpBody
+ responses = [
+ httpbody_pb2.HttpBody(content_type="hello world"),
+ httpbody_pb2.HttpBody(content_type="yes"),
+ ]
+
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ idx = 0
+ async for response in itr:
+ assert response == responses[idx]
+ idx += 1
+
+
[email protected]
[email protected](
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+async def test_next_nested(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = Song
+ responses = [
+ Song(title="some song", composer=Composer(given_name="some name")),
+ Song(title="another song", date_added=datetime.datetime(2021, 12, 17)),
+ ]
+ else:
+ # Although `http_pb2.HttpRule`` is used in the response, any response message
+ # can be used which meets this criteria for the test of having a nested field.
+ response_type = http_pb2.HttpRule
+ responses = [
+ http_pb2.HttpRule(
+ selector="some selector",
+ custom=http_pb2.CustomHttpPattern(kind="some kind"),
+ ),
+ http_pb2.HttpRule(
+ selector="another selector",
+ custom=http_pb2.CustomHttpPattern(path="some path"),
+ ),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ idx = 0
+ async for response in itr:
+ assert response == responses[idx]
+ idx += 1
+ assert idx == len(responses)
+
+
[email protected]
[email protected](
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+async def test_next_stress(random_split, resp_message_is_proto_plus):
+ n = 50
+ if resp_message_is_proto_plus:
+ response_type = Song
+ responses = [
+ Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i))
+ for i in range(n)
+ ]
+ else:
+ response_type = http_pb2.HttpRule
+ responses = [
+ http_pb2.HttpRule(
+ selector="selector_%d" % i,
+ custom=http_pb2.CustomHttpPattern(path="path_%d" % i),
+ )
+ for i in range(n)
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ idx = 0
+ async for response in itr:
+ assert response == responses[idx]
+ idx += 1
+ assert idx == n
+
+
[email protected]
[email protected](
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+async def test_next_escaped_characters_in_string(
+ random_split, resp_message_is_proto_plus
+):
+ if resp_message_is_proto_plus:
+ response_type = Song
+ composer_with_relateds = Composer()
+ relateds = ["Artist A", "Artist B"]
+ composer_with_relateds.relateds = relateds
+
+ responses = [
+ Song(
+ title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n")
+ ),
+ Song(
+ title='{"this is weird": "totally"}',
+ composer=Composer(given_name="\\{}\\"),
+ ),
+ Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds),
+ ]
+ else:
+ response_type = http_pb2.Http
+ responses = [
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='ti"tle\nfoo\tbar{}',
+ custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"),
+ )
+ ]
+ ),
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='{"this is weird": "totally"}',
+ custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+ )
+ ]
+ ),
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='\\{"key": ["value",]}\\',
+ custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+ )
+ ]
+ ),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ idx = 0
+ async for response in itr:
+ assert response == responses[idx]
+ idx += 1
+ assert idx == len(responses)
+
+
[email protected]
[email protected]("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_next_not_array(response_type):
+
+ data = '{"hello": 0}'
+ with mock.patch.object(
+ ResponseMock, "content", return_value=mock_async_gen(data)
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ await itr.__anext__()
+ mock_method.assert_called_once()
+
+
[email protected]
[email protected]("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_cancel(response_type):
+ with mock.patch.object(
+ ResponseMock, "close", new_callable=mock.AsyncMock
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ await itr.cancel()
+ mock_method.assert_called_once()
+
+
[email protected]
[email protected]("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_iterator_as_context_manager(response_type):
+ with mock.patch.object(
+ ResponseMock, "close", new_callable=mock.AsyncMock
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ async with rest_streaming_async.AsyncResponseIterator(resp, response_type):
+ pass
+ mock_method.assert_called_once()
+
+
[email protected]
[email protected](
+ "response_type,return_value",
+ [
+ (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")),
+ (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")),
+ ],
+)
+async def test_check_buffer(response_type, return_value):
+ with mock.patch.object(
+ ResponseMock,
+ "_parse_responses",
+ return_value=return_value,
+ ):
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ await itr.__anext__()
+ await itr.__anext__()
+
+
[email protected]
[email protected]("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_next_html(response_type):
+
+ data = "<!DOCTYPE html><html></html>"
+ with mock.patch.object(
+ ResponseMock, "content", return_value=mock_async_gen(data)
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ await itr.__anext__()
+ mock_method.assert_called_once()
+
+
[email protected]
+async def test_invalid_response_class():
+ class SomeClass:
+ pass
+
+ resp = ResponseMock(responses=[], response_cls=SomeClass)
+ with pytest.raises(
+ ValueError,
+ match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message",
+ ):
+ rest_streaming_async.AsyncResponseIterator(resp, SomeClass)
diff --git a/tests/helpers.py b/tests/helpers.py
new file mode 100644
index 0000000..3429d51
--- /dev/null
+++ b/tests/helpers.py
@@ -0,0 +1,71 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helpers for tests"""
+
+import logging
+from typing import List
+
+import proto
+
+from google.protobuf import duration_pb2
+from google.protobuf import timestamp_pb2
+from google.protobuf.json_format import MessageToJson
+
+
+class Genre(proto.Enum):
+ GENRE_UNSPECIFIED = 0
+ CLASSICAL = 1
+ JAZZ = 2
+ ROCK = 3
+
+
+class Composer(proto.Message):
+ given_name = proto.Field(proto.STRING, number=1)
+ family_name = proto.Field(proto.STRING, number=2)
+ relateds = proto.RepeatedField(proto.STRING, number=3)
+ indices = proto.MapField(proto.STRING, proto.STRING, number=4)
+
+
+class Song(proto.Message):
+ composer = proto.Field(Composer, number=1)
+ title = proto.Field(proto.STRING, number=2)
+ lyrics = proto.Field(proto.STRING, number=3)
+ year = proto.Field(proto.INT32, number=4)
+ genre = proto.Field(Genre, number=5)
+ is_five_mins_longer = proto.Field(proto.BOOL, number=6)
+ score = proto.Field(proto.DOUBLE, number=7)
+ likes = proto.Field(proto.INT64, number=8)
+ duration = proto.Field(duration_pb2.Duration, number=9)
+ date_added = proto.Field(timestamp_pb2.Timestamp, number=10)
+
+
+class EchoResponse(proto.Message):
+ content = proto.Field(proto.STRING, number=1)
+
+
+def parse_responses(response_message_cls, all_responses: List[proto.Message]) -> bytes:
+ # json.dumps returns a string surrounded with quotes that need to be stripped
+ # in order to be an actual JSON.
+ json_responses = [
+ (
+ response_message_cls.to_json(response).strip('"')
+ if issubclass(response_message_cls, proto.Message)
+ else MessageToJson(response).strip('"')
+ )
+ for response in all_responses
+ ]
+ logging.info(f"Sending JSON stream: {json_responses}")
+ ret_val = "[{}]".format(",".join(json_responses))
+ return bytes(ret_val, "utf-8")
diff --git a/tests/unit/future/test__helpers.py b/tests/unit/future/test__helpers.py
index 98afc59..a37efdd 100644
--- a/tests/unit/future/test__helpers.py
+++ b/tests/unit/future/test__helpers.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+from unittest import mock
from google.api_core.future import _helpers
diff --git a/tests/unit/future/test_polling.py b/tests/unit/future/test_polling.py
index 2381d03..2f66f23 100644
--- a/tests/unit/future/test_polling.py
+++ b/tests/unit/future/test_polling.py
@@ -15,8 +15,8 @@
import concurrent.futures
import threading
import time
+from unittest import mock
-import mock
import pytest
from google.api_core import exceptions, retry
@@ -24,7 +24,7 @@
class PollingFutureImpl(polling.PollingFuture):
- def done(self):
+ def done(self, retry=None):
return False
def cancel(self):
@@ -33,9 +33,6 @@
def cancelled(self):
return False
- def running(self):
- return True
-
def test_polling_future_constructor():
future = PollingFutureImpl()
@@ -84,20 +81,23 @@
class PollingFutureImplWithPoll(PollingFutureImpl):
- def __init__(self):
+ def __init__(self, max_poll_count=1):
super(PollingFutureImplWithPoll, self).__init__()
self.poll_count = 0
self.event = threading.Event()
+ self.max_poll_count = max_poll_count
- def done(self, retry=polling.DEFAULT_RETRY):
+ def done(self, retry=None):
self.poll_count += 1
+ if self.max_poll_count > self.poll_count:
+ return False
self.event.wait()
self.set_result(42)
return True
-def test_result_with_polling():
- future = PollingFutureImplWithPoll()
+def test_result_with_one_polling():
+ future = PollingFutureImplWithPoll(max_poll_count=1)
future.event.set()
result = future.result()
@@ -109,8 +109,34 @@
assert future.poll_count == 1
+def test_result_with_two_pollings():
+ future = PollingFutureImplWithPoll(max_poll_count=2)
+
+ future.event.set()
+ result = future.result()
+
+ assert result == 42
+ assert future.poll_count == 2
+ # Repeated calls should not cause additional polling
+ assert future.result() == result
+ assert future.poll_count == 2
+
+
+def test_result_with_two_pollings_custom_retry():
+ future = PollingFutureImplWithPoll(max_poll_count=2)
+
+ future.event.set()
+ result = future.result()
+
+ assert result == 42
+ assert future.poll_count == 2
+ # Repeated calls should not cause additional polling
+ assert future.result() == result
+ assert future.poll_count == 2
+
+
class PollingFutureImplTimeout(PollingFutureImplWithPoll):
- def done(self, retry=polling.DEFAULT_RETRY):
+ def done(self, retry=None):
time.sleep(1)
return False
@@ -132,11 +158,11 @@
super(PollingFutureImplTransient, self).__init__()
self._errors = errors
- def done(self, retry=polling.DEFAULT_RETRY):
+ def done(self, retry=None):
+ self.poll_count += 1
if self._errors:
error, self._errors = self._errors[0], self._errors[1:]
raise error("testing")
- self.poll_count += 1
self.set_result(42)
return True
@@ -144,17 +170,17 @@
def test_result_transient_error():
future = PollingFutureImplTransient(
(
- exceptions.TooManyRequests,
- exceptions.InternalServerError,
- exceptions.BadGateway,
+ polling._OperationNotComplete,
+ polling._OperationNotComplete,
+ polling._OperationNotComplete,
)
)
result = future.result()
assert result == 42
- assert future.poll_count == 1
+ assert future.poll_count == 4
# Repeated calls should not cause additional polling
assert future.result() == result
- assert future.poll_count == 1
+ assert future.poll_count == 4
def test_callback_background_thread():
@@ -197,23 +223,23 @@
class PollingFutureImplWithoutRetry(PollingFutureImpl):
- def done(self):
+ def done(self, retry=None):
return True
- def result(self):
+ def result(self, timeout=None, retry=None, polling=None):
return super(PollingFutureImplWithoutRetry, self).result()
- def _blocking_poll(self, timeout):
+ def _blocking_poll(self, timeout=None, retry=None, polling=None):
return super(PollingFutureImplWithoutRetry, self)._blocking_poll(
timeout=timeout
)
class PollingFutureImplWith_done_or_raise(PollingFutureImpl):
- def done(self):
+ def done(self, retry=None):
return True
- def _done_or_raise(self):
+ def _done_or_raise(self, retry=None):
return super(PollingFutureImplWith_done_or_raise, self)._done_or_raise()
@@ -223,12 +249,12 @@
)
future = PollingFutureImplWithoutRetry()
assert future.done()
- assert future.running()
+ assert not future.running()
assert future.result() is None
with mock.patch.object(future, "done") as done_mock:
future._done_or_raise()
- done_mock.assert_called_once_with()
+ done_mock.assert_called_once_with(retry=None)
with mock.patch.object(future, "done") as done_mock:
future._done_or_raise(retry=custom_retry)
@@ -238,5 +264,5 @@
def test_polling_future_with__done_or_raise():
future = PollingFutureImplWith_done_or_raise()
assert future.done()
- assert future.running()
+ assert not future.running()
assert future.result() is None
diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py
index 9778d23..87aa639 100644
--- a/tests/unit/gapic/test_method.py
+++ b/tests/unit/gapic/test_method.py
@@ -13,8 +13,8 @@
# limitations under the License.
import datetime
+from unittest import mock
-import mock
import pytest
try:
@@ -39,27 +39,6 @@
curr_value += delta
-def test__determine_timeout():
- # Check _determine_timeout always returns a Timeout object.
- timeout_type_timeout = timeout.ConstantTimeout(600.0)
- returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
- 600.0, 600.0, None
- )
- assert isinstance(returned_timeout, timeout.ConstantTimeout)
- returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
- 600.0, timeout_type_timeout, None
- )
- assert isinstance(returned_timeout, timeout.ConstantTimeout)
- returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
- timeout_type_timeout, 600.0, None
- )
- assert isinstance(returned_timeout, timeout.ConstantTimeout)
- returned_timeout = google.api_core.gapic_v1.method._determine_timeout(
- timeout_type_timeout, timeout_type_timeout, None
- )
- assert isinstance(returned_timeout, timeout.ConstantTimeout)
-
-
def test_wrap_method_basic():
method = mock.Mock(spec=["__call__"], return_value=42)
@@ -142,91 +121,71 @@
@mock.patch("time.sleep")
-def test_wrap_method_with_default_retry_and_timeout(unusued_sleep):
+def test_wrap_method_with_default_retry_and_timeout_and_compression(unused_sleep):
method = mock.Mock(
spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
)
default_retry = retry.Retry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = grpc.Compression.Gzip
wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = wrapped_method()
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=60, metadata=mock.ANY)
+ method.assert_called_with(
+ timeout=60, compression=default_compression, metadata=mock.ANY
+ )
@mock.patch("time.sleep")
-def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unusued_sleep):
+def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_sleep):
method = mock.Mock(
spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42]
)
default_retry = retry.Retry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = grpc.Compression.Gzip
wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = wrapped_method(
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
+ compression=google.api_core.gapic_v1.method.DEFAULT,
)
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=60, metadata=mock.ANY)
+ method.assert_called_with(
+ timeout=60, compression=default_compression, metadata=mock.ANY
+ )
@mock.patch("time.sleep")
-def test_wrap_method_with_overriding_retry_and_timeout(unusued_sleep):
+def test_wrap_method_with_overriding_retry_timeout_compression(unused_sleep):
method = mock.Mock(spec=["__call__"], side_effect=[exceptions.NotFound(None), 42])
default_retry = retry.Retry()
default_timeout = timeout.ConstantTimeout(60)
+ default_compression = grpc.Compression.Gzip
wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
+ method, default_retry, default_timeout, default_compression
)
result = wrapped_method(
retry=retry.Retry(retry.if_exception_type(exceptions.NotFound)),
timeout=timeout.ConstantTimeout(22),
+ compression=grpc.Compression.Deflate,
)
assert result == 42
assert method.call_count == 2
- method.assert_called_with(timeout=22, metadata=mock.ANY)
-
-
[email protected]("time.sleep")
[email protected](
- "google.api_core.datetime_helpers.utcnow",
- side_effect=_utcnow_monotonic(),
- autospec=True,
-)
-def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep):
- method = mock.Mock(
- spec=["__call__"],
- side_effect=([exceptions.InternalServerError(None)] * 4) + [42],
- )
- default_retry = retry.Retry()
- default_timeout = timeout.ExponentialTimeout(deadline=60)
- wrapped_method = google.api_core.gapic_v1.method.wrap_method(
- method, default_retry, default_timeout
- )
-
- # Overriding only the retry's deadline should also override the timeout's
- # deadline.
- result = wrapped_method(retry=default_retry.with_deadline(30))
-
- assert result == 42
- timeout_args = [call[1]["timeout"] for call in method.call_args_list]
- assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0]
- assert utcnow.call_count == (
- 1
- + 5 # First to set the deadline.
- + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds)
+ method.assert_called_with(
+ timeout=22, compression=grpc.Compression.Deflate, metadata=mock.ANY
)
@@ -242,3 +201,24 @@
assert result == 42
method.assert_called_once_with(timeout=22, metadata=mock.ANY)
+
+
+def test_wrap_method_with_call():
+ method = mock.Mock()
+ mock_call = mock.Mock()
+ method.with_call.return_value = 42, mock_call
+
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(method, with_call=True)
+ result = wrapped_method()
+ assert len(result) == 2
+ assert result[0] == 42
+ assert result[1] == mock_call
+
+
+def test_wrap_method_with_call_not_supported():
+ """Raises an error if wrapped callable doesn't have with_call method."""
+ method = lambda: None # noqa: E731
+
+ with pytest.raises(ValueError) as exc_info:
+ google.api_core.gapic_v1.method.wrap_method(method, with_call=True)
+ assert "with_call=True is only supported for unary calls" in str(exc_info.value)
diff --git a/tests/unit/gapic/test_routing_header.py b/tests/unit/gapic/test_routing_header.py
index 3037867..2c8c754 100644
--- a/tests/unit/gapic/test_routing_header.py
+++ b/tests/unit/gapic/test_routing_header.py
@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from enum import Enum
+
import pytest
try:
@@ -35,7 +37,67 @@
assert value == "name=me/ep&book.read=1%262"
+def test_enum_fully_qualified():
+ class Message:
+ class Color(Enum):
+ RED = 1
+ GREEN = 2
+ BLUE = 3
+
+ params = [("color", Message.Color.RED)]
+ value = routing_header.to_routing_header(params)
+ assert value == "color=Color.RED"
+ value = routing_header.to_routing_header(params, qualified_enums=True)
+ assert value == "color=Color.RED"
+
+
+def test_enum_nonqualified():
+ class Message:
+ class Color(Enum):
+ RED = 1
+ GREEN = 2
+ BLUE = 3
+
+ params = [("color", Message.Color.RED), ("num", 5)]
+ value = routing_header.to_routing_header(params, qualified_enums=False)
+ assert value == "color=RED&num=5"
+ params = {"color": Message.Color.RED, "num": 5}
+ value = routing_header.to_routing_header(params, qualified_enums=False)
+ assert value == "color=RED&num=5"
+
+
def test_to_grpc_metadata():
params = [("name", "meep"), ("book.read", "1")]
metadata = routing_header.to_grpc_metadata(params)
assert metadata == (routing_header.ROUTING_METADATA_KEY, "name=meep&book.read=1")
+
+
[email protected](
+ "key,value,expected",
+ [
+ ("book.read", "1", "book.read=1"),
+ ("name", "me/ep", "name=me/ep"),
+ ("\\", "=", "%5C=%3D"),
+ (b"hello", "world", "hello=world"),
+ ("✔️", "✌️", "%E2%9C%94%EF%B8%8F=%E2%9C%8C%EF%B8%8F"),
+ ],
+)
+def test__urlencode_param(key, value, expected):
+ result = routing_header._urlencode_param(key, value)
+ assert result == expected
+
+
+def test__urlencode_param_caching_performance():
+ import time
+
+ key = "key" * 100
+ value = "value" * 100
+ # time with empty cache
+ start_time = time.perf_counter()
+ routing_header._urlencode_param(key, value)
+ duration = time.perf_counter() - start_time
+ second_start_time = time.perf_counter()
+ routing_header._urlencode_param(key, value)
+ second_duration = time.perf_counter() - second_start_time
+ # second call should be approximately 10 times faster
+ assert second_duration < duration / 10
diff --git a/tests/unit/operations_v1/test_operations_client.py b/tests/unit/operations_v1/test_operations_client.py
index 187f0be..fb4b14f 100644
--- a/tests/unit/operations_v1/test_operations_client.py
+++ b/tests/unit/operations_v1/test_operations_client.py
@@ -16,12 +16,13 @@
try:
import grpc # noqa: F401
-except ImportError:
+except ImportError: # pragma: NO COVER
pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import grpc_helpers
from google.api_core import operations_v1
from google.api_core import page_iterator
+from google.api_core.operations_v1 import operations_client_config
from google.longrunning import operations_pb2
from google.protobuf import empty_pb2
@@ -96,3 +97,7 @@
].metadata
assert len(channel.CancelOperation.requests) == 1
assert channel.CancelOperation.requests[0].name == "name"
+
+
+def test_operations_client_config():
+ assert operations_client_config.config["interfaces"]
diff --git a/tests/unit/operations_v1/test_operations_rest_client.py b/tests/unit/operations_v1/test_operations_rest_client.py
index dddf6b7..d1f6e0e 100644
--- a/tests/unit/operations_v1/test_operations_rest_client.py
+++ b/tests/unit/operations_v1/test_operations_rest_client.py
@@ -15,23 +15,31 @@
#
import os
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
import pytest
+from typing import Any, List
try:
import grpc # noqa: F401
-except ImportError:
+except ImportError: # pragma: NO COVER
pytest.skip("No GRPC", allow_module_level=True)
from requests import Response # noqa I201
-from requests.sessions import Session
+from google.auth.transport.requests import AuthorizedSession
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core.operations_v1 import AbstractOperationsClient
-from google.api_core.operations_v1 import pagers
-from google.api_core.operations_v1 import transports
+
import google.auth
+from google.api_core.operations_v1 import pagers
+from google.api_core.operations_v1 import pagers_async
+from google.api_core.operations_v1 import transports
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.longrunning import operations_pb2
@@ -39,6 +47,16 @@
from google.protobuf import json_format # type: ignore
from google.rpc import status_pb2 # type: ignore
+try:
+ import aiohttp # noqa: F401
+ import google.auth.aio.transport
+ from google.auth.aio.transport.sessions import AsyncAuthorizedSession
+ from google.api_core.operations_v1 import AsyncOperationsRestClient
+ from google.auth.aio import credentials as ga_credentials_async
+
+ GOOGLE_AUTH_AIO_INSTALLED = True
+except ImportError:
+ GOOGLE_AUTH_AIO_INSTALLED = False
HTTP_OPTIONS = {
"google.longrunning.Operations.CancelOperation": [
@@ -55,17 +73,62 @@
],
}
+PYPARAM_CLIENT: List[Any] = [
+ AbstractOperationsClient,
+]
+PYPARAM_CLIENT_TRANSPORT_NAME = [
+ [AbstractOperationsClient, transports.OperationsRestTransport, "rest"],
+]
+PYPARAM_CLIENT_TRANSPORT_CREDENTIALS = [
+ [
+ AbstractOperationsClient,
+ transports.OperationsRestTransport,
+ ga_credentials.AnonymousCredentials(),
+ ],
+]
+
+if GOOGLE_AUTH_AIO_INSTALLED:
+ PYPARAM_CLIENT.append(AsyncOperationsRestClient)
+ PYPARAM_CLIENT_TRANSPORT_NAME.append(
+ [
+ AsyncOperationsRestClient,
+ transports.AsyncOperationsRestTransport,
+ "rest_asyncio",
+ ]
+ )
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS.append(
+ [
+ AsyncOperationsRestClient,
+ transports.AsyncOperationsRestTransport,
+ ga_credentials_async.AnonymousCredentials(),
+ ]
+ )
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
-def _get_operations_client(http_options=HTTP_OPTIONS):
- transport = transports.rest.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(), http_options=http_options
+def _get_session_type(is_async: bool):
+ return (
+ AsyncAuthorizedSession
+ if is_async and GOOGLE_AUTH_AIO_INSTALLED
+ else AuthorizedSession
)
- return AbstractOperationsClient(transport=transport)
+
+def _get_operations_client(is_async: bool, http_options=HTTP_OPTIONS):
+ if is_async and GOOGLE_AUTH_AIO_INSTALLED:
+ async_transport = transports.rest_asyncio.AsyncOperationsRestTransport(
+ credentials=ga_credentials_async.AnonymousCredentials(),
+ http_options=http_options,
+ )
+ return AsyncOperationsRestClient(transport=async_transport)
+ else:
+ sync_transport = transports.rest.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(), http_options=http_options
+ )
+ return AbstractOperationsClient(transport=sync_transport)
# If default endpoint is localhost, then default mtls endpoint will be the same.
@@ -79,57 +142,69 @@
)
-def test__get_default_mtls_endpoint():
+# TODO: Add support for mtls in async rest
[email protected](
+ "client_class",
+ [
+ AbstractOperationsClient,
+ ],
+)
+def test__get_default_mtls_endpoint(client_class):
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
- assert AbstractOperationsClient._get_default_mtls_endpoint(None) is None
+ assert client_class._get_default_mtls_endpoint(None) is None
+ assert client_class._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert (
- AbstractOperationsClient._get_default_mtls_endpoint(api_endpoint)
- == api_mtls_endpoint
+ client_class._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
)
assert (
- AbstractOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint)
- == api_mtls_endpoint
- )
- assert (
- AbstractOperationsClient._get_default_mtls_endpoint(sandbox_endpoint)
+ client_class._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
- AbstractOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ client_class._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
- assert (
- AbstractOperationsClient._get_default_mtls_endpoint(non_googleapi)
- == non_googleapi
- )
-
-
[email protected]("client_class", [AbstractOperationsClient])
-def test_operations_client_from_service_account_info(client_class):
- creds = ga_credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_info"
- ) as factory:
- factory.return_value = creds
- info = {"valid": True}
- client = client_class.from_service_account_info(info)
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
-
- assert client.transport._host == "longrunning.googleapis.com:443"
+ assert client_class._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize(
- "transport_class,transport_name", [(transports.OperationsRestTransport, "rest")]
+ "client_class",
+ PYPARAM_CLIENT,
)
-def test_operations_client_service_account_always_use_jwt(
- transport_class, transport_name
-):
+def test_operations_client_from_service_account_info(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ if "async" in str(client_class):
+ # TODO(): Add support for service account info to async REST transport.
+ with pytest.raises(NotImplementedError):
+ info = {"valid": True}
+ client_class.from_service_account_info(info)
+ else:
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "https://longrunning.googleapis.com"
+
+
[email protected](
+ "transport_class",
+ [
+ transports.OperationsRestTransport,
+ # TODO(https://github.com/googleapis/python-api-core/issues/706): Add support for
+ # service account credentials in transports.AsyncOperationsRestTransport
+ ],
+)
+def test_operations_client_service_account_always_use_jwt(transport_class):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
@@ -145,35 +220,53 @@
use_jwt.assert_not_called()
[email protected]("client_class", [AbstractOperationsClient])
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
def test_operations_client_from_service_account_file(client_class):
- creds = ga_credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_file"
- ) as factory:
- factory.return_value = creds
- client = client_class.from_service_account_file("dummy/file/path.json")
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
- client = client_class.from_service_account_json("dummy/file/path.json")
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
+ if "async" in str(client_class):
+ # TODO(): Add support for service account creds to async REST transport.
+ with pytest.raises(NotImplementedError):
+ client_class.from_service_account_file("dummy/file/path.json")
+ else:
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
- assert client.transport._host == "longrunning.googleapis.com:443"
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "https://longrunning.googleapis.com"
-def test_operations_client_get_transport_class():
- transport = AbstractOperationsClient.get_transport_class()
[email protected](
+ "client_class,transport_class,transport_name",
+ PYPARAM_CLIENT_TRANSPORT_NAME,
+)
+def test_operations_client_get_transport_class(
+ client_class, transport_class, transport_name
+):
+ transport = client_class.get_transport_class()
available_transports = [
transports.OperationsRestTransport,
]
+ if GOOGLE_AUTH_AIO_INSTALLED:
+ available_transports.append(transports.AsyncOperationsRestTransport)
assert transport in available_transports
- transport = AbstractOperationsClient.get_transport_class("rest")
- assert transport == transports.OperationsRestTransport
+ transport = client_class.get_transport_class(transport_name)
+ assert transport == transport_class
+# TODO(): Update this test case to include async REST once we have support for MTLS.
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
@@ -186,22 +279,21 @@
def test_operations_client_client_options(
client_class, transport_class, transport_name
):
- # Check that if channel is provided we won't create a new one.
- with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc:
- transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
- client = client_class(transport=transport)
- gtc.assert_not_called()
+ # # Check that if channel is provided we won't create a new one.
+ # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc:
+ # client = client_class(transport=transport_class())
+ # gtc.assert_not_called()
- # Check that if channel is provided via str we will create a new one.
- with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc:
- client = client_class(transport=transport_name)
- gtc.assert_called()
+ # # Check that if channel is provided via str we will create a new one.
+ # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc:
+ # client = client_class(transport=transport_name)
+ # gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
- client = client_class(client_options=options)
+ client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
@@ -218,7 +310,7 @@
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
- client = client_class()
+ client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
@@ -235,7 +327,7 @@
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
- client = client_class()
+ client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
@@ -264,7 +356,7 @@
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
- client = client_class(client_options=options)
+ client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
@@ -277,6 +369,7 @@
)
+# TODO: Add support for mtls in async REST
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
@@ -393,59 +486,68 @@
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
- [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+ PYPARAM_CLIENT_TRANSPORT_NAME,
)
def test_operations_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
- options = client_options.ClientOptions(scopes=["1", "2"],)
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=["1", "2"],
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
+ options = client_options.ClientOptions(
+ scopes=["1", "2"],
+ )
+ if "async" in str(client_class):
+ # TODO(): Add support for scopes to async REST transport.
+ with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError):
+ client_class(client_options=options, transport=transport_name)
+ else:
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
- [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+ PYPARAM_CLIENT_TRANSPORT_NAME,
)
def test_operations_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
+ if "async" in str(client_class):
+ # TODO(): Add support for credentials file to async REST transport.
+ with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError):
+ client_class(client_options=options, transport=transport_name)
+ else:
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
-def test_list_operations_rest(
- transport: str = "rest", request_type=operations_pb2.ListOperationsRequest
-):
- client = _get_operations_client()
-
+def test_list_operations_rest():
+ client = _get_operations_client(is_async=False)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# Designate an appropriate value for the returned response.
return_value = operations_pb2.ListOperationsResponse(
next_page_token="next_page_token_value",
@@ -463,10 +565,7 @@
actual_args = req.call_args
assert actual_args.args[0] == "GET"
- assert (
- actual_args.args[1]
- == "https://longrunning.googleapis.com:443/v3/operations"
- )
+ assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations"
assert actual_args.kwargs["params"] == [
("filter", "my_filter"),
("pageSize", 10),
@@ -478,10 +577,49 @@
assert response.next_page_token == "next_page_token_value"
-def test_list_operations_rest_failure():
- client = _get_operations_client(http_options=None)
[email protected]
+async def test_list_operations_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
- with mock.patch.object(Session, "request") as req:
+ client = _get_operations_client(is_async=True)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.ListOperationsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.read = mock.AsyncMock(
+ return_value=json_return_value.encode("UTF-8")
+ )
+ req.return_value = response_value
+ response = await client.list_operations(
+ name="operations", filter_="my_filter", page_size=10, page_token="abc"
+ )
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations"
+ assert actual_args.kwargs["params"] == [
+ ("filter", "my_filter"),
+ ("pageSize", 10),
+ ("pageToken", "abc"),
+ ]
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers_async.ListOperationsAsyncPager)
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_operations_rest_failure():
+ client = _get_operations_client(is_async=False, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
response_value = Response()
response_value.status_code = 400
mock_request = mock.MagicMock()
@@ -493,13 +631,31 @@
client.list_operations(name="operations")
[email protected]
+async def test_list_operations_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com:443/v1/operations"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.list_operations(name="operations")
+
+
def test_list_operations_rest_pager():
- client = AbstractOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
+ client = _get_operations_client(is_async=False, http_options=None)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# TODO(kbandes): remove this mock unless there's a good reason for it.
# with mock.patch.object(path_template, 'transcode') as transcode:
# Set the response as a series of pages
@@ -513,10 +669,12 @@
next_page_token="abc",
),
operations_pb2.ListOperationsResponse(
- operations=[], next_page_token="def",
+ operations=[],
+ next_page_token="def",
),
operations_pb2.ListOperationsResponse(
- operations=[operations_pb2.Operation()], next_page_token="ghi",
+ operations=[operations_pb2.Operation()],
+ next_page_token="ghi",
),
operations_pb2.ListOperationsResponse(
operations=[operations_pb2.Operation(), operations_pb2.Operation()],
@@ -544,16 +702,85 @@
assert page_.next_page_token == token
-def test_get_operation_rest(
- transport: str = "rest", request_type=operations_pb2.GetOperationRequest
-):
- client = _get_operations_client()
[email protected]
+async def test_list_operations_rest_pager_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode:
+ # Set the response as a series of pages
+ response = (
+ operations_pb2.ListOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token="abc",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[],
+ next_page_token="def",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation()],
+ next_page_token="ghi",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation(), operations_pb2.Operation()],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(json_format.MessageToJson(x) for x in response)
+ return_values = tuple(mock.Mock() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val.read = mock.AsyncMock(return_value=response_val.encode("UTF-8"))
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ pager = await client.list_operations(name="operations")
+
+ responses = []
+ async for response in pager:
+ responses.append(response)
+
+ results = list(responses)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation) for i in results)
+ pager = await client.list_operations(name="operations")
+
+ responses = []
+ async for response in pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, operations_pb2.Operation) for i in results)
+
+ pages = []
+
+ async for page in pager.pages:
+ pages.append(page)
+ for page_, token in zip(pages, ["", "", "", "abc", "def", "ghi", ""]):
+ assert page_.next_page_token == token
+
+
+def test_get_operation_rest():
+ client = _get_operations_client(is_async=False)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# Designate an appropriate value for the returned response.
return_value = operations_pb2.Operation(
- name="operations/sample1", done=True, error=status_pb2.Status(code=411),
+ name="operations/sample1",
+ done=True,
+ error=status_pb2.Status(code=411),
)
# Wrap the value into a proper Response obj
@@ -568,7 +795,43 @@
assert actual_args.args[0] == "GET"
assert (
actual_args.args[1]
- == "https://longrunning.googleapis.com:443/v3/operations/sample1"
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
+ )
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.Operation)
+ assert response.name == "operations/sample1"
+ assert response.done is True
+
+
[email protected]
+async def test_get_operation_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(
+ name="operations/sample1",
+ done=True,
+ error=status_pb2.Status(code=411),
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.read = mock.AsyncMock(return_value=json_return_value)
+ req.return_value = response_value
+ response = await client.get_operation("operations/sample1")
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
)
# Establish that the response is the type that we expect.
@@ -578,29 +841,44 @@
def test_get_operation_rest_failure():
- client = _get_operations_client(http_options=None)
+ client = _get_operations_client(is_async=False, http_options=None)
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
response_value = Response()
response_value.status_code = 400
mock_request = mock.MagicMock()
mock_request.method = "GET"
- mock_request.url = (
- "https://longrunning.googleapis.com:443/v1/operations/sample1"
- )
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
response_value.request = mock_request
req.return_value = response_value
with pytest.raises(core_exceptions.GoogleAPIError):
- client.get_operation("operations/sample1")
+ client.get_operation("sample0/operations/sample1")
-def test_delete_operation_rest(
- transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest
-):
- client = _get_operations_client()
[email protected]
+async def test_get_operation_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.get_operation("sample0/operations/sample1")
+
+
+def test_delete_operation_rest():
+ client = _get_operations_client(is_async=False)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
@@ -613,32 +891,75 @@
assert actual_args.args[0] == "DELETE"
assert (
actual_args.args[1]
- == "https://longrunning.googleapis.com:443/v3/operations/sample1"
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
+ )
+
+
[email protected]
+async def test_delete_operation_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value.read = mock.AsyncMock(
+ return_value=json_return_value.encode("UTF-8")
+ )
+ req.return_value = response_value
+ await client.delete_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "DELETE"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
)
def test_delete_operation_rest_failure():
- client = _get_operations_client(http_options=None)
+ client = _get_operations_client(is_async=False, http_options=None)
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
response_value = Response()
response_value.status_code = 400
mock_request = mock.MagicMock()
mock_request.method = "DELETE"
- mock_request.url = (
- "https://longrunning.googleapis.com:443/v1/operations/sample1"
- )
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
response_value.request = mock_request
req.return_value = response_value
with pytest.raises(core_exceptions.GoogleAPIError):
- client.delete_operation(name="operations/sample1")
+ client.delete_operation(name="sample0/operations/sample1")
-def test_cancel_operation_rest(transport: str = "rest"):
- client = _get_operations_client()
[email protected]
+async def test_delete_operation_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "DELETE"
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.delete_operation(name="sample0/operations/sample1")
+
+
+def test_cancel_operation_rest():
+ client = _get_operations_client(is_async=False)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
@@ -651,71 +972,126 @@
assert actual_args.args[0] == "POST"
assert (
actual_args.args[1]
- == "https://longrunning.googleapis.com:443/v3/operations/sample1:cancel"
+ == "https://longrunning.googleapis.com/v3/operations/sample1:cancel"
+ )
+
+
[email protected]
+async def test_cancel_operation_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value.read = mock.AsyncMock(
+ return_value=json_return_value.encode("UTF-8")
+ )
+ req.return_value = response_value
+ await client.cancel_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "POST"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1:cancel"
)
def test_cancel_operation_rest_failure():
- client = _get_operations_client(http_options=None)
+ client = _get_operations_client(is_async=False, http_options=None)
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
response_value = Response()
response_value.status_code = 400
mock_request = mock.MagicMock()
mock_request.method = "POST"
mock_request.url = (
- "https://longrunning.googleapis.com:443/v1/operations/sample1:cancel"
+ "https://longrunning.googleapis.com/v1/operations/sample1:cancel"
)
response_value.request = mock_request
req.return_value = response_value
with pytest.raises(core_exceptions.GoogleAPIError):
- client.cancel_operation(name="operations/sample1")
+ client.cancel_operation(name="sample0/operations/sample1")
-def test_credentials_transport_error():
[email protected]
+async def test_cancel_operation_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "POST"
+ mock_request.url = (
+ "https://longrunning.googleapis.com/v1/operations/sample1:cancel"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.cancel_operation(name="sample0/operations/sample1")
+
+
[email protected](
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_credentials_transport_error(client_class, transport_class, credentials):
+
# It is an error to provide credentials and a transport instance.
- transport = transports.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
+ transport = transport_class(credentials=credentials)
with pytest.raises(ValueError):
- AbstractOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ client_class(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
- transport = transports.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
+ transport = transport_class(credentials=credentials)
with pytest.raises(ValueError):
- AbstractOperationsClient(
+ client_class(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
- transport = transports.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
+ transport = transport_class(credentials=credentials)
with pytest.raises(ValueError):
- AbstractOperationsClient(
- client_options={"scopes": ["1", "2"]}, transport=transport,
+ client_class(
+ client_options={"scopes": ["1", "2"]},
+ transport=transport,
)
-def test_transport_instance():
[email protected](
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_transport_instance(client_class, transport_class, credentials):
# A client may be instantiated with a custom transport instance.
- transport = transports.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
+ transport = transport_class(
+ credentials=credentials,
)
- client = AbstractOperationsClient(transport=transport)
+ client = client_class(transport=transport)
assert client.transport is transport
[email protected]("transport_class", [transports.OperationsRestTransport])
-def test_transport_adc(transport_class):
[email protected](
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_transport_adc(client_class, transport_class, credentials):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ adc.return_value = (credentials, None)
transport_class()
adc.assert_called_once()
@@ -765,7 +1141,8 @@
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transports.OperationsTransport(
- credentials_file="credentials.json", quota_project_id="octopus",
+ credentials_file="credentials.json",
+ quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
@@ -786,159 +1163,239 @@
adc.assert_called_once()
-def test_operations_auth_adc():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_operations_auth_adc(client_class):
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- AbstractOperationsClient()
- adc.assert_called_once_with(
- scopes=None, default_scopes=(), quota_project_id=None,
- )
+
+ if "async" in str(client_class).lower():
+ # TODO(): Add support for adc to async REST transport.
+ # NOTE: Ideally, the logic for adc shouldn't be called if transport
+ # is set to async REST. If the user does not configure credentials
+ # of type `google.auth.aio.credentials.Credentials`,
+ # we should raise an exception to avoid the adc workflow.
+ with pytest.raises(google.auth.exceptions.InvalidType):
+ client_class()
+ else:
+ client_class()
+ adc.assert_called_once_with(
+ scopes=None,
+ default_scopes=(),
+ quota_project_id=None,
+ )
-def test_operations_http_transport_client_cert_source_for_mtls():
+# TODO(https://github.com/googleapis/python-api-core/issues/705): Add
+# testing for `transports.AsyncOperationsRestTransport` once MTLS is supported
+# in `google.auth.aio.transport`.
[email protected](
+ "transport_class",
+ [
+ transports.OperationsRestTransport,
+ ],
+)
+def test_operations_http_transport_client_cert_source_for_mtls(transport_class):
cred = ga_credentials.AnonymousCredentials()
with mock.patch(
"google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
) as mock_configure_mtls_channel:
- transports.OperationsRestTransport(
+ transport_class(
credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
)
mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-def test_operations_host_no_port():
- client = AbstractOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
[email protected](
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_operations_host_no_port(client_class, transport_class, credentials):
+ client = client_class(
+ credentials=credentials,
client_options=client_options.ClientOptions(
api_endpoint="longrunning.googleapis.com"
),
)
- assert client.transport._host == "longrunning.googleapis.com:443"
+ assert client.transport._host == "https://longrunning.googleapis.com"
-def test_operations_host_with_port():
- client = AbstractOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
[email protected](
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_operations_host_with_port(client_class, transport_class, credentials):
+ client = client_class(
+ credentials=credentials,
client_options=client_options.ClientOptions(
api_endpoint="longrunning.googleapis.com:8000"
),
)
- assert client.transport._host == "longrunning.googleapis.com:8000"
+ assert client.transport._host == "https://longrunning.googleapis.com:8000"
-def test_common_billing_account_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_billing_account_path(client_class):
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
- actual = AbstractOperationsClient.common_billing_account_path(billing_account)
+ actual = client_class.common_billing_account_path(billing_account)
assert expected == actual
-def test_parse_common_billing_account_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_billing_account_path(client_class):
expected = {
"billing_account": "clam",
}
- path = AbstractOperationsClient.common_billing_account_path(**expected)
+ path = client_class.common_billing_account_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_billing_account_path(path)
+ actual = client_class.parse_common_billing_account_path(path)
assert expected == actual
-def test_common_folder_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_folder_path(client_class):
folder = "whelk"
- expected = "folders/{folder}".format(folder=folder,)
- actual = AbstractOperationsClient.common_folder_path(folder)
+ expected = "folders/{folder}".format(
+ folder=folder,
+ )
+ actual = client_class.common_folder_path(folder)
assert expected == actual
-def test_parse_common_folder_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_folder_path(client_class):
expected = {
"folder": "octopus",
}
- path = AbstractOperationsClient.common_folder_path(**expected)
+ path = client_class.common_folder_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_folder_path(path)
+ actual = client_class.parse_common_folder_path(path)
assert expected == actual
-def test_common_organization_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_organization_path(client_class):
organization = "oyster"
- expected = "organizations/{organization}".format(organization=organization,)
- actual = AbstractOperationsClient.common_organization_path(organization)
+ expected = "organizations/{organization}".format(
+ organization=organization,
+ )
+ actual = client_class.common_organization_path(organization)
assert expected == actual
-def test_parse_common_organization_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_organization_path(client_class):
expected = {
"organization": "nudibranch",
}
- path = AbstractOperationsClient.common_organization_path(**expected)
+ path = client_class.common_organization_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_organization_path(path)
+ actual = client_class.parse_common_organization_path(path)
assert expected == actual
-def test_common_project_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_project_path(client_class):
project = "cuttlefish"
- expected = "projects/{project}".format(project=project,)
- actual = AbstractOperationsClient.common_project_path(project)
+ expected = "projects/{project}".format(
+ project=project,
+ )
+ actual = client_class.common_project_path(project)
assert expected == actual
-def test_parse_common_project_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_project_path(client_class):
expected = {
"project": "mussel",
}
- path = AbstractOperationsClient.common_project_path(**expected)
+ path = client_class.common_project_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_project_path(path)
+ actual = client_class.parse_common_project_path(path)
assert expected == actual
-def test_common_location_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_location_path(client_class):
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(
- project=project, location=location,
+ project=project,
+ location=location,
)
- actual = AbstractOperationsClient.common_location_path(project, location)
+ actual = client_class.common_location_path(project, location)
assert expected == actual
-def test_parse_common_location_path():
[email protected](
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_location_path(client_class):
expected = {
"project": "scallop",
"location": "abalone",
}
- path = AbstractOperationsClient.common_location_path(**expected)
+ path = client_class.common_location_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_location_path(path)
+ actual = client_class.parse_common_location_path(path)
assert expected == actual
-def test_client_withDEFAULT_CLIENT_INFO():
[email protected](
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_client_withDEFAULT_CLIENT_INFO(client_class, transport_class, credentials):
client_info = gapic_v1.client_info.ClientInfo()
-
- with mock.patch.object(
- transports.OperationsTransport, "_prep_wrapped_messages"
- ) as prep:
- AbstractOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+ with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep:
+ client_class(
+ credentials=credentials,
+ client_info=client_info,
)
prep.assert_called_once_with(client_info)
- with mock.patch.object(
- transports.OperationsTransport, "_prep_wrapped_messages"
- ) as prep:
- transport_class = AbstractOperationsClient.get_transport_class()
+ with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep:
transport_class(
- credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+ credentials=credentials,
+ client_info=client_info,
)
prep.assert_called_once_with(client_info)
diff --git a/tests/unit/retry/__init__.py b/tests/unit/retry/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/unit/retry/__init__.py
diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py
new file mode 100644
index 0000000..212c429
--- /dev/null
+++ b/tests/unit/retry/test_retry_base.py
@@ -0,0 +1,293 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import itertools
+import re
+from unittest import mock
+
+import pytest
+import requests.exceptions
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.auth import exceptions as auth_exceptions
+
+
+def test_if_exception_type():
+ predicate = retry.if_exception_type(ValueError)
+
+ assert predicate(ValueError())
+ assert not predicate(TypeError())
+
+
+def test_if_exception_type_multiple():
+ predicate = retry.if_exception_type(ValueError, TypeError)
+
+ assert predicate(ValueError())
+ assert predicate(TypeError())
+ assert not predicate(RuntimeError())
+
+
+def test_if_transient_error():
+ assert retry.if_transient_error(exceptions.InternalServerError(""))
+ assert retry.if_transient_error(exceptions.TooManyRequests(""))
+ assert retry.if_transient_error(exceptions.ServiceUnavailable(""))
+ assert retry.if_transient_error(requests.exceptions.ConnectionError(""))
+ assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError(""))
+ assert retry.if_transient_error(auth_exceptions.TransportError(""))
+ assert not retry.if_transient_error(exceptions.InvalidArgument(""))
+
+
+# Make uniform return half of its maximum, which will be the calculated
+# sleep time.
[email protected]("random.uniform", autospec=True, side_effect=lambda m, n: n)
+def test_exponential_sleep_generator_base_2(uniform):
+ gen = retry.exponential_sleep_generator(1, 60, multiplier=2)
+
+ result = list(itertools.islice(gen, 8))
+ assert result == [1, 2, 4, 8, 16, 32, 60, 60]
+
+
+def test_build_retry_error_empty_list():
+ """
+ attempt to build a retry error with no errors encountered
+ should return a generic RetryError
+ """
+ from google.api_core.retry import build_retry_error
+ from google.api_core.retry import RetryFailureReason
+
+ reason = RetryFailureReason.NON_RETRYABLE_ERROR
+ src, cause = build_retry_error([], reason, 10)
+ assert isinstance(src, exceptions.RetryError)
+ assert cause is None
+ assert src.message == "Unknown error"
+
+
+def test_build_retry_error_timeout_message():
+ """
+ should provide helpful error message when timeout is reached
+ """
+ from google.api_core.retry import build_retry_error
+ from google.api_core.retry import RetryFailureReason
+
+ reason = RetryFailureReason.TIMEOUT
+ cause = RuntimeError("timeout")
+ src, found_cause = build_retry_error([ValueError(), cause], reason, 10)
+ assert isinstance(src, exceptions.RetryError)
+ assert src.message == "Timeout of 10.0s exceeded"
+ # should attach appropriate cause
+ assert found_cause is cause
+
+
+def test_build_retry_error_empty_timeout():
+ """
+ attempt to build a retry error when timeout is None
+ should return a generic timeout error message
+ """
+ from google.api_core.retry import build_retry_error
+ from google.api_core.retry import RetryFailureReason
+
+ reason = RetryFailureReason.TIMEOUT
+ src, _ = build_retry_error([], reason, None)
+ assert isinstance(src, exceptions.RetryError)
+ assert src.message == "Timeout exceeded"
+
+
+class Test_BaseRetry(object):
+ def _make_one(self, *args, **kwargs):
+ return retry.retry_base._BaseRetry(*args, **kwargs)
+
+ def test_constructor_defaults(self):
+ retry_ = self._make_one()
+ assert retry_._predicate == retry.if_transient_error
+ assert retry_._initial == 1
+ assert retry_._maximum == 60
+ assert retry_._multiplier == 2
+ assert retry_._timeout == 120
+ assert retry_._on_error is None
+ assert retry_.timeout == 120
+ assert retry_.timeout == 120
+
+ def test_constructor_options(self):
+ _some_function = mock.Mock()
+
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=_some_function,
+ )
+ assert retry_._predicate == mock.sentinel.predicate
+ assert retry_._initial == 1
+ assert retry_._maximum == 2
+ assert retry_._multiplier == 3
+ assert retry_._timeout == 4
+ assert retry_._on_error is _some_function
+
+ @pytest.mark.parametrize("use_deadline", [True, False])
+ @pytest.mark.parametrize("value", [None, 0, 1, 4, 42, 5.5])
+ def test_with_timeout(self, use_deadline, value):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = (
+ retry_.with_timeout(value)
+ if not use_deadline
+ else retry_.with_deadline(value)
+ )
+ assert retry_ is not new_retry
+ assert new_retry._timeout == value
+ assert (
+ new_retry.timeout == value
+ if not use_deadline
+ else new_retry.deadline == value
+ )
+
+ # the rest of the attributes should remain the same
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_predicate(self):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_predicate(mock.sentinel.predicate)
+ assert retry_ is not new_retry
+ assert new_retry._predicate == mock.sentinel.predicate
+
+ # the rest of the attributes should remain the same
+ assert new_retry._timeout == retry_._timeout
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_delay_noop(self):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay()
+ assert retry_ is not new_retry
+ assert new_retry._initial == retry_._initial
+ assert new_retry._maximum == retry_._maximum
+ assert new_retry._multiplier == retry_._multiplier
+
+ @pytest.mark.parametrize(
+ "originals,updated,expected",
+ [
+ [(1, 2, 3), (4, 5, 6), (4, 5, 6)],
+ [(1, 2, 3), (0, 0, 0), (0, 0, 0)],
+ [(1, 2, 3), (None, None, None), (1, 2, 3)],
+ [(0, 0, 0), (None, None, None), (0, 0, 0)],
+ [(1, 2, 3), (None, 0.5, None), (1, 0.5, 3)],
+ [(1, 2, 3), (None, 0.5, 4), (1, 0.5, 4)],
+ [(1, 2, 3), (9, None, None), (9, 2, 3)],
+ ],
+ )
+ def test_with_delay(self, originals, updated, expected):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=originals[0],
+ maximum=originals[1],
+ multiplier=originals[2],
+ timeout=14,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay(
+ initial=updated[0], maximum=updated[1], multiplier=updated[2]
+ )
+ assert retry_ is not new_retry
+ assert new_retry._initial == expected[0]
+ assert new_retry._maximum == expected[1]
+ assert new_retry._multiplier == expected[2]
+
+ # the rest of the attributes should remain the same
+ assert new_retry._timeout == retry_._timeout
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._on_error is retry_._on_error
+
+ def test_with_delay_partial_options(self):
+ retry_ = self._make_one(
+ predicate=mock.sentinel.predicate,
+ initial=1,
+ maximum=2,
+ multiplier=3,
+ timeout=4,
+ on_error=mock.sentinel.on_error,
+ )
+ new_retry = retry_.with_delay(initial=4)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 4
+ assert new_retry._maximum == 2
+ assert new_retry._multiplier == 3
+
+ new_retry = retry_.with_delay(maximum=4)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 1
+ assert new_retry._maximum == 4
+ assert new_retry._multiplier == 3
+
+ new_retry = retry_.with_delay(multiplier=4)
+ assert retry_ is not new_retry
+ assert new_retry._initial == 1
+ assert new_retry._maximum == 2
+ assert new_retry._multiplier == 4
+
+ # the rest of the attributes should remain the same
+ assert new_retry._timeout == retry_._timeout
+ assert new_retry._predicate is retry_._predicate
+ assert new_retry._on_error is retry_._on_error
+
+ def test___str__(self):
+ def if_exception_type(exc):
+ return bool(exc) # pragma: NO COVER
+
+ # Explicitly set all attributes as changed Retry defaults should not
+ # cause this test to start failing.
+ retry_ = self._make_one(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ timeout=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r"<_BaseRetry predicate=<function.*?if_exception_type.*?>, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
diff --git a/tests/unit/retry/test_retry_imports.py b/tests/unit/retry/test_retry_imports.py
new file mode 100644
index 0000000..597909f
--- /dev/null
+++ b/tests/unit/retry/test_retry_imports.py
@@ -0,0 +1,33 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def test_legacy_imports_retry_unary_sync():
+ # TODO: Delete this test when we revert these imports on the
+ # next major version release
+ # (https://github.com/googleapis/python-api-core/issues/576)
+ from google.api_core.retry import datetime_helpers # noqa: F401
+ from google.api_core.retry import exceptions # noqa: F401
+ from google.api_core.retry import auth_exceptions # noqa: F401
+
+
+def test_legacy_imports_retry_unary_async():
+ # TODO: Delete this test when we revert these imports on the
+ # next major version release
+ # (https://github.com/googleapis/python-api-core/issues/576)
+ from google.api_core import retry_async # noqa: F401
+
+ # See https://github.com/googleapis/python-api-core/issues/586
+ # for context on why we need to test this import explicitly.
+ from google.api_core.retry_async import AsyncRetry # noqa: F401
diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py
new file mode 100644
index 0000000..0bc85d9
--- /dev/null
+++ b/tests/unit/retry/test_retry_streaming.py
@@ -0,0 +1,476 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+import pytest
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core.retry import retry_streaming
+
+from .test_retry_base import Test_BaseRetry
+
+
+def test_retry_streaming_target_bad_sleep_generator():
+ with pytest.raises(
+ ValueError, match="Sleep generator stopped yielding sleep values"
+ ):
+ next(retry_streaming.retry_target_stream(None, None, [], None))
+
+
+class TestStreamingRetry(Test_BaseRetry):
+ def _make_one(self, *args, **kwargs):
+ return retry_streaming.StreamingRetry(*args, **kwargs)
+
+ def test___str__(self):
+ def if_exception_type(exc):
+ return bool(exc) # pragma: NO COVER
+
+ # Explicitly set all attributes as changed Retry defaults should not
+ # cause this test to start failing.
+ retry_ = retry_streaming.StreamingRetry(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ timeout=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r"<StreamingRetry predicate=<function.*?if_exception_type.*?>, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
+
+ def _generator_mock(
+ self,
+ num=5,
+ error_on=None,
+ return_val=None,
+ exceptions_seen=None,
+ ):
+ """
+ Helper to create a mock generator that yields a number of values
+ Generator can optionally raise an exception on a specific iteration
+
+ Args:
+ - num (int): the number of values to yield. After this, the generator will return `return_val`
+ - error_on (int): if given, the generator will raise a ValueError on the specified iteration
+ - return_val (any): if given, the generator will return this value after yielding num values
+ - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising
+ """
+ try:
+ for i in range(num):
+ if error_on and i == error_on:
+ raise ValueError("generator mock error")
+ yield i
+ return return_val
+ except (Exception, BaseException, GeneratorExit) as e:
+ # keep track of exceptions seen by generator
+ if exceptions_seen is not None:
+ exceptions_seen.append(e)
+ raise
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___success(self, sleep):
+ """
+ Test that a retry-decorated generator yields values as expected
+ This test checks a generator with no issues
+ """
+ import types
+ import collections
+
+ retry_ = retry_streaming.StreamingRetry()
+
+ decorated = retry_(self._generator_mock)
+
+ num = 10
+ result = decorated(num)
+ # check types
+ assert isinstance(decorated(num), collections.abc.Iterable)
+ assert isinstance(decorated(num), types.GeneratorType)
+ assert isinstance(self._generator_mock(num), collections.abc.Iterable)
+ assert isinstance(self._generator_mock(num), types.GeneratorType)
+ # check yield contents
+ unpacked = [i for i in result]
+ assert len(unpacked) == num
+ for a, b in zip(unpacked, self._generator_mock(num)):
+ assert a == b
+ sleep.assert_not_called()
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___retry(self, sleep):
+ """
+ Tests that a retry-decorated generator will retry on errors
+ """
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming.StreamingRetry(
+ on_error=on_error,
+ predicate=retry.if_exception_type(ValueError),
+ timeout=None,
+ )
+ result = retry_(self._generator_mock)(error_on=3)
+ # error thrown on 3
+ # generator should contain 0, 1, 2 looping
+ unpacked = [next(result) for i in range(10)]
+ assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0]
+ assert on_error.call_count == 3
+
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+ @mock.patch("time.sleep", autospec=True)
+ @pytest.mark.parametrize("use_deadline_arg", [True, False])
+ def test___call___retry_hitting_timeout(self, sleep, uniform, use_deadline_arg):
+ """
+ Tests that a retry-decorated generator will throw a RetryError
+ after using the time budget
+ """
+ import time
+
+ timeout_val = 30.9
+ # support "deadline" as an alias for "timeout"
+ timeout_kwarg = (
+ {"timeout": timeout_val}
+ if not use_deadline_arg
+ else {"deadline": timeout_val}
+ )
+
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming.StreamingRetry(
+ predicate=retry.if_exception_type(ValueError),
+ initial=1.0,
+ maximum=1024.0,
+ multiplier=2.0,
+ **timeout_kwarg,
+ )
+
+ timenow = time.monotonic()
+ now_patcher = mock.patch(
+ "time.monotonic",
+ return_value=timenow,
+ )
+
+ decorated = retry_(self._generator_mock, on_error=on_error)
+ generator = decorated(error_on=1)
+ with now_patcher as patched_now:
+ # Make sure that calls to fake time.sleep() also advance the mocked
+ # time clock.
+ def increase_time(sleep_delay):
+ patched_now.return_value += sleep_delay
+
+ sleep.side_effect = increase_time
+ with pytest.raises(exceptions.RetryError):
+ [i for i in generator]
+
+ assert on_error.call_count == 5
+ # check the delays
+ assert sleep.call_count == 4 # once between each successive target calls
+ last_wait = sleep.call_args.args[0]
+ total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
+ assert last_wait == 8.0
+ assert total_wait == 15.0
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_send(self, sleep):
+ """
+ Send should be passed through retry into target generator
+ """
+
+ def _mock_send_gen():
+ """
+ always yield whatever was sent in
+ """
+ in_ = yield
+ while True:
+ in_ = yield in_
+
+ retry_ = retry_streaming.StreamingRetry()
+
+ decorated = retry_(_mock_send_gen)
+
+ generator = decorated()
+ result = next(generator)
+ # first yield should be None
+ assert result is None
+ in_messages = ["test_1", "hello", "world"]
+ out_messages = []
+ for msg in in_messages:
+ recv = generator.send(msg)
+ out_messages.append(recv)
+ assert in_messages == out_messages
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_send_retry(self, sleep):
+ """
+ Send should support retries like next
+ """
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming.StreamingRetry(
+ on_error=on_error,
+ predicate=retry.if_exception_type(ValueError),
+ timeout=None,
+ )
+ result = retry_(self._generator_mock)(error_on=3)
+ with pytest.raises(TypeError) as exc_info:
+ # calling first send with non-None input should raise a TypeError
+ result.send("can not send to fresh generator")
+ assert exc_info.match("can't send non-None value")
+ # initiate iteration with None
+ result = retry_(self._generator_mock)(error_on=3)
+ assert result.send(None) == 0
+ # error thrown on 3
+ # generator should contain 0, 1, 2 looping
+ unpacked = [result.send(i) for i in range(10)]
+ assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1]
+ assert on_error.call_count == 3
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_iterable_send(self, sleep):
+ """
+ send should raise attribute error if wrapped iterator does not support it
+ """
+ retry_ = retry_streaming.StreamingRetry()
+
+ def iterable_fn(n):
+ return iter(range(n))
+
+ decorated = retry_(iterable_fn)
+ generator = decorated(5)
+ # initialize
+ next(generator)
+ # call send
+ with pytest.raises(AttributeError):
+ generator.send("test")
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_iterable_close(self, sleep):
+ """
+ close should be handled by wrapper if wrapped iterable does not support it
+ """
+ retry_ = retry_streaming.StreamingRetry()
+
+ def iterable_fn(n):
+ return iter(range(n))
+
+ decorated = retry_(iterable_fn)
+
+ # try closing active generator
+ retryable = decorated(10)
+ assert next(retryable) == 0
+ retryable.close()
+ with pytest.raises(StopIteration):
+ next(retryable)
+
+ # try closing a new generator
+ retryable = decorated(10)
+ retryable.close()
+ with pytest.raises(StopIteration):
+ next(retryable)
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_iterable_throw(self, sleep):
+ """
+ Throw should work even if the wrapped iterable does not support it
+ """
+ predicate = retry.if_exception_type(ValueError)
+ retry_ = retry_streaming.StreamingRetry(predicate=predicate)
+
+ def iterable_fn(n):
+ return iter(range(n))
+
+ decorated = retry_(iterable_fn)
+
+ # try throwing with active generator
+ retryable = decorated(10)
+ assert next(retryable) == 0
+ # should swallow errors in predicate
+ retryable.throw(ValueError)
+ assert next(retryable) == 1
+ # should raise on other errors
+ with pytest.raises(TypeError):
+ retryable.throw(TypeError)
+ with pytest.raises(StopIteration):
+ next(retryable)
+
+ # try throwing with a new generator
+ retryable = decorated(10)
+ with pytest.raises(ValueError):
+ retryable.throw(ValueError)
+ with pytest.raises(StopIteration):
+ next(retryable)
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_return(self, sleep):
+ """
+ Generator return value should be passed through retry decorator
+ """
+ retry_ = retry_streaming.StreamingRetry()
+
+ decorated = retry_(self._generator_mock)
+
+ expected_value = "done"
+ generator = decorated(5, return_val=expected_value)
+ found_value = None
+ try:
+ while True:
+ next(generator)
+ except StopIteration as e:
+ found_value = e.value
+ assert found_value == expected_value
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_close(self, sleep):
+ """
+ Close should be passed through retry into target generator
+ """
+ retry_ = retry_streaming.StreamingRetry()
+
+ decorated = retry_(self._generator_mock)
+
+ exception_list = []
+ generator = decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ next(generator)
+ generator.close()
+ assert isinstance(exception_list[0], GeneratorExit)
+ with pytest.raises(StopIteration):
+ # calling next on closed generator should raise error
+ next(generator)
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___with_generator_throw(self, sleep):
+ """
+ Throw should be passed through retry into target generator
+ """
+ retry_ = retry_streaming.StreamingRetry(
+ predicate=retry.if_exception_type(ValueError),
+ )
+ decorated = retry_(self._generator_mock)
+
+ exception_list = []
+ generator = decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ next(generator)
+ with pytest.raises(BufferError):
+ generator.throw(BufferError("test"))
+ assert isinstance(exception_list[0], BufferError)
+ with pytest.raises(StopIteration):
+ # calling next on closed generator should raise error
+ next(generator)
+ # should retry if throw retryable exception
+ exception_list = []
+ generator = decorated(10, exceptions_seen=exception_list)
+ for i in range(2):
+ next(generator)
+ val = generator.throw(ValueError("test"))
+ assert val == 0
+ assert isinstance(exception_list[0], ValueError)
+ # calling next on closed generator should not raise error
+ assert next(generator) == 1
+
+ def test_exc_factory_non_retryable_error(self):
+ """
+ generator should give the option to override exception creation logic
+ test when non-retryable error is thrown
+ """
+ from google.api_core.retry import RetryFailureReason
+ from google.api_core.retry.retry_streaming import retry_target_stream
+
+ timeout = None
+ sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")]
+ expected_final_err = RuntimeError("done")
+ expected_source_err = ZeroDivisionError("test4")
+
+ def factory(*args, **kwargs):
+ assert len(kwargs) == 0
+ assert args[0] == sent_errors
+ assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR
+ assert args[2] == timeout
+ return expected_final_err, expected_source_err
+
+ generator = retry_target_stream(
+ self._generator_mock,
+ retry.if_exception_type(ValueError),
+ [0] * 3,
+ timeout=timeout,
+ exception_factory=factory,
+ )
+ # initialize generator
+ next(generator)
+ # trigger some retryable errors
+ generator.throw(sent_errors[0])
+ generator.throw(sent_errors[1])
+ # trigger a non-retryable error
+ with pytest.raises(expected_final_err.__class__) as exc_info:
+ generator.throw(sent_errors[2])
+ assert exc_info.value == expected_final_err
+ assert exc_info.value.__cause__ == expected_source_err
+
+ def test_exc_factory_timeout(self):
+ """
+ generator should give the option to override exception creation logic
+ test when timeout is exceeded
+ """
+ import time
+ from google.api_core.retry import RetryFailureReason
+ from google.api_core.retry.retry_streaming import retry_target_stream
+
+ timeout = 2
+ time_now = time.monotonic()
+ now_patcher = mock.patch(
+ "time.monotonic",
+ return_value=time_now,
+ )
+
+ with now_patcher as patched_now:
+ timeout = 2
+ sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")]
+ expected_final_err = RuntimeError("done")
+ expected_source_err = ZeroDivisionError("test4")
+
+ def factory(*args, **kwargs):
+ assert len(kwargs) == 0
+ assert args[0] == sent_errors
+ assert args[1] == RetryFailureReason.TIMEOUT
+ assert args[2] == timeout
+ return expected_final_err, expected_source_err
+
+ generator = retry_target_stream(
+ self._generator_mock,
+ retry.if_exception_type(ValueError),
+ [0] * 3,
+ timeout=timeout,
+ exception_factory=factory,
+ check_timeout_on_yield=True,
+ )
+ # initialize generator
+ next(generator)
+ # trigger some retryable errors
+ generator.throw(sent_errors[0])
+ generator.throw(sent_errors[1])
+ # trigger a timeout
+ patched_now.return_value += timeout + 1
+ with pytest.raises(expected_final_err.__class__) as exc_info:
+ generator.throw(sent_errors[2])
+ assert exc_info.value == expected_final_err
+ assert exc_info.value.__cause__ == expected_source_err
diff --git a/tests/unit/retry/test_retry_unary.py b/tests/unit/retry/test_retry_unary.py
new file mode 100644
index 0000000..6851fbe
--- /dev/null
+++ b/tests/unit/retry/test_retry_unary.py
@@ -0,0 +1,318 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import pytest
+import re
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+from google.api_core import exceptions
+from google.api_core import retry
+
+from .test_retry_base import Test_BaseRetry
+
+
[email protected]("time.sleep", autospec=True)
[email protected](
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+def test_retry_target_success(utcnow, sleep):
+ predicate = retry.if_exception_type(ValueError)
+ call_count = [0]
+
+ def target():
+ call_count[0] += 1
+ if call_count[0] < 3:
+ raise ValueError()
+ return 42
+
+ result = retry.retry_target(target, predicate, range(10), None)
+
+ assert result == 42
+ assert call_count[0] == 3
+ sleep.assert_has_calls([mock.call(0), mock.call(1)])
+
+
[email protected]("time.sleep", autospec=True)
[email protected](
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+def test_retry_target_w_on_error(utcnow, sleep):
+ predicate = retry.if_exception_type(ValueError)
+ call_count = {"target": 0}
+ to_raise = ValueError()
+
+ def target():
+ call_count["target"] += 1
+ if call_count["target"] < 3:
+ raise to_raise
+ return 42
+
+ on_error = mock.Mock()
+
+ result = retry.retry_target(target, predicate, range(10), None, on_error=on_error)
+
+ assert result == 42
+ assert call_count["target"] == 3
+
+ on_error.assert_has_calls([mock.call(to_raise), mock.call(to_raise)])
+ sleep.assert_has_calls([mock.call(0), mock.call(1)])
+
+
[email protected]("time.sleep", autospec=True)
[email protected](
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
+def test_retry_target_non_retryable_error(utcnow, sleep):
+ predicate = retry.if_exception_type(ValueError)
+ exception = TypeError()
+ target = mock.Mock(side_effect=exception)
+
+ with pytest.raises(TypeError) as exc_info:
+ retry.retry_target(target, predicate, range(10), None)
+
+ assert exc_info.value == exception
+ sleep.assert_not_called()
+
+
[email protected]("asyncio.sleep", autospec=True)
[email protected](
+ "google.api_core.datetime_helpers.utcnow",
+ return_value=datetime.datetime.min,
+ autospec=True,
+)
[email protected]
+async def test_retry_target_warning_for_retry(utcnow, sleep):
+ predicate = retry.if_exception_type(ValueError)
+ target = mock.AsyncMock(spec=["__call__"])
+
+ with pytest.warns(Warning) as exc_info:
+ # Note: predicate is just a filler and doesn't affect the test
+ retry.retry_target(target, predicate, range(10), None)
+
+ assert len(exc_info) == 2
+ assert str(exc_info[0].message) == retry.retry_unary._ASYNC_RETRY_WARNING
+ sleep.assert_not_called()
+
+
[email protected]("time.sleep", autospec=True)
[email protected]("time.monotonic", autospec=True)
[email protected]("use_deadline_arg", [True, False])
+def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg):
+ predicate = retry.if_exception_type(ValueError)
+ exception = ValueError("meep")
+ target = mock.Mock(side_effect=exception)
+ # Setup the timeline so that the first call takes 5 seconds but the second
+ # call takes 6, which puts the retry over the timeout.
+ monotonic.side_effect = [0, 5, 11]
+
+ # support "deadline" as an alias for "timeout"
+ kwargs = {"timeout": 10} if not use_deadline_arg else {"deadline": 10}
+
+ with pytest.raises(exceptions.RetryError) as exc_info:
+ retry.retry_target(target, predicate, range(10), **kwargs)
+
+ assert exc_info.value.cause == exception
+ assert exc_info.match("Timeout of 10.0s exceeded")
+ assert exc_info.match("last exception: meep")
+ assert target.call_count == 2
+
+ # Ensure the exception message does not include the target fn:
+ # it may be a partial with user data embedded
+ assert str(target) not in exc_info.exconly()
+
+
+def test_retry_target_bad_sleep_generator():
+ with pytest.raises(ValueError, match="Sleep generator"):
+ retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None)
+
+
+class TestRetry(Test_BaseRetry):
+ def _make_one(self, *args, **kwargs):
+ return retry.Retry(*args, **kwargs)
+
+ def test___str__(self):
+ def if_exception_type(exc):
+ return bool(exc) # pragma: NO COVER
+
+ # Explicitly set all attributes as changed Retry defaults should not
+ # cause this test to start failing.
+ retry_ = retry.Retry(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ timeout=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r"<Retry predicate=<function.*?if_exception_type.*?>, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___and_execute_success(self, sleep):
+ retry_ = retry.Retry()
+ target = mock.Mock(spec=["__call__"], return_value=42)
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ decorated = retry_(target)
+ target.assert_not_called()
+
+ result = decorated("meep")
+
+ assert result == 42
+ target.assert_called_once_with("meep")
+ sleep.assert_not_called()
+
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___and_execute_retry(self, sleep, uniform):
+ on_error = mock.Mock(spec=["__call__"], side_effect=[None])
+ retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError))
+
+ target = mock.Mock(spec=["__call__"], side_effect=[ValueError(), 42])
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ decorated = retry_(target, on_error=on_error)
+ target.assert_not_called()
+
+ result = decorated("meep")
+
+ assert result == 42
+ assert target.call_count == 2
+ target.assert_has_calls([mock.call("meep"), mock.call("meep")])
+ sleep.assert_called_once_with(retry_._initial)
+ assert on_error.call_count == 1
+
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+ @mock.patch("time.sleep", autospec=True)
+ def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform):
+ on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10)
+ retry_ = retry.Retry(
+ predicate=retry.if_exception_type(ValueError),
+ initial=1.0,
+ maximum=1024.0,
+ multiplier=2.0,
+ timeout=30.9,
+ )
+
+ monotonic_patcher = mock.patch("time.monotonic", return_value=0)
+
+ target = mock.Mock(spec=["__call__"], side_effect=[ValueError()] * 10)
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ decorated = retry_(target, on_error=on_error)
+ target.assert_not_called()
+
+ with monotonic_patcher as patched_monotonic:
+ # Make sure that calls to fake time.sleep() also advance the mocked
+ # time clock.
+ def increase_time(sleep_delay):
+ patched_monotonic.return_value += sleep_delay
+
+ sleep.side_effect = increase_time
+
+ with pytest.raises(exceptions.RetryError):
+ decorated("meep")
+
+ assert target.call_count == 5
+ target.assert_has_calls([mock.call("meep")] * 5)
+ assert on_error.call_count == 5
+
+ # check the delays
+ assert sleep.call_count == 4 # once between each successive target calls
+ last_wait = sleep.call_args.args[0]
+ total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
+
+ assert last_wait == 8.0
+ # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus
+ # we do not even wait for it to be scheduled (30.9 is configured timeout).
+ # This changes the previous logic of shortening the last attempt to fit
+ # in the timeout. The previous logic was removed to make Python retry
+ # logic consistent with the other languages and to not disrupt the
+ # randomized retry delays distribution by artificially increasing a
+ # probability of scheduling two (instead of one) last attempts with very
+ # short delay between them, while the second retry having very low chance
+ # of succeeding anyways.
+ assert total_wait == 15.0
+
+ @mock.patch("time.sleep", autospec=True)
+ def test___init___without_retry_executed(self, sleep):
+ _some_function = mock.Mock()
+
+ retry_ = retry.Retry(
+ predicate=retry.if_exception_type(ValueError), on_error=_some_function
+ )
+ # check the proper creation of the class
+ assert retry_._on_error is _some_function
+
+ target = mock.Mock(spec=["__call__"], side_effect=[42])
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ wrapped = retry_(target)
+
+ result = wrapped("meep")
+
+ assert result == 42
+ target.assert_called_once_with("meep")
+ sleep.assert_not_called()
+ _some_function.assert_not_called()
+
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+ @mock.patch("time.sleep", autospec=True)
+ def test___init___when_retry_is_executed(self, sleep, uniform):
+ _some_function = mock.Mock()
+
+ retry_ = retry.Retry(
+ predicate=retry.if_exception_type(ValueError), on_error=_some_function
+ )
+ # check the proper creation of the class
+ assert retry_._on_error is _some_function
+
+ target = mock.Mock(
+ spec=["__call__"], side_effect=[ValueError(), ValueError(), 42]
+ )
+ # __name__ is needed by functools.partial.
+ target.__name__ = "target"
+
+ wrapped = retry_(target)
+ target.assert_not_called()
+
+ result = wrapped("meep")
+
+ assert result == 42
+ assert target.call_count == 3
+ assert _some_function.call_count == 2
+ target.assert_has_calls([mock.call("meep"), mock.call("meep")])
+ sleep.assert_any_call(retry_._initial)
diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py
index 7fb1620..cca9a21 100644
--- a/tests/unit/test_bidi.py
+++ b/tests/unit/test_bidi.py
@@ -17,12 +17,17 @@
import queue
import threading
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
import pytest
try:
import grpc
-except ImportError:
+except ImportError: # pragma: NO COVER
pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import bidi
@@ -291,6 +296,9 @@
# ensure the request queue was signaled to stop.
assert bidi_rpc.pending_requests == 1
assert bidi_rpc._request_queue.get() is None
+ # ensure request and callbacks are cleaned up
+ assert bidi_rpc._initial_request is None
+ assert not bidi_rpc._callbacks
def test_close_no_rpc(self):
bidi_rpc = bidi.BidiRpc(None)
@@ -618,6 +626,8 @@
assert bidi_rpc.pending_requests == 1
assert bidi_rpc._request_queue.get() is None
assert bidi_rpc._finalized
+ assert bidi_rpc._initial_request is None
+ assert not bidi_rpc._callbacks
def test_reopen_failure_on_rpc_restart(self):
error1 = ValueError("1")
@@ -772,6 +782,7 @@
consumer.stop()
assert consumer.is_active is False
+ assert consumer._on_response is None
def test_wake_on_error(self):
should_continue = threading.Event()
@@ -804,6 +815,21 @@
while consumer.is_active:
pass
+ def test_rpc_callback_fires_when_consumer_start_fails(self):
+ expected_exception = exceptions.InvalidArgument(
+ "test", response=grpc.StatusCode.INVALID_ARGUMENT
+ )
+ callback = mock.Mock(spec=["__call__"])
+
+ rpc, _ = make_rpc()
+ bidi_rpc = bidi.BidiRpc(rpc)
+ bidi_rpc.add_done_callback(callback)
+ bidi_rpc._start_rpc.side_effect = expected_exception
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, on_response=None)
+ consumer.start()
+ assert callback.call_args.args[0] == grpc.StatusCode.INVALID_ARGUMENT
+
def test_consumer_expected_error(self, caplog):
caplog.set_level(logging.DEBUG)
@@ -864,6 +890,7 @@
consumer.stop()
assert consumer.is_active is False
+ assert consumer._on_response is None
# calling stop twice should not result in an error.
consumer.stop()
diff --git a/tests/unit/test_client_info.py b/tests/unit/test_client_info.py
index f5eebfb..3361fef 100644
--- a/tests/unit/test_client_info.py
+++ b/tests/unit/test_client_info.py
@@ -15,7 +15,7 @@
try:
import grpc
-except ImportError:
+except ImportError: # pragma: NO COVER
grpc = None
from google.api_core import client_info
@@ -26,9 +26,9 @@
assert info.python_version is not None
- if grpc is not None:
+ if grpc is not None: # pragma: NO COVER
assert info.grpc_version is not None
- else:
+ else: # pragma: NO COVER
assert info.grpc_version is None
assert info.api_core_version is not None
diff --git a/tests/unit/test_client_logging.py b/tests/unit/test_client_logging.py
new file mode 100644
index 0000000..b3b0b5c
--- /dev/null
+++ b/tests/unit/test_client_logging.py
@@ -0,0 +1,140 @@
+import json
+import logging
+from unittest import mock
+
+from google.api_core.client_logging import (
+ setup_logging,
+ initialize_logging,
+ StructuredLogFormatter,
+)
+
+
+def reset_logger(scope):
+ logger = logging.getLogger(scope)
+ logger.handlers = []
+ logger.setLevel(logging.NOTSET)
+ logger.propagate = True
+
+
+def test_setup_logging_w_no_scopes():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging()
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ reset_logger("foogle")
+
+
+def test_setup_logging_w_base_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging("foogle")
+ base_logger = logging.getLogger("foogle")
+ assert isinstance(base_logger.handlers[0], logging.StreamHandler)
+ assert not base_logger.propagate
+ assert base_logger.level == logging.DEBUG
+
+ reset_logger("foogle")
+
+
+def test_setup_logging_w_configured_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ base_logger = logging.getLogger("foogle")
+ base_logger.propagate = False
+ setup_logging("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ reset_logger("foogle")
+
+
+def test_setup_logging_w_module_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging("foogle.bar")
+
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ module_logger = logging.getLogger("foogle.bar")
+ assert isinstance(module_logger.handlers[0], logging.StreamHandler)
+ assert not module_logger.propagate
+ assert module_logger.level == logging.DEBUG
+
+ reset_logger("foogle")
+ reset_logger("foogle.bar")
+
+
+def test_setup_logging_w_incorrect_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging("abc")
+
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ # TODO(https://github.com/googleapis/python-api-core/issues/759): update test once we add logic to ignore an incorrect scope.
+ logger = logging.getLogger("abc")
+ assert isinstance(logger.handlers[0], logging.StreamHandler)
+ assert not logger.propagate
+ assert logger.level == logging.DEBUG
+
+ reset_logger("foogle")
+ reset_logger("abc")
+
+
+def test_initialize_logging():
+
+ with mock.patch("os.getenv", return_value="foogle.bar"):
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ initialize_logging()
+
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ module_logger = logging.getLogger("foogle.bar")
+ assert isinstance(module_logger.handlers[0], logging.StreamHandler)
+ assert not module_logger.propagate
+ assert module_logger.level == logging.DEBUG
+
+ # Check that `initialize_logging()` is a no-op after the first time by verifying that user-set configs are not modified:
+ base_logger.propagate = True
+ module_logger.propagate = True
+
+ initialize_logging()
+
+ assert base_logger.propagate
+ assert module_logger.propagate
+
+ reset_logger("foogle")
+ reset_logger("foogle.bar")
+
+
+def test_structured_log_formatter():
+ # TODO(https://github.com/googleapis/python-api-core/issues/761): Test additional fields when implemented.
+ record = logging.LogRecord(
+ name="Appelation",
+ level=logging.DEBUG,
+ msg="This is a test message.",
+ pathname="some/path",
+ lineno=25,
+ args=None,
+ exc_info=None,
+ )
+
+ # Extra fields:
+ record.rpcName = "bar"
+
+ formatted_msg = StructuredLogFormatter().format(record)
+ parsed_msg = json.loads(formatted_msg)
+
+ assert parsed_msg["name"] == "Appelation"
+ assert parsed_msg["severity"] == "DEBUG"
+ assert parsed_msg["message"] == "This is a test message."
+ assert parsed_msg["rpcName"] == "bar"
diff --git a/tests/unit/test_client_options.py b/tests/unit/test_client_options.py
index 38b9ad0..396d662 100644
--- a/tests/unit/test_client_options.py
+++ b/tests/unit/test_client_options.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from re import match
import pytest
from google.api_core import client_options
@@ -36,6 +37,8 @@
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
],
+ api_audience="foo2.googleapis.com",
+ universe_domain="googleapis.com",
)
assert options.api_endpoint == "foo.googleapis.com"
@@ -46,6 +49,8 @@
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
]
+ assert options.api_audience == "foo2.googleapis.com"
+ assert options.universe_domain == "googleapis.com"
def test_constructor_with_encrypted_cert_source():
@@ -72,10 +77,42 @@
)
+def test_constructor_with_api_key():
+
+ options = client_options.ClientOptions(
+ api_endpoint="foo.googleapis.com",
+ client_cert_source=get_client_cert,
+ quota_project_id="quote-proj",
+ api_key="api-key",
+ scopes=[
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ ],
+ )
+
+ assert options.api_endpoint == "foo.googleapis.com"
+ assert options.client_cert_source() == (b"cert", b"key")
+ assert options.quota_project_id == "quote-proj"
+ assert options.api_key == "api-key"
+ assert options.scopes == [
+ "https://www.googleapis.com/auth/cloud-platform",
+ "https://www.googleapis.com/auth/cloud-platform.read-only",
+ ]
+
+
+def test_constructor_with_both_api_key_and_credentials_file():
+ with pytest.raises(ValueError):
+ client_options.ClientOptions(
+ api_key="api-key",
+ credentials_file="path/to/credentials.json",
+ )
+
+
def test_from_dict():
options = client_options.from_dict(
{
"api_endpoint": "foo.googleapis.com",
+ "universe_domain": "googleapis.com",
"client_cert_source": get_client_cert,
"quota_project_id": "quote-proj",
"credentials_file": "path/to/credentials.json",
@@ -83,10 +120,12 @@
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
],
+ "api_audience": "foo2.googleapis.com",
}
)
assert options.api_endpoint == "foo.googleapis.com"
+ assert options.universe_domain == "googleapis.com"
assert options.client_cert_source() == (b"cert", b"key")
assert options.quota_project_id == "quote-proj"
assert options.credentials_file == "path/to/credentials.json"
@@ -94,6 +133,8 @@
"https://www.googleapis.com/auth/cloud-platform",
"https://www.googleapis.com/auth/cloud-platform.read-only",
]
+ assert options.api_key is None
+ assert options.api_audience == "foo2.googleapis.com"
def test_from_dict_bad_argument():
@@ -108,10 +149,22 @@
def test_repr():
- options = client_options.ClientOptions(api_endpoint="foo.googleapis.com")
-
- assert (
- repr(options)
- == "ClientOptions: {'api_endpoint': 'foo.googleapis.com', 'client_cert_source': None, 'client_encrypted_cert_source': None}"
- or "ClientOptions: {'client_encrypted_cert_source': None, 'client_cert_source': None, 'api_endpoint': 'foo.googleapis.com'}"
+ expected_keys = set(
+ [
+ "api_endpoint",
+ "universe_domain",
+ "client_cert_source",
+ "client_encrypted_cert_source",
+ "quota_project_id",
+ "credentials_file",
+ "scopes",
+ "api_key",
+ "api_audience",
+ ]
)
+ options = client_options.ClientOptions(api_endpoint="foo.googleapis.com")
+ options_repr = repr(options)
+ options_keys = vars(options).keys()
+ assert match(r"ClientOptions:", options_repr)
+ assert match(r".*'api_endpoint': 'foo.googleapis.com'.*", options_repr)
+ assert options_keys == expected_keys
diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py
index 622f58a..e3f8f90 100644
--- a/tests/unit/test_exceptions.py
+++ b/tests/unit/test_exceptions.py
@@ -14,15 +14,15 @@
import http.client
import json
+from unittest import mock
-import mock
import pytest
import requests
try:
import grpc
from grpc_status import rpc_status
-except ImportError:
+except ImportError: # pragma: NO COVER
grpc = rpc_status = None
from google.api_core import exceptions
@@ -275,31 +275,56 @@
return status_detail
+def create_error_info_details():
+ info = error_details_pb2.ErrorInfo(
+ reason="SERVICE_DISABLED",
+ domain="googleapis.com",
+ metadata={
+ "consumer": "projects/455411330361",
+ "service": "translate.googleapis.com",
+ },
+ )
+ status_detail = any_pb2.Any()
+ status_detail.Pack(info)
+ return status_detail
+
+
def test_error_details_from_rest_response():
bad_request_detail = create_bad_request_details()
+ error_info_detail = create_error_info_details()
status = status_pb2.Status()
status.code = 3
status.message = (
"3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
)
status.details.append(bad_request_detail)
+ status.details.append(error_info_detail)
# See JSON schema in https://cloud.google.com/apis/design/errors#http_mapping
http_response = make_response(
- json.dumps({"error": json.loads(json_format.MessageToJson(status))}).encode(
- "utf-8"
- )
+ json.dumps(
+ {"error": json.loads(json_format.MessageToJson(status, sort_keys=True))}
+ ).encode("utf-8")
)
exception = exceptions.from_http_response(http_response)
- want_error_details = [json.loads(json_format.MessageToJson(bad_request_detail))]
+ want_error_details = [
+ json.loads(json_format.MessageToJson(bad_request_detail)),
+ json.loads(json_format.MessageToJson(error_info_detail)),
+ ]
assert want_error_details == exception.details
+
# 404 POST comes from make_response.
assert str(exception) == (
"404 POST https://example.com/: 3 INVALID_ARGUMENT:"
" One of content, or gcs_content_uri must be set."
" [{'@type': 'type.googleapis.com/google.rpc.BadRequest',"
- " 'fieldViolations': [{'field': 'document.content',"
- " 'description': 'Must have some text content to annotate.'}]}]"
+ " 'fieldViolations': [{'description': 'Must have some text content to annotate.',"
+ " 'field': 'document.content'}]},"
+ " {'@type': 'type.googleapis.com/google.rpc.ErrorInfo',"
+ " 'domain': 'googleapis.com',"
+ " 'metadata': {'consumer': 'projects/455411330361',"
+ " 'service': 'translate.googleapis.com'},"
+ " 'reason': 'SERVICE_DISABLED'}]"
)
@@ -311,6 +336,11 @@
)
exception = exceptions.from_http_response(response)
assert exception.details == []
+ assert (
+ exception.reason is None
+ and exception.domain is None
+ and exception.metadata is None
+ )
@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
@@ -320,10 +350,12 @@
status.message = (
"3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set."
)
- status_detail = create_bad_request_details()
- status.details.append(status_detail)
+ status_br_detail = create_bad_request_details()
+ status_ei_detail = create_error_info_details()
+ status.details.append(status_br_detail)
+ status.details.append(status_ei_detail)
- # Actualy error doesn't matter as long as its grpc.Call,
+ # The actual error doesn't matter as long as its grpc.Call,
# because from_call is mocked.
error = mock.create_autospec(grpc.Call, instance=True)
with mock.patch("grpc_status.rpc_status.from_call") as m:
@@ -331,8 +363,13 @@
exception = exceptions.from_grpc_error(error)
bad_request_detail = error_details_pb2.BadRequest()
- status_detail.Unpack(bad_request_detail)
- assert exception.details == [bad_request_detail]
+ error_info_detail = error_details_pb2.ErrorInfo()
+ status_br_detail.Unpack(bad_request_detail)
+ status_ei_detail.Unpack(error_info_detail)
+ assert exception.details == [bad_request_detail, error_info_detail]
+ assert exception.reason == error_info_detail.reason
+ assert exception.domain == error_info_detail.domain
+ assert exception.metadata == error_info_detail.metadata
@pytest.mark.skipif(grpc is None, reason="gRPC not importable")
@@ -351,3 +388,8 @@
m.return_value = status
exception = exceptions.from_grpc_error(error)
assert exception.details == [status_detail]
+ assert (
+ exception.reason is None
+ and exception.domain is None
+ and exception.metadata is None
+ )
diff --git a/tests/unit/test_extended_operation.py b/tests/unit/test_extended_operation.py
new file mode 100644
index 0000000..ab55066
--- /dev/null
+++ b/tests/unit/test_extended_operation.py
@@ -0,0 +1,246 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import dataclasses
+import enum
+import typing
+from unittest import mock
+
+import pytest
+
+from google.api_core import exceptions
+from google.api_core import extended_operation
+from google.api_core import retry
+
+TEST_OPERATION_NAME = "test/extended_operation"
+
+
[email protected](frozen=True)
+class CustomOperation:
+ class StatusCode(enum.Enum):
+ UNKNOWN = 0
+ DONE = 1
+ PENDING = 2
+
+ class LROCustomErrors:
+ class LROCustomError:
+ def __init__(self, code: str = "", message: str = ""):
+ self.code = code
+ self.message = message
+
+ def __init__(self, errors: typing.List[LROCustomError] = []):
+ self.errors = errors
+
+ name: str
+ status: StatusCode
+ error_code: typing.Optional[int] = None
+ error_message: typing.Optional[str] = None
+ armor_class: typing.Optional[int] = None
+ # Note: `error` can be removed once proposal A from
+ # b/284179390 is implemented.
+ error: typing.Optional[LROCustomErrors] = None
+
+ # Note: in generated clients, this property must be generated for each
+ # extended operation message type.
+ # The status may be an enum, a string, or a bool. If it's a string or enum,
+ # its text is compared to the string "DONE".
+ @property
+ def done(self):
+ return self.status.name == "DONE"
+
+
+def make_extended_operation(responses=None):
+ client_operations_responses = responses or [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ )
+ ]
+
+ refresh = mock.Mock(spec=["__call__"], side_effect=client_operations_responses)
+ refresh.responses = client_operations_responses
+ cancel = mock.Mock(spec=["__call__"])
+ extended_operation_future = extended_operation.ExtendedOperation.make(
+ refresh,
+ cancel,
+ client_operations_responses[0],
+ )
+
+ return extended_operation_future, refresh, cancel
+
+
+def test_constructor():
+ ex_op, refresh, _ = make_extended_operation()
+ assert ex_op._extended_operation == refresh.responses[0]
+ assert not ex_op.cancelled()
+ assert not ex_op.done()
+ assert ex_op.name == TEST_OPERATION_NAME
+ assert ex_op.status == CustomOperation.StatusCode.PENDING
+ assert ex_op.error_code is None
+ assert ex_op.error_message is None
+
+
+def test_done():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ ),
+ # Second response indicates that the operation has finished.
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
+ ),
+ # Bumper to make sure we stop polling on DONE.
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_message="Gone too far!",
+ ),
+ ]
+ ex_op, refresh, _ = make_extended_operation(responses)
+
+ # Start out not done.
+ assert not ex_op.done()
+ assert refresh.call_count == 1
+
+ # Refresh brings us to the done state.
+ assert ex_op.done()
+ assert refresh.call_count == 2
+ assert not ex_op.error_message
+
+ # Make sure that subsequent checks are no-ops.
+ assert ex_op.done()
+ assert refresh.call_count == 2
+ assert not ex_op.error_message
+
+
+def test_cancellation():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ ),
+ # Second response indicates that the operation was cancelled.
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
+ ),
+ ]
+ ex_op, _, cancel = make_extended_operation(responses)
+
+ assert not ex_op.cancelled()
+
+ assert ex_op.cancel()
+ assert ex_op.cancelled()
+ cancel.assert_called_once_with()
+
+ # Cancelling twice should have no effect.
+ assert not ex_op.cancel()
+ cancel.assert_called_once_with()
+
+
+def test_done_w_retry():
+ # Not sure what's going on here with the coverage, so just ignore it.
+ test_retry = retry.Retry(predicate=lambda x: True) # pragma: NO COVER
+
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING
+ ),
+ CustomOperation(
+ name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE
+ ),
+ ]
+
+ ex_op, refresh, _ = make_extended_operation(responses)
+
+ ex_op.done(retry=test_retry)
+
+ refresh.assert_called_once_with(retry=test_retry)
+
+
+def test_error():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_code=400,
+ error_message="Bad request",
+ ),
+ ]
+
+ ex_op, _, _ = make_extended_operation(responses)
+
+ # Defaults to CallError when grpc is not installed
+ with pytest.raises(exceptions.BadRequest):
+ ex_op.result()
+
+ # Test GCE custom LRO Error. See b/284179390
+ # Note: This test case can be removed once proposal A from
+ # b/284179390 is implemented.
+ _EXCEPTION_CODE = "INCOMPATIBLE_BACKEND_SERVICES"
+ _EXCEPTION_MESSAGE = "Validation failed for instance group"
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_code=400,
+ error_message="Bad request",
+ error=CustomOperation.LROCustomErrors(
+ errors=[
+ CustomOperation.LROCustomErrors.LROCustomError(
+ code=_EXCEPTION_CODE, message=_EXCEPTION_MESSAGE
+ )
+ ]
+ ),
+ ),
+ ]
+
+ ex_op, _, _ = make_extended_operation(responses)
+
+ # Defaults to CallError when grpc is not installed
+ with pytest.raises(
+ exceptions.BadRequest, match=f"{_EXCEPTION_CODE}: {_EXCEPTION_MESSAGE}"
+ ):
+ ex_op.result()
+
+ # Inconsistent result
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ error_code=2112,
+ ),
+ ]
+
+ ex_op, _, _ = make_extended_operation(responses)
+
+ with pytest.raises(exceptions.GoogleAPICallError):
+ ex_op.result()
+
+
+def test_pass_through():
+ responses = [
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.PENDING,
+ armor_class=10,
+ ),
+ CustomOperation(
+ name=TEST_OPERATION_NAME,
+ status=CustomOperation.StatusCode.DONE,
+ armor_class=20,
+ ),
+ ]
+ ex_op, _, _ = make_extended_operation(responses)
+
+ assert ex_op.armor_class == 10
+ ex_op.result()
+ assert ex_op.armor_class == 20
diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py
index ca969e4..8de9d8c 100644
--- a/tests/unit/test_grpc_helpers.py
+++ b/tests/unit/test_grpc_helpers.py
@@ -12,12 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+from unittest import mock
+
import pytest
try:
import grpc
-except ImportError:
+except ImportError: # pragma: NO COVER
pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import exceptions
@@ -195,6 +196,23 @@
wrapped.trailing_metadata.assert_called_once_with()
+class TestGrpcStream(Test_StreamingResponseIterator):
+ @staticmethod
+ def _make_one(wrapped, **kw):
+ return grpc_helpers.GrpcStream(wrapped, **kw)
+
+ def test_grpc_stream_attributes(self):
+ """
+ Should be both a grpc.Call and an iterable
+ """
+ call = self._make_one(None)
+ assert isinstance(call, grpc.Call)
+ # should implement __iter__
+ assert hasattr(call, "__iter__")
+ it = call.__iter__()
+ assert hasattr(it, "__next__")
+
+
def test_wrap_stream_okay():
expected_responses = [1, 2, 3]
callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses))
@@ -222,7 +240,7 @@
callable_.assert_called_once_with(1, 2, three="four")
-def test_wrap_stream_iterable_iterface():
+def test_wrap_stream_iterable_interface():
response_iter = mock.create_autospec(grpc.Call, instance=True)
callable_ = mock.Mock(spec=["__call__"], return_value=response_iter)
@@ -348,35 +366,121 @@
wrap_stream_errors.assert_called_once_with(callable_)
[email protected]("grpc.composite_channel_credentials")
[email protected](
+ "attempt_direct_path,target,expected_target",
+ [
+ (None, "example.com:443", "example.com:443"),
+ (False, "example.com:443", "example.com:443"),
+ (True, "example.com:443", "google-c2p:///example.com"),
+ (True, "dns:///example.com", "google-c2p:///example.com"),
+ (True, "another-c2p:///example.com", "another-c2p:///example.com"),
+ ],
+)
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.secure_channel")
-def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call):
- target = "example.com:443"
+def test_create_channel_implicit(
+ grpc_secure_channel,
+ google_auth_default,
+ composite_creds_call,
+ attempt_direct_path,
+ target,
+ expected_target,
+):
composite_creds = composite_creds_call.return_value
- channel = grpc_helpers.create_channel(target)
+ channel = grpc_helpers.create_channel(
+ target,
+ compression=grpc.Compression.Gzip,
+ attempt_direct_path=attempt_direct_path,
+ )
assert channel is grpc_secure_channel.return_value
- default.assert_called_once_with(scopes=None, default_scopes=None)
+ google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
- if grpc_helpers.HAS_GRPC_GCP:
- grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
+ # The original target is the expected target
+ expected_target = target
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, None
+ )
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, compression=grpc.Compression.Gzip
+ )
[email protected](
+ "attempt_direct_path,target, expected_target",
+ [
+ (None, "example.com:443", "example.com:443"),
+ (False, "example.com:443", "example.com:443"),
+ (True, "example.com:443", "google-c2p:///example.com"),
+ (True, "dns:///example.com", "google-c2p:///example.com"),
+ (True, "another-c2p:///example.com", "another-c2p:///example.com"),
+ ],
+)
@mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True)
@mock.patch(
"google.auth.transport.requests.Request",
autospec=True,
return_value=mock.sentinel.Request,
)
[email protected]("grpc.compute_engine_channel_credentials")
[email protected](
+ "google.auth.default",
+ autospec=True,
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
+)
[email protected]("grpc.secure_channel")
+def test_create_channel_implicit_with_default_host(
+ grpc_secure_channel,
+ google_auth_default,
+ composite_creds_call,
+ request,
+ auth_metadata_plugin,
+ attempt_direct_path,
+ target,
+ expected_target,
+):
+ default_host = "example.com"
+ composite_creds = composite_creds_call.return_value
+
+ channel = grpc_helpers.create_channel(
+ target, default_host=default_host, attempt_direct_path=attempt_direct_path
+ )
+
+ assert channel is grpc_secure_channel.return_value
+
+ google_auth_default.assert_called_once_with(scopes=None, default_scopes=None)
+ auth_metadata_plugin.assert_called_once_with(
+ mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host
+ )
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
+ # The original target is the expected target
+ expected_target = target
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, None
+ )
+ else:
+ grpc_secure_channel.assert_called_once_with(
+ expected_target, composite_creds, compression=None
+ )
+
+
[email protected](
+ "attempt_direct_path",
+ [
+ None,
+ False,
+ ],
+)
@mock.patch("grpc.composite_channel_credentials")
@mock.patch(
"google.auth.default",
@@ -384,59 +488,46 @@
return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.secure_channel")
-def test_create_channel_implicit_with_default_host(
- grpc_secure_channel, default, composite_creds_call, request, auth_metadata_plugin
-):
- target = "example.com:443"
- default_host = "example.com"
- composite_creds = composite_creds_call.return_value
-
- channel = grpc_helpers.create_channel(target, default_host=default_host)
-
- assert channel is grpc_secure_channel.return_value
-
- default.assert_called_once_with(scopes=None, default_scopes=None)
- auth_metadata_plugin.assert_called_once_with(
- mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host
- )
-
- if grpc_helpers.HAS_GRPC_GCP:
- grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
- else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
-
-
[email protected]("grpc.composite_channel_credentials")
[email protected](
- "google.auth.default",
- autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
-)
[email protected]("grpc.secure_channel")
def test_create_channel_implicit_with_ssl_creds(
- grpc_secure_channel, default, composite_creds_call
+ grpc_secure_channel, default, composite_creds_call, attempt_direct_path
):
target = "example.com:443"
ssl_creds = grpc.ssl_channel_credentials()
- grpc_helpers.create_channel(target, ssl_credentials=ssl_creds)
+ grpc_helpers.create_channel(
+ target, ssl_credentials=ssl_creds, attempt_direct_path=attempt_direct_path
+ )
default.assert_called_once_with(scopes=None, default_scopes=None)
composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY)
composite_creds = composite_creds_call.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
+def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true():
+ target = "example.com:443"
+ ssl_creds = grpc.ssl_channel_credentials()
+ with pytest.raises(
+ ValueError, match="Using ssl_credentials with Direct Path is not supported"
+ ):
+ grpc_helpers.create_channel(
+ target, ssl_credentials=ssl_creds, attempt_direct_path=True
+ )
+
+
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.secure_channel")
def test_create_channel_implicit_with_scopes(
@@ -451,17 +542,19 @@
default.assert_called_once_with(scopes=["one", "two"], default_scopes=None)
- if grpc_helpers.HAS_GRPC_GCP:
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch(
"google.auth.default",
autospec=True,
- return_value=(mock.sentinel.credentials, mock.sentinel.projet),
+ return_value=(mock.sentinel.credentials, mock.sentinel.project),
)
@mock.patch("grpc.secure_channel")
def test_create_channel_implicit_with_default_scopes(
@@ -476,10 +569,12 @@
default.assert_called_once_with(scopes=None, default_scopes=["three", "four"])
- if grpc_helpers.HAS_GRPC_GCP:
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
def test_create_channel_explicit_with_duplicate_credentials():
@@ -493,7 +588,7 @@
)
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True)
@mock.patch("grpc.secure_channel")
def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call):
@@ -507,13 +602,16 @@
)
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call):
target = "example.com:443"
@@ -530,13 +628,16 @@
credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None)
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
def test_create_channel_explicit_default_scopes(
grpc_secure_channel, composite_creds_call
@@ -557,13 +658,16 @@
)
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
def test_create_channel_explicit_with_quota_project(
grpc_secure_channel, composite_creds_call
@@ -582,13 +686,16 @@
credentials.with_quota_project.assert_called_once_with("project-foo")
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
@@ -610,13 +717,16 @@
)
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
@@ -641,13 +751,16 @@
)
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
[email protected]("grpc.composite_channel_credentials")
[email protected]("grpc.compute_engine_channel_credentials")
@mock.patch("grpc.secure_channel")
@mock.patch(
"google.auth.load_credentials_from_file",
@@ -672,17 +785,20 @@
)
assert channel is grpc_secure_channel.return_value
- if grpc_helpers.HAS_GRPC_GCP:
+
+ if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER
grpc_secure_channel.assert_called_once_with(target, composite_creds, None)
else:
- grpc_secure_channel.assert_called_once_with(target, composite_creds)
+ grpc_secure_channel.assert_called_once_with(
+ target, composite_creds, compression=None
+ )
@pytest.mark.skipif(
not grpc_helpers.HAS_GRPC_GCP, reason="grpc_gcp module not available"
)
@mock.patch("grpc_gcp.secure_channel")
-def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel):
+def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel): # pragma: NO COVER
target = "example.com:443"
scopes = ["test_scope"]
@@ -805,6 +921,7 @@
stub = operations_pb2.OperationsStub(channel)
expected_request = operations_pb2.GetOperationRequest(name="meep")
expected_response = operations_pb2.Operation(name="moop")
+ expected_compression = grpc.Compression.NoCompression
expected_metadata = [("red", "blue"), ("two", "shoe")]
expected_credentials = mock.sentinel.credentials
channel.GetOperation.response = expected_response
@@ -812,6 +929,7 @@
response = stub.GetOperation(
expected_request,
timeout=42,
+ compression=expected_compression,
metadata=expected_metadata,
credentials=expected_credentials,
)
@@ -819,7 +937,13 @@
assert response == expected_response
assert channel.requests == [("GetOperation", expected_request)]
assert channel.GetOperation.calls == [
- (expected_request, 42, expected_metadata, expected_credentials)
+ (
+ expected_request,
+ 42,
+ expected_metadata,
+ expected_credentials,
+ expected_compression,
+ )
]
def test_unary_unary(self):
diff --git a/tests/unit/test_iam.py b/tests/unit/test_iam.py
index fbd242e..3de1528 100644
--- a/tests/unit/test_iam.py
+++ b/tests/unit/test_iam.py
@@ -167,14 +167,15 @@
assert policy.owners == expected
def test_owners_setter(self):
- import warnings
from google.api_core.iam import OWNER_ROLE
MEMBER = "user:[email protected]"
expected = set([MEMBER])
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
+ with pytest.warns(
+ DeprecationWarning, match="Assigning to 'owners' is deprecated."
+ ) as warned:
policy.owners = [MEMBER]
(warning,) = warned
@@ -191,14 +192,15 @@
assert policy.editors == expected
def test_editors_setter(self):
- import warnings
from google.api_core.iam import EDITOR_ROLE
MEMBER = "user:[email protected]"
expected = set([MEMBER])
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
+ with pytest.warns(
+ DeprecationWarning, match="Assigning to 'editors' is deprecated."
+ ) as warned:
policy.editors = [MEMBER]
(warning,) = warned
@@ -215,14 +217,15 @@
assert policy.viewers == expected
def test_viewers_setter(self):
- import warnings
from google.api_core.iam import VIEWER_ROLE
MEMBER = "user:[email protected]"
expected = set([MEMBER])
policy = self._make_one()
- with warnings.catch_warnings(record=True) as warned:
+ with pytest.warns(
+ DeprecationWarning, match="Assigning to 'viewers' is deprecated."
+ ) as warned:
policy.viewers = [MEMBER]
(warning,) = warned
@@ -337,12 +340,13 @@
assert policy.to_api_repr() == {}
def test_to_api_repr_binding_w_duplicates(self):
- import warnings
from google.api_core.iam import OWNER_ROLE
OWNER = "group:[email protected]"
policy = self._make_one()
- with warnings.catch_warnings(record=True):
+ with pytest.warns(
+ DeprecationWarning, match="Assigning to 'owners' is deprecated."
+ ):
policy.owners = [OWNER, OWNER]
assert policy.to_api_repr() == {
"bindings": [{"role": OWNER_ROLE, "members": [OWNER]}]
diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py
index 22e23bc..8068072 100644
--- a/tests/unit/test_operation.py
+++ b/tests/unit/test_operation.py
@@ -13,12 +13,13 @@
# limitations under the License.
-import mock
+from unittest import mock
+
import pytest
try:
import grpc # noqa: F401
-except ImportError:
+except ImportError: # pragma: NO COVER
pytest.skip("No GRPC", allow_module_level=True)
from google.api_core import exceptions
diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py
new file mode 100644
index 0000000..8100a49
--- /dev/null
+++ b/tests/unit/test_packaging.py
@@ -0,0 +1,28 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+
+
+def test_namespace_package_compat(tmp_path):
+ # The ``google`` namespace package should not be masked
+ # by the presence of ``google-api-core``.
+ google = tmp_path / "google"
+ google.mkdir()
+ google.joinpath("othermod.py").write_text("")
+ env = dict(os.environ, PYTHONPATH=str(tmp_path))
+ cmd = [sys.executable, "-m", "google.othermod"]
+ subprocess.check_call(cmd, env=env)
diff --git a/tests/unit/test_page_iterator.py b/tests/unit/test_page_iterator.py
index a44e998..560722c 100644
--- a/tests/unit/test_page_iterator.py
+++ b/tests/unit/test_page_iterator.py
@@ -14,8 +14,8 @@
import math
import types
+from unittest import mock
-import mock
import pytest
from google.api_core import page_iterator
@@ -505,7 +505,8 @@
assert list(next(items_iter)) == [
dict(name=str(i))
for i in range(
- ipage * page_size, min((ipage + 1) * page_size, n_results),
+ ipage * page_size,
+ min((ipage + 1) * page_size, n_results),
)
]
else:
diff --git a/tests/unit/test_path_template.py b/tests/unit/test_path_template.py
index 2c5216e..c34dd0f 100644
--- a/tests/unit/test_path_template.py
+++ b/tests/unit/test_path_template.py
@@ -13,10 +13,11 @@
# limitations under the License.
from __future__ import unicode_literals
+from unittest import mock
-import mock
import pytest
+from google.api import auth_pb2
from google.api_core import path_template
@@ -171,113 +172,264 @@
@pytest.mark.parametrize(
- "http_options, request_kwargs, expected_result",
+ "http_options, message, request_kwargs, expected_result",
[
[
[["get", "/v1/no/template", ""]],
+ None,
{"foo": "bar"},
["get", "/v1/no/template", {}, {"foo": "bar"}],
],
+ [
+ [["get", "/v1/no/template", ""]],
+ auth_pb2.AuthenticationRule(selector="bar"),
+ {},
+ [
+ "get",
+ "/v1/no/template",
+ None,
+ auth_pb2.AuthenticationRule(selector="bar"),
+ ],
+ ],
# Single templates
[
[["get", "/v1/{field}", ""]],
+ None,
{"field": "parent"},
["get", "/v1/parent", {}, {}],
],
[
+ [["get", "/v1/{selector}", ""]],
+ auth_pb2.AuthenticationRule(selector="parent"),
+ {},
+ ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
[["get", "/v1/{field.sub}", ""]],
+ None,
{"field": {"sub": "parent"}, "foo": "bar"},
["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
],
+ [
+ [["get", "/v1/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="bar",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"),
+ ),
+ {},
+ [
+ "get",
+ "/v1/parent",
+ None,
+ auth_pb2.AuthenticationRule(
+ selector="bar", oauth=auth_pb2.OAuthRequirements()
+ ),
+ ],
+ ],
],
)
-def test_transcode_base_case(http_options, request_kwargs, expected_result):
+def test_transcode_base_case(http_options, message, request_kwargs, expected_result):
http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, **request_kwargs)
+ result = path_template.transcode(http_options, message, **request_kwargs)
assert result == expected_result
@pytest.mark.parametrize(
- "http_options, request_kwargs, expected_result",
+ "http_options, message, request_kwargs, expected_result",
[
[
[["get", "/v1/{field.subfield}", ""]],
+ None,
{"field": {"subfield": "parent"}, "foo": "bar"},
["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}],
],
[
+ [["get", "/v1/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="bar",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"),
+ ),
+ {},
+ [
+ "get",
+ "/v1/parent",
+ None,
+ auth_pb2.AuthenticationRule(
+ selector="bar", oauth=auth_pb2.OAuthRequirements()
+ ),
+ ],
+ ],
+ [
[["get", "/v1/{field.subfield.subsubfield}", ""]],
+ None,
{"field": {"subfield": {"subsubfield": "parent"}}, "foo": "bar"},
["get", "/v1/parent", {}, {"field": {"subfield": {}}, "foo": "bar"}],
],
[
[["get", "/v1/{field.subfield1}/{field.subfield2}", ""]],
+ None,
{"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"},
["get", "/v1/parent/child", {}, {"field": {}, "foo": "bar"}],
],
+ [
+ [["get", "/v1/{selector}/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="parent",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"),
+ ),
+ {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"},
+ [
+ "get",
+ "/v1/parent/child",
+ None,
+ auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()),
+ ],
+ ],
],
)
-def test_transcode_subfields(http_options, request_kwargs, expected_result):
+def test_transcode_subfields(http_options, message, request_kwargs, expected_result):
http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, **request_kwargs)
+ result = path_template.transcode(http_options, message, **request_kwargs)
assert result == expected_result
@pytest.mark.parametrize(
- "http_options, request_kwargs, expected_result",
+ "http_options, message, request_kwargs, expected_result",
[
# Single segment wildcard
[
[["get", "/v1/{field=*}", ""]],
+ None,
{"field": "parent"},
["get", "/v1/parent", {}, {}],
],
[
+ [["get", "/v1/{selector=*}", ""]],
+ auth_pb2.AuthenticationRule(selector="parent"),
+ {},
+ ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
[["get", "/v1/{field=a/*/b/*}", ""]],
+ None,
{"field": "a/parent/b/child", "foo": "bar"},
["get", "/v1/a/parent/b/child", {}, {"foo": "bar"}],
],
+ [
+ [["get", "/v1/{selector=a/*/b/*}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent/b/child", allow_without_credential=True
+ ),
+ {},
+ [
+ "get",
+ "/v1/a/parent/b/child",
+ None,
+ auth_pb2.AuthenticationRule(allow_without_credential=True),
+ ],
+ ],
# Double segment wildcard
[
[["get", "/v1/{field=**}", ""]],
+ None,
{"field": "parent/p1"},
["get", "/v1/parent/p1", {}, {}],
],
[
+ [["get", "/v1/{selector=**}", ""]],
+ auth_pb2.AuthenticationRule(selector="parent/p1"),
+ {},
+ ["get", "/v1/parent/p1", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
[["get", "/v1/{field=a/**/b/**}", ""]],
+ None,
{"field": "a/parent/p1/b/child/c1", "foo": "bar"},
["get", "/v1/a/parent/p1/b/child/c1", {}, {"foo": "bar"}],
],
+ [
+ [["get", "/v1/{selector=a/**/b/**}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent/p1/b/child/c1", allow_without_credential=True
+ ),
+ {},
+ [
+ "get",
+ "/v1/a/parent/p1/b/child/c1",
+ None,
+ auth_pb2.AuthenticationRule(allow_without_credential=True),
+ ],
+ ],
# Combined single and double segment wildcard
[
[["get", "/v1/{field=a/*/b/**}", ""]],
+ None,
{"field": "a/parent/b/child/c1"},
["get", "/v1/a/parent/b/child/c1", {}, {}],
],
[
+ [["get", "/v1/{selector=a/*/b/**}", ""]],
+ auth_pb2.AuthenticationRule(selector="a/parent/b/child/c1"),
+ {},
+ ["get", "/v1/a/parent/b/child/c1", None, auth_pb2.AuthenticationRule()],
+ ],
+ [
[["get", "/v1/{field=a/**/b/*}/v2/{name}", ""]],
+ None,
{"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"},
["get", "/v1/a/parent/p1/b/child/v2/first", {}, {"foo": "bar"}],
],
+ [
+ [["get", "/v1/{selector=a/**/b/*}/v2/{oauth.canonical_scopes}", ""]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent/p1/b/child",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first"),
+ ),
+ {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"},
+ [
+ "get",
+ "/v1/a/parent/p1/b/child/v2/first",
+ None,
+ auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()),
+ ],
+ ],
],
)
-def test_transcode_with_wildcard(http_options, request_kwargs, expected_result):
+def test_transcode_with_wildcard(
+ http_options, message, request_kwargs, expected_result
+):
http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, **request_kwargs)
+ result = path_template.transcode(http_options, message, **request_kwargs)
assert result == expected_result
@pytest.mark.parametrize(
- "http_options, request_kwargs, expected_result",
+ "http_options, message, request_kwargs, expected_result",
[
# Single field body
[
[["post", "/v1/no/template", "data"]],
+ None,
{"data": {"id": 1, "info": "some info"}, "foo": "bar"},
["post", "/v1/no/template", {"id": 1, "info": "some info"}, {"foo": "bar"}],
],
[
+ [["post", "/v1/no/template", "oauth"]],
+ auth_pb2.AuthenticationRule(
+ selector="bar",
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"),
+ ),
+ {},
+ [
+ "post",
+ "/v1/no/template",
+ auth_pb2.OAuthRequirements(canonical_scopes="child"),
+ auth_pb2.AuthenticationRule(selector="bar"),
+ ],
+ ],
+ [
[["post", "/v1/{field=a/*}/b/{name=**}", "data"]],
+ None,
{
"field": "a/parent",
"name": "first/last",
@@ -291,9 +443,29 @@
{"foo": "bar"},
],
],
+ [
+ [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "oauth"]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ requirements=[auth_pb2.AuthRequirement(provider_id="p")],
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
+ ),
+ {},
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ auth_pb2.OAuthRequirements(),
+ auth_pb2.AuthenticationRule(
+ requirements=[auth_pb2.AuthRequirement(provider_id="p")],
+ allow_without_credential=True,
+ ),
+ ],
+ ],
# Wildcard body
[
[["post", "/v1/{field=a/*}/b/{name=**}", "*"]],
+ None,
{
"field": "a/parent",
"name": "first/last",
@@ -307,16 +479,38 @@
{},
],
],
+ [
+ [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"]],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
+ ),
+ {
+ "field": "a/parent",
+ "name": "first/last",
+ "data": {"id": 1, "info": "some info"},
+ "foo": "bar",
+ },
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ auth_pb2.AuthenticationRule(
+ allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
+ ),
+ auth_pb2.AuthenticationRule(),
+ ],
+ ],
],
)
-def test_transcode_with_body(http_options, request_kwargs, expected_result):
+def test_transcode_with_body(http_options, message, request_kwargs, expected_result):
http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, **request_kwargs)
+ result = path_template.transcode(http_options, message, **request_kwargs)
assert result == expected_result
@pytest.mark.parametrize(
- "http_options, request_kwargs, expected_result",
+ "http_options, message, request_kwargs, expected_result",
[
# Additional bindings
[
@@ -324,6 +518,7 @@
["post", "/v1/{field=a/*}/b/{name=**}", "extra_data"],
["post", "/v1/{field=a/*}/b/{name=**}", "*"],
],
+ None,
{
"field": "a/parent",
"name": "first/last",
@@ -339,35 +534,104 @@
],
[
[
+ [
+ "post",
+ "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}",
+ "extra_data",
+ ],
+ ["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"],
+ ],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"),
+ ),
+ {},
+ [
+ "post",
+ "/v1/a/parent/b/first/last",
+ auth_pb2.AuthenticationRule(
+ allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
+ ),
+ auth_pb2.AuthenticationRule(),
+ ],
+ ],
+ [
+ [
["get", "/v1/{field=a/*}/b/{name=**}", ""],
["get", "/v1/{field=a/*}/b/first/last", ""],
],
+ None,
{"field": "a/parent", "foo": "bar"},
["get", "/v1/a/parent/b/first/last", {}, {"foo": "bar"}],
],
+ [
+ [
+ ["get", "/v1/{selector=a/*}/b/{oauth.allow_without_credential=**}", ""],
+ ["get", "/v1/{selector=a/*}/b/first/last", ""],
+ ],
+ auth_pb2.AuthenticationRule(
+ selector="a/parent",
+ allow_without_credential=True,
+ oauth=auth_pb2.OAuthRequirements(),
+ ),
+ {},
+ [
+ "get",
+ "/v1/a/parent/b/first/last",
+ None,
+ auth_pb2.AuthenticationRule(
+ allow_without_credential=True, oauth=auth_pb2.OAuthRequirements()
+ ),
+ ],
+ ],
],
)
def test_transcode_with_additional_bindings(
- http_options, request_kwargs, expected_result
+ http_options, message, request_kwargs, expected_result
):
http_options, expected_result = helper_test_transcode(http_options, expected_result)
- result = path_template.transcode(http_options, **request_kwargs)
+ result = path_template.transcode(http_options, message, **request_kwargs)
assert result == expected_result
@pytest.mark.parametrize(
- "http_options, request_kwargs",
+ "http_options, message, request_kwargs",
[
- [[["get", "/v1/{name}", ""]], {"foo": "bar"}],
- [[["get", "/v1/{name}", ""]], {"name": "first/last"}],
- [[["get", "/v1/{name=mr/*/*}", ""]], {"name": "first/last"}],
- [[["post", "/v1/{name}", "data"]], {"name": "first/last"}],
+ [[["get", "/v1/{name}", ""]], None, {"foo": "bar"}],
+ [[["get", "/v1/{selector}", ""]], auth_pb2.AuthenticationRule(), {}],
+ [[["get", "/v1/{name}", ""]], auth_pb2.AuthenticationRule(), {}],
+ [[["get", "/v1/{name}", ""]], None, {"name": "first/last"}],
+ [
+ [["get", "/v1/{selector}", ""]],
+ auth_pb2.AuthenticationRule(selector="first/last"),
+ {},
+ ],
+ [[["get", "/v1/{name=mr/*/*}", ""]], None, {"name": "first/last"}],
+ [
+ [["get", "/v1/{selector=mr/*/*}", ""]],
+ auth_pb2.AuthenticationRule(selector="first/last"),
+ {},
+ ],
+ [[["post", "/v1/{name}", "data"]], None, {"name": "first/last"}],
+ [
+ [["post", "/v1/{selector}", "data"]],
+ auth_pb2.AuthenticationRule(selector="first"),
+ {},
+ ],
+ [[["post", "/v1/{first_name}", "data"]], None, {"last_name": "last"}],
+ [
+ [["post", "/v1/{first_name}", ""]],
+ auth_pb2.AuthenticationRule(selector="first"),
+ {},
+ ],
],
)
-def test_transcode_fails(http_options, request_kwargs):
+def test_transcode_fails(http_options, message, request_kwargs):
http_options, _ = helper_test_transcode(http_options, range(4))
- with pytest.raises(ValueError):
- path_template.transcode(http_options, **request_kwargs)
+ with pytest.raises(ValueError) as exc_info:
+ path_template.transcode(http_options, message, **request_kwargs)
+ assert str(exc_info.value).count("URI") == len(http_options)
def helper_test_transcode(http_options_list, expected_result_list):
@@ -385,5 +649,4 @@
}
if expected_result_list[2]:
expected_result["body"] = expected_result_list[2]
-
return (http_options, expected_result)
diff --git a/tests/unit/test_protobuf_helpers.py b/tests/unit/test_protobuf_helpers.py
index 3df45df..5678d3b 100644
--- a/tests/unit/test_protobuf_helpers.py
+++ b/tests/unit/test_protobuf_helpers.py
@@ -12,9 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys
-
import pytest
+import re
from google.api import http_pb2
from google.api_core import protobuf_helpers
@@ -67,7 +66,12 @@
in_message = any_pb2.Any()
in_message.Pack(date_pb2.Date(year=1990))
- with pytest.raises(TypeError):
+ with pytest.raises(
+ TypeError,
+ match=re.escape(
+ "Could not convert `google.type.Date` with underlying type `google.protobuf.any_pb2.Any` to `google.type.TimeOfDay`"
+ ),
+ ):
protobuf_helpers.from_any_pb(timeofday_pb2.TimeOfDay, in_message)
@@ -476,11 +480,6 @@
]
[email protected](
- sys.version_info.major == 2,
- reason="Field names with trailing underscores can only be created"
- "through proto-plus, which is Python 3 only.",
-)
def test_field_mask_ignore_trailing_underscore():
import proto
@@ -496,11 +495,6 @@
]
[email protected](
- sys.version_info.major == 2,
- reason="Field names with trailing underscores can only be created"
- "through proto-plus, which is Python 3 only.",
-)
def test_field_mask_ignore_trailing_underscore_with_nesting():
import proto
diff --git a/tests/unit/test_rest_helpers.py b/tests/unit/test_rest_helpers.py
index 5932fa5..ff1a43f 100644
--- a/tests/unit/test_rest_helpers.py
+++ b/tests/unit/test_rest_helpers.py
@@ -36,9 +36,26 @@
def test_flatten_simple_dict():
- assert rest_helpers.flatten_query_params({"a": "abc", "b": "def"}) == [
+ obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76}
+ assert rest_helpers.flatten_query_params(obj) == [
("a", "abc"),
("b", "def"),
+ ("c", True),
+ ("d", False),
+ ("e", 10),
+ ("f", -3.76),
+ ]
+
+
+def test_flatten_simple_dict_strict():
+ obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76}
+ assert rest_helpers.flatten_query_params(obj, strict=True) == [
+ ("a", "abc"),
+ ("b", "def"),
+ ("c", "true"),
+ ("d", "false"),
+ ("e", "10"),
+ ("f", "-3.76"),
]
diff --git a/tests/unit/test_rest_streaming.py b/tests/unit/test_rest_streaming.py
new file mode 100644
index 0000000..0f998df
--- /dev/null
+++ b/tests/unit/test_rest_streaming.py
@@ -0,0 +1,296 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import logging
+import random
+import time
+from typing import List
+from unittest.mock import patch
+
+import proto
+import pytest
+import requests
+
+from google.api_core import rest_streaming
+from google.api import http_pb2
+from google.api import httpbody_pb2
+
+from ..helpers import Composer, Song, EchoResponse, parse_responses
+
+
+__protobuf__ = proto.module(package=__name__)
+SEED = int(time.time())
+logging.info(f"Starting sync rest streaming tests with random seed: {SEED}")
+random.seed(SEED)
+
+
+class ResponseMock(requests.Response):
+ class _ResponseItr:
+ def __init__(self, _response_bytes: bytes, random_split=False):
+ self._responses_bytes = _response_bytes
+ self._i = 0
+ self._random_split = random_split
+
+ def __next__(self):
+ if self._i == len(self._responses_bytes):
+ raise StopIteration
+ if self._random_split:
+ n = random.randint(1, len(self._responses_bytes[self._i :]))
+ else:
+ n = 1
+ x = self._responses_bytes[self._i : self._i + n]
+ self._i += n
+ return x.decode("utf-8")
+
+ def __init__(
+ self,
+ responses: List[proto.Message],
+ response_cls,
+ random_split=False,
+ ):
+ super().__init__()
+ self._responses = responses
+ self._random_split = random_split
+ self._response_message_cls = response_cls
+
+ def _parse_responses(self):
+ return parse_responses(self._response_message_cls, self._responses)
+
+ def close(self):
+ raise NotImplementedError()
+
+ def iter_content(self, *args, **kwargs):
+ return self._ResponseItr(
+ self._parse_responses(),
+ random_split=self._random_split,
+ )
+
+
[email protected](
+ "random_split,resp_message_is_proto_plus",
+ [(False, True), (False, False)],
+)
+def test_next_simple(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = EchoResponse
+ responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")]
+ else:
+ response_type = httpbody_pb2.HttpBody
+ responses = [
+ httpbody_pb2.HttpBody(content_type="hello world"),
+ httpbody_pb2.HttpBody(content_type="yes"),
+ ]
+
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ assert list(itr) == responses
+
+
[email protected](
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+def test_next_nested(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = Song
+ responses = [
+ Song(title="some song", composer=Composer(given_name="some name")),
+ Song(title="another song", date_added=datetime.datetime(2021, 12, 17)),
+ ]
+ else:
+ # Although `http_pb2.HttpRule`` is used in the response, any response message
+ # can be used which meets this criteria for the test of having a nested field.
+ response_type = http_pb2.HttpRule
+ responses = [
+ http_pb2.HttpRule(
+ selector="some selector",
+ custom=http_pb2.CustomHttpPattern(kind="some kind"),
+ ),
+ http_pb2.HttpRule(
+ selector="another selector",
+ custom=http_pb2.CustomHttpPattern(path="some path"),
+ ),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ assert list(itr) == responses
+
+
[email protected](
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+def test_next_stress(random_split, resp_message_is_proto_plus):
+ n = 50
+ if resp_message_is_proto_plus:
+ response_type = Song
+ responses = [
+ Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i))
+ for i in range(n)
+ ]
+ else:
+ response_type = http_pb2.HttpRule
+ responses = [
+ http_pb2.HttpRule(
+ selector="selector_%d" % i,
+ custom=http_pb2.CustomHttpPattern(path="path_%d" % i),
+ )
+ for i in range(n)
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ assert list(itr) == responses
+
+
[email protected](
+ "random_split,resp_message_is_proto_plus",
+ [
+ (True, True),
+ (False, True),
+ (True, False),
+ (False, False),
+ ],
+)
+def test_next_escaped_characters_in_string(random_split, resp_message_is_proto_plus):
+ if resp_message_is_proto_plus:
+ response_type = Song
+ composer_with_relateds = Composer()
+ relateds = ["Artist A", "Artist B"]
+ composer_with_relateds.relateds = relateds
+
+ responses = [
+ Song(
+ title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n")
+ ),
+ Song(
+ title='{"this is weird": "totally"}',
+ composer=Composer(given_name="\\{}\\"),
+ ),
+ Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds),
+ ]
+ else:
+ response_type = http_pb2.Http
+ responses = [
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='ti"tle\nfoo\tbar{}',
+ custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"),
+ )
+ ]
+ ),
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='{"this is weird": "totally"}',
+ custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+ )
+ ]
+ ),
+ http_pb2.Http(
+ rules=[
+ http_pb2.HttpRule(
+ selector='\\{"key": ["value",]}\\',
+ custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+ )
+ ]
+ ),
+ ]
+ resp = ResponseMock(
+ responses=responses, random_split=random_split, response_cls=response_type
+ )
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ assert list(itr) == responses
+
+
[email protected]("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+def test_next_not_array(response_type):
+ with patch.object(
+ ResponseMock, "iter_content", return_value=iter('{"hello": 0}')
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ next(itr)
+ mock_method.assert_called_once()
+
+
[email protected]("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+def test_cancel(response_type):
+ with patch.object(ResponseMock, "close", return_value=None) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ itr.cancel()
+ mock_method.assert_called_once()
+
+
[email protected](
+ "response_type,return_value",
+ [
+ (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")),
+ (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")),
+ ],
+)
+def test_check_buffer(response_type, return_value):
+ with patch.object(
+ ResponseMock,
+ "_parse_responses",
+ return_value=return_value,
+ ):
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ next(itr)
+ next(itr)
+
+
[email protected]("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+def test_next_html(response_type):
+ with patch.object(
+ ResponseMock, "iter_content", return_value=iter("<!DOCTYPE html><html></html>")
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming.ResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ next(itr)
+ mock_method.assert_called_once()
+
+
+def test_invalid_response_class():
+ class SomeClass:
+ pass
+
+ resp = ResponseMock(responses=[], response_cls=SomeClass)
+ with pytest.raises(
+ ValueError,
+ match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message",
+ ):
+ rest_streaming.ResponseIterator(resp, SomeClass)
diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py
deleted file mode 100644
index 199ca55..0000000
--- a/tests/unit/test_retry.py
+++ /dev/null
@@ -1,458 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import datetime
-import itertools
-import re
-
-import mock
-import pytest
-import requests.exceptions
-
-from google.api_core import exceptions
-from google.api_core import retry
-from google.auth import exceptions as auth_exceptions
-
-
-def test_if_exception_type():
- predicate = retry.if_exception_type(ValueError)
-
- assert predicate(ValueError())
- assert not predicate(TypeError())
-
-
-def test_if_exception_type_multiple():
- predicate = retry.if_exception_type(ValueError, TypeError)
-
- assert predicate(ValueError())
- assert predicate(TypeError())
- assert not predicate(RuntimeError())
-
-
-def test_if_transient_error():
- assert retry.if_transient_error(exceptions.InternalServerError(""))
- assert retry.if_transient_error(exceptions.TooManyRequests(""))
- assert retry.if_transient_error(exceptions.ServiceUnavailable(""))
- assert retry.if_transient_error(requests.exceptions.ConnectionError(""))
- assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError(""))
- assert retry.if_transient_error(auth_exceptions.TransportError(""))
- assert not retry.if_transient_error(exceptions.InvalidArgument(""))
-
-
-# Make uniform return half of its maximum, which will be the calculated
-# sleep time.
[email protected]("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
-def test_exponential_sleep_generator_base_2(uniform):
- gen = retry.exponential_sleep_generator(1, 60, multiplier=2)
-
- result = list(itertools.islice(gen, 8))
- assert result == [1, 2, 4, 8, 16, 32, 60, 60]
-
-
[email protected]("time.sleep", autospec=True)
[email protected](
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-def test_retry_target_success(utcnow, sleep):
- predicate = retry.if_exception_type(ValueError)
- call_count = [0]
-
- def target():
- call_count[0] += 1
- if call_count[0] < 3:
- raise ValueError()
- return 42
-
- result = retry.retry_target(target, predicate, range(10), None)
-
- assert result == 42
- assert call_count[0] == 3
- sleep.assert_has_calls([mock.call(0), mock.call(1)])
-
-
[email protected]("time.sleep", autospec=True)
[email protected](
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-def test_retry_target_w_on_error(utcnow, sleep):
- predicate = retry.if_exception_type(ValueError)
- call_count = {"target": 0}
- to_raise = ValueError()
-
- def target():
- call_count["target"] += 1
- if call_count["target"] < 3:
- raise to_raise
- return 42
-
- on_error = mock.Mock()
-
- result = retry.retry_target(target, predicate, range(10), None, on_error=on_error)
-
- assert result == 42
- assert call_count["target"] == 3
-
- on_error.assert_has_calls([mock.call(to_raise), mock.call(to_raise)])
- sleep.assert_has_calls([mock.call(0), mock.call(1)])
-
-
[email protected]("time.sleep", autospec=True)
[email protected](
- "google.api_core.datetime_helpers.utcnow",
- return_value=datetime.datetime.min,
- autospec=True,
-)
-def test_retry_target_non_retryable_error(utcnow, sleep):
- predicate = retry.if_exception_type(ValueError)
- exception = TypeError()
- target = mock.Mock(side_effect=exception)
-
- with pytest.raises(TypeError) as exc_info:
- retry.retry_target(target, predicate, range(10), None)
-
- assert exc_info.value == exception
- sleep.assert_not_called()
-
-
[email protected]("time.sleep", autospec=True)
[email protected]("google.api_core.datetime_helpers.utcnow", autospec=True)
-def test_retry_target_deadline_exceeded(utcnow, sleep):
- predicate = retry.if_exception_type(ValueError)
- exception = ValueError("meep")
- target = mock.Mock(side_effect=exception)
- # Setup the timeline so that the first call takes 5 seconds but the second
- # call takes 6, which puts the retry over the deadline.
- utcnow.side_effect = [
- # The first call to utcnow establishes the start of the timeline.
- datetime.datetime.min,
- datetime.datetime.min + datetime.timedelta(seconds=5),
- datetime.datetime.min + datetime.timedelta(seconds=11),
- ]
-
- with pytest.raises(exceptions.RetryError) as exc_info:
- retry.retry_target(target, predicate, range(10), deadline=10)
-
- assert exc_info.value.cause == exception
- assert exc_info.match("Deadline of 10.0s exceeded")
- assert exc_info.match("last exception: meep")
- assert target.call_count == 2
-
-
-def test_retry_target_bad_sleep_generator():
- with pytest.raises(ValueError, match="Sleep generator"):
- retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None)
-
-
-class TestRetry(object):
- def test_constructor_defaults(self):
- retry_ = retry.Retry()
- assert retry_._predicate == retry.if_transient_error
- assert retry_._initial == 1
- assert retry_._maximum == 60
- assert retry_._multiplier == 2
- assert retry_._deadline == 120
- assert retry_._on_error is None
- assert retry_.deadline == 120
-
- def test_constructor_options(self):
- _some_function = mock.Mock()
-
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=_some_function,
- )
- assert retry_._predicate == mock.sentinel.predicate
- assert retry_._initial == 1
- assert retry_._maximum == 2
- assert retry_._multiplier == 3
- assert retry_._deadline == 4
- assert retry_._on_error is _some_function
-
- def test_with_deadline(self):
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_deadline(42)
- assert retry_ is not new_retry
- assert new_retry._deadline == 42
-
- # the rest of the attributes should remain the same
- assert new_retry._predicate is retry_._predicate
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_predicate(self):
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_predicate(mock.sentinel.predicate)
- assert retry_ is not new_retry
- assert new_retry._predicate == mock.sentinel.predicate
-
- # the rest of the attributes should remain the same
- assert new_retry._deadline == retry_._deadline
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
- assert new_retry._on_error is retry_._on_error
-
- def test_with_delay_noop(self):
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay()
- assert retry_ is not new_retry
- assert new_retry._initial == retry_._initial
- assert new_retry._maximum == retry_._maximum
- assert new_retry._multiplier == retry_._multiplier
-
- def test_with_delay(self):
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay(initial=5, maximum=6, multiplier=7)
- assert retry_ is not new_retry
- assert new_retry._initial == 5
- assert new_retry._maximum == 6
- assert new_retry._multiplier == 7
-
- # the rest of the attributes should remain the same
- assert new_retry._deadline == retry_._deadline
- assert new_retry._predicate is retry_._predicate
- assert new_retry._on_error is retry_._on_error
-
- def test_with_delay_partial_options(self):
- retry_ = retry.Retry(
- predicate=mock.sentinel.predicate,
- initial=1,
- maximum=2,
- multiplier=3,
- deadline=4,
- on_error=mock.sentinel.on_error,
- )
- new_retry = retry_.with_delay(initial=4)
- assert retry_ is not new_retry
- assert new_retry._initial == 4
- assert new_retry._maximum == 2
- assert new_retry._multiplier == 3
-
- new_retry = retry_.with_delay(maximum=4)
- assert retry_ is not new_retry
- assert new_retry._initial == 1
- assert new_retry._maximum == 4
- assert new_retry._multiplier == 3
-
- new_retry = retry_.with_delay(multiplier=4)
- assert retry_ is not new_retry
- assert new_retry._initial == 1
- assert new_retry._maximum == 2
- assert new_retry._multiplier == 4
-
- # the rest of the attributes should remain the same
- assert new_retry._deadline == retry_._deadline
- assert new_retry._predicate is retry_._predicate
- assert new_retry._on_error is retry_._on_error
-
- def test___str__(self):
- def if_exception_type(exc):
- return bool(exc) # pragma: NO COVER
-
- # Explicitly set all attributes as changed Retry defaults should not
- # cause this test to start failing.
- retry_ = retry.Retry(
- predicate=if_exception_type,
- initial=1.0,
- maximum=60.0,
- multiplier=2.0,
- deadline=120.0,
- on_error=None,
- )
- assert re.match(
- (
- r"<Retry predicate=<function.*?if_exception_type.*?>, "
- r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, "
- r"on_error=None>"
- ),
- str(retry_),
- )
-
- @mock.patch("time.sleep", autospec=True)
- def test___call___and_execute_success(self, sleep):
- retry_ = retry.Retry()
- target = mock.Mock(spec=["__call__"], return_value=42)
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- decorated = retry_(target)
- target.assert_not_called()
-
- result = decorated("meep")
-
- assert result == 42
- target.assert_called_once_with("meep")
- sleep.assert_not_called()
-
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
- @mock.patch("time.sleep", autospec=True)
- def test___call___and_execute_retry(self, sleep, uniform):
-
- on_error = mock.Mock(spec=["__call__"], side_effect=[None])
- retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError))
-
- target = mock.Mock(spec=["__call__"], side_effect=[ValueError(), 42])
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- decorated = retry_(target, on_error=on_error)
- target.assert_not_called()
-
- result = decorated("meep")
-
- assert result == 42
- assert target.call_count == 2
- target.assert_has_calls([mock.call("meep"), mock.call("meep")])
- sleep.assert_called_once_with(retry_._initial)
- assert on_error.call_count == 1
-
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
- @mock.patch("time.sleep", autospec=True)
- def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform):
-
- on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10)
- retry_ = retry.Retry(
- predicate=retry.if_exception_type(ValueError),
- initial=1.0,
- maximum=1024.0,
- multiplier=2.0,
- deadline=9.9,
- )
-
- utcnow = datetime.datetime.utcnow()
- utcnow_patcher = mock.patch(
- "google.api_core.datetime_helpers.utcnow", return_value=utcnow
- )
-
- target = mock.Mock(spec=["__call__"], side_effect=[ValueError()] * 10)
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- decorated = retry_(target, on_error=on_error)
- target.assert_not_called()
-
- with utcnow_patcher as patched_utcnow:
- # Make sure that calls to fake time.sleep() also advance the mocked
- # time clock.
- def increase_time(sleep_delay):
- patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay)
-
- sleep.side_effect = increase_time
-
- with pytest.raises(exceptions.RetryError):
- decorated("meep")
-
- assert target.call_count == 5
- target.assert_has_calls([mock.call("meep")] * 5)
- assert on_error.call_count == 5
-
- # check the delays
- assert sleep.call_count == 4 # once between each successive target calls
- last_wait = sleep.call_args.args[0]
- total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
-
- assert last_wait == 2.9 # and not 8.0, because the last delay was shortened
- assert total_wait == 9.9 # the same as the deadline
-
- @mock.patch("time.sleep", autospec=True)
- def test___init___without_retry_executed(self, sleep):
- _some_function = mock.Mock()
-
- retry_ = retry.Retry(
- predicate=retry.if_exception_type(ValueError), on_error=_some_function
- )
- # check the proper creation of the class
- assert retry_._on_error is _some_function
-
- target = mock.Mock(spec=["__call__"], side_effect=[42])
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- wrapped = retry_(target)
-
- result = wrapped("meep")
-
- assert result == 42
- target.assert_called_once_with("meep")
- sleep.assert_not_called()
- _some_function.assert_not_called()
-
- # Make uniform return half of its maximum, which is the calculated sleep time.
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0)
- @mock.patch("time.sleep", autospec=True)
- def test___init___when_retry_is_executed(self, sleep, uniform):
- _some_function = mock.Mock()
-
- retry_ = retry.Retry(
- predicate=retry.if_exception_type(ValueError), on_error=_some_function
- )
- # check the proper creation of the class
- assert retry_._on_error is _some_function
-
- target = mock.Mock(
- spec=["__call__"], side_effect=[ValueError(), ValueError(), 42]
- )
- # __name__ is needed by functools.partial.
- target.__name__ = "target"
-
- wrapped = retry_(target)
- target.assert_not_called()
-
- result = wrapped("meep")
-
- assert result == 42
- assert target.call_count == 3
- assert _some_function.call_count == 2
- target.assert_has_calls([mock.call("meep"), mock.call("meep")])
- sleep.assert_any_call(retry_._initial)
diff --git a/tests/unit/test_timeout.py b/tests/unit/test_timeout.py
index 30d624e..2c20202 100644
--- a/tests/unit/test_timeout.py
+++ b/tests/unit/test_timeout.py
@@ -14,14 +14,13 @@
import datetime
import itertools
+from unittest import mock
-import mock
-
-from google.api_core import timeout
+from google.api_core import timeout as timeouts
def test__exponential_timeout_generator_base_2():
- gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None)
+ gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None)
result = list(itertools.islice(gen, 8))
assert result == [1, 2, 4, 8, 16, 32, 60, 60]
@@ -34,7 +33,7 @@
datetime.datetime.min + datetime.timedelta(seconds=n) for n in range(15)
]
- gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0)
+ gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0)
result = list(itertools.islice(gen, 14))
# Should grow until the cumulative time is > 30s, then start decreasing as
@@ -42,22 +41,105 @@
assert result == [1, 2, 4, 8, 16, 24, 23, 22, 21, 20, 19, 18, 17, 16]
-class TestConstantTimeout(object):
+class TestTimeToDeadlineTimeout(object):
def test_constructor(self):
- timeout_ = timeout.ConstantTimeout()
+ timeout_ = timeouts.TimeToDeadlineTimeout()
assert timeout_._timeout is None
def test_constructor_args(self):
- timeout_ = timeout.ConstantTimeout(42.0)
+ timeout_ = timeouts.TimeToDeadlineTimeout(42.0)
assert timeout_._timeout == 42.0
def test___str__(self):
- timeout_ = timeout.ConstantTimeout(1)
+ timeout_ = timeouts.TimeToDeadlineTimeout(1)
+ assert str(timeout_) == "<TimeToDeadlineTimeout timeout=1.0>"
+
+ def test_apply(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+
+ datetime.datetime.now(tz=datetime.timezone.utc)
+ datetime.timedelta(seconds=1)
+
+ now = datetime.datetime.now(tz=datetime.timezone.utc)
+
+ times = [
+ now,
+ now + datetime.timedelta(seconds=0.0009),
+ now + datetime.timedelta(seconds=1),
+ now + datetime.timedelta(seconds=39),
+ now + datetime.timedelta(seconds=42),
+ now + datetime.timedelta(seconds=43),
+ ]
+
+ def _clock():
+ return times.pop(0)
+
+ timeout_ = timeouts.TimeToDeadlineTimeout(42.0, _clock)
+ wrapped = timeout_(target)
+
+ wrapped()
+ target.assert_called_with(timeout=42.0)
+ wrapped()
+ target.assert_called_with(timeout=41.0)
+ wrapped()
+ target.assert_called_with(timeout=3.0)
+ wrapped()
+ target.assert_called_with(timeout=42.0)
+ wrapped()
+ target.assert_called_with(timeout=42.0)
+
+ def test_apply_no_timeout(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+
+ datetime.datetime.now(tz=datetime.timezone.utc)
+ datetime.timedelta(seconds=1)
+
+ now = datetime.datetime.now(tz=datetime.timezone.utc)
+
+ times = [
+ now,
+ now + datetime.timedelta(seconds=0.0009),
+ now + datetime.timedelta(seconds=1),
+ now + datetime.timedelta(seconds=2),
+ ]
+
+ def _clock():
+ return times.pop(0)
+
+ timeout_ = timeouts.TimeToDeadlineTimeout(clock=_clock)
+ wrapped = timeout_(target)
+
+ wrapped()
+ target.assert_called_with()
+ wrapped()
+ target.assert_called_with()
+
+ def test_apply_passthrough(self):
+ target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
+ timeout_ = timeouts.TimeToDeadlineTimeout(42.0)
+ wrapped = timeout_(target)
+
+ wrapped(1, 2, meep="moop")
+
+ target.assert_called_once_with(1, 2, meep="moop", timeout=42.0)
+
+
+class TestConstantTimeout(object):
+ def test_constructor(self):
+ timeout_ = timeouts.ConstantTimeout()
+ assert timeout_._timeout is None
+
+ def test_constructor_args(self):
+ timeout_ = timeouts.ConstantTimeout(42.0)
+ assert timeout_._timeout == 42.0
+
+ def test___str__(self):
+ timeout_ = timeouts.ConstantTimeout(1)
assert str(timeout_) == "<ConstantTimeout timeout=1.0>"
def test_apply(self):
target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeout.ConstantTimeout(42.0)
+ timeout_ = timeouts.ConstantTimeout(42.0)
wrapped = timeout_(target)
wrapped()
@@ -66,7 +148,7 @@
def test_apply_passthrough(self):
target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeout.ConstantTimeout(42.0)
+ timeout_ = timeouts.ConstantTimeout(42.0)
wrapped = timeout_(target)
wrapped(1, 2, meep="moop")
@@ -76,30 +158,30 @@
class TestExponentialTimeout(object):
def test_constructor(self):
- timeout_ = timeout.ExponentialTimeout()
- assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT
- assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT
- assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER
- assert timeout_._deadline == timeout._DEFAULT_DEADLINE
+ timeout_ = timeouts.ExponentialTimeout()
+ assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT
+ assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT
+ assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER
+ assert timeout_._deadline == timeouts._DEFAULT_DEADLINE
def test_constructor_args(self):
- timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4)
+ timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4)
assert timeout_._initial == 1
assert timeout_._maximum == 2
assert timeout_._multiplier == 3
assert timeout_._deadline == 4
def test_with_timeout(self):
- original_timeout = timeout.ExponentialTimeout()
+ original_timeout = timeouts.ExponentialTimeout()
timeout_ = original_timeout.with_deadline(42)
assert original_timeout is not timeout_
- assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT
- assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT
- assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER
+ assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT
+ assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT
+ assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER
assert timeout_._deadline == 42
def test___str__(self):
- timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4)
+ timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4)
assert str(timeout_) == (
"<ExponentialTimeout initial=1.0, maximum=2.0, multiplier=3.0, "
"deadline=4.0>"
@@ -107,7 +189,7 @@
def test_apply(self):
target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeout.ExponentialTimeout(1, 10, 2)
+ timeout_ = timeouts.ExponentialTimeout(1, 10, 2)
wrapped = timeout_(target)
wrapped()
@@ -121,7 +203,7 @@
def test_apply_passthrough(self):
target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
- timeout_ = timeout.ExponentialTimeout(42.0, 100, 2)
+ timeout_ = timeouts.ExponentialTimeout(42.0, 100, 2)
wrapped = timeout_(target)
wrapped(1, 2, meep="moop")
diff --git a/tests/unit/test_universe.py b/tests/unit/test_universe.py
new file mode 100644
index 0000000..214e00a
--- /dev/null
+++ b/tests/unit/test_universe.py
@@ -0,0 +1,63 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+from google.api_core import universe
+
+
+class _Fake_Credentials:
+ def __init__(self, universe_domain=None):
+ if universe_domain:
+ self.universe_domain = universe_domain
+
+
+def test_determine_domain():
+ domain_client = "foo.com"
+ domain_env = "bar.com"
+
+ assert universe.determine_domain(domain_client, domain_env) == domain_client
+ assert universe.determine_domain(None, domain_env) == domain_env
+ assert universe.determine_domain(domain_client, None) == domain_client
+ assert universe.determine_domain(None, None) == universe.DEFAULT_UNIVERSE
+
+ with pytest.raises(universe.EmptyUniverseError):
+ universe.determine_domain("", None)
+
+ with pytest.raises(universe.EmptyUniverseError):
+ universe.determine_domain(None, "")
+
+
+def test_compare_domains():
+ fake_domain = "foo.com"
+ another_fake_domain = "bar.com"
+
+ assert universe.compare_domains(universe.DEFAULT_UNIVERSE, _Fake_Credentials())
+ assert universe.compare_domains(fake_domain, _Fake_Credentials(fake_domain))
+
+ with pytest.raises(universe.UniverseMismatchError) as excinfo:
+ universe.compare_domains(
+ universe.DEFAULT_UNIVERSE, _Fake_Credentials(fake_domain)
+ )
+ assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0
+ assert str(excinfo.value).find(fake_domain) >= 0
+
+ with pytest.raises(universe.UniverseMismatchError) as excinfo:
+ universe.compare_domains(fake_domain, _Fake_Credentials())
+ assert str(excinfo.value).find(fake_domain) >= 0
+ assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0
+
+ with pytest.raises(universe.UniverseMismatchError) as excinfo:
+ universe.compare_domains(fake_domain, _Fake_Credentials(another_fake_domain))
+ assert str(excinfo.value).find(fake_domain) >= 0
+ assert str(excinfo.value).find(another_fake_domain) >= 0
diff --git a/tests/unit/test_version_header.py b/tests/unit/test_version_header.py
new file mode 100644
index 0000000..ea7028e
--- /dev/null
+++ b/tests/unit/test_version_header.py
@@ -0,0 +1,23 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from google.api_core import version_header
+
+
[email protected]("version_identifier", ["some_value", ""])
+def test_to_api_version_header(version_identifier):
+ value = version_header.to_api_version_header(version_identifier)
+ assert value == (version_header.API_VERSION_METADATA_KEY, version_identifier)