Merge remote-tracking branch 'origin/upstream' am: aedd517eda
Original change: undetermined
Change-Id: I985e4c20f19561dfa6aee8a768061cdf01696ffd
Signed-off-by: Automerger Merge Worker <[email protected]>
diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..26fb670
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,12 @@
+root = true
+
+[*.{py,pyi,rst,md,yml,yaml,toml,json}]
+trim_trailing_whitespace = true
+insert_final_newline = true
+indent_style = space
+
+[*.{py,pyi,toml,json}]
+indent_size = 4
+
+[*.{yml,yaml}]
+indent_size = 2
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..5c56314
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,10 @@
+version: 2
+updates:
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: monthly
+    groups:
+      actions:
+        patterns:
+          - "*"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..9f06280
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,135 @@
+name: Test and lint
+
+on:
+  schedule:
+    - cron: "0 2 * * *" # 2am UTC
+  push:
+    branches:
+      - main
+  pull_request:
+  workflow_dispatch:
+
+permissions:
+  contents: read
+
+env:
+  FORCE_COLOR: 1
+  PIP_DISABLE_PIP_VERSION_CHECK: 1
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  tests:
+    name: Run tests
+
+    if: >-
+      # if 'schedule' was the trigger,
+      # don't run it on contributors' forks
+      ${{
+        github.event_name != 'schedule'
+        || (
+          github.repository == 'python/typing_extensions'
+          && github.event_name == 'schedule'
+        )
+      }}
+
+    strategy:
+      fail-fast: false
+      matrix:
+        # We try to test on the earliest available bugfix release of each
+        # Python version, because typing sometimes changed between bugfix releases.
+        # For available versions, see:
+        # https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
+        python-version:
+          - "3.8"
+          - "3.8.0"
+          - "3.9"
+          - "3.9.0"
+          - "3.10"
+          - "3.10.0"
+          - "3.11"
+          - "3.11.0"
+          - "3.12"
+          - "3.13"
+          - "pypy3.8"
+          - "pypy3.9"
+          - "pypy3.10"
+
+    runs-on: ubuntu-20.04
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+          allow-prereleases: true
+
+      - name: Test typing_extensions
+        run: |
+          # Be wary of running `pip install` here, since it becomes easy for us to
+          # accidentally pick up typing_extensions as installed by a dependency
+          cd src
+          python -m unittest test_typing_extensions.py
+
+      - name: Test CPython typing test suite
+        # Test suite fails on PyPy even without typing_extensions
+        if: ${{ !startsWith(matrix.python-version, 'pypy') }}
+        run: |
+          cd src
+          # Run the typing test suite from CPython with typing_extensions installed,
+          # because we monkeypatch typing under some circumstances.
+          python -c 'import typing_extensions; import test.__main__' test_typing -v
+
+  linting:
+    name: Lint
+
+    # no reason to run this as a cron job
+    if: github.event_name != 'schedule'
+
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3"
+          cache: "pip"
+          cache-dependency-path: "test-requirements.txt"
+      - name: Install dependencies
+        run: pip install -r test-requirements.txt
+      - name: Lint implementation
+        run: ruff check
+
+  create-issue-on-failure:
+    name: Create an issue if daily tests failed
+    runs-on: ubuntu-latest
+
+    needs: [tests]
+
+    if: >-
+      ${{
+        github.repository == 'python/typing_extensions'
+        && always()
+        && github.event_name == 'schedule'
+        && needs.tests.result == 'failure'
+      }}
+
+    permissions:
+      issues: write
+
+    steps:
+      - uses: actions/github-script@v7
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            await github.rest.issues.create({
+              owner: "python",
+              repo: "typing_extensions",
+              title: `Daily tests failed on ${new Date().toDateString()}`,
+              body: "Runs listed here: https://github.com/python/typing_extensions/actions/workflows/ci.yml",
+            })
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000..4770472
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,149 @@
+# Based on
+# https://packaging.python.org/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/
+
+name: Test builds and publish Python distribution to PyPI
+
+on:
+  release:
+    types: [published]
+  push:
+    branches: [main]
+  pull_request:
+
+permissions:
+  contents: read
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  build:
+    name: Build distribution
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.x"
+      - name: Check package metadata
+        run: python scripts/check_package.py ${{ github.ref }}
+      - name: Install pypa/build
+        run: |
+          # Be wary of running `pip install` here, since it becomes easy for us to
+          # accidentally pick up typing_extensions as installed by a dependency
+          python -m pip install --upgrade build
+          python -m pip list
+      - name: Build a binary wheel and a source tarball
+        run: python -m build
+      - name: Store the distribution packages
+        uses: actions/upload-artifact@v4
+        with:
+          name: python-package-distributions
+          path: dist/
+
+  test-wheel:
+    name: Test wheel
+    needs:
+      - build
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.x"
+      - name: Download all the dists
+        uses: actions/download-artifact@v4
+        with:
+          name: python-package-distributions
+          path: dist/
+      - name: Install wheel
+        run: |
+          export path_to_file=$(find dist -type f -name "typing_extensions-*.whl")
+          echo "::notice::Installing wheel: $path_to_file"
+          python -m pip install --user $path_to_file
+          python -m pip list
+      - name: Run typing_extensions tests against installed package
+        run: rm src/typing_extensions.py && python src/test_typing_extensions.py
+
+  test-sdist:
+    name: Test source distribution
+    needs:
+      - build
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.x"
+      - name: Download all the dists
+        uses: actions/download-artifact@v4
+        with:
+          name: python-package-distributions
+          path: dist/
+      - name: Unpack and test source distribution
+        run: |
+          export path_to_file=$(find dist -type f -name "typing_extensions-*.tar.gz")
+          echo "::notice::Unpacking source distribution: $path_to_file"
+          tar xzf $path_to_file -C dist/
+          cd ${path_to_file%.tar.gz}/src
+          python test_typing_extensions.py
+
+  test-sdist-installed:
+    name: Test installed source distribution
+    needs:
+      - build
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.x"
+      - name: Download all the dists
+        uses: actions/download-artifact@v4
+        with:
+          name: python-package-distributions
+          path: dist/
+      - name: Install source distribution
+        run: |
+          export path_to_file=$(find dist -type f -name "typing_extensions-*.tar.gz")
+          echo "::notice::Installing source distribution: $path_to_file"
+          python -m pip install --user $path_to_file
+          python -m pip list
+      - name: Run typing_extensions tests against installed package
+        run: rm src/typing_extensions.py && python src/test_typing_extensions.py
+
+  publish-to-pypi:
+    name: >-
+      Publish Python distribution to PyPI
+    if: github.event_name == 'release' # only publish to PyPI on releases
+    needs:
+      - test-sdist
+      - test-sdist-installed
+      - test-wheel
+      - build
+    runs-on: ubuntu-latest
+    environment:
+      name: publish
+      url: https://pypi.org/p/typing-extensions
+    permissions:
+      id-token: write # IMPORTANT: mandatory for trusted publishing
+
+    steps:
+      - name: Download all the dists
+        uses: actions/download-artifact@v4
+        with:
+          name: python-package-distributions
+          path: dist/
+      - name: Ensure exactly one sdist and one wheel have been downloaded
+        run: test $(ls dist/*.tar.gz | wc -l) = 1 && test $(ls dist/*.whl | wc -l) = 1
+      - name: Publish distribution to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
diff --git a/.github/workflows/third_party.yml b/.github/workflows/third_party.yml
new file mode 100644
index 0000000..8424d8f
--- /dev/null
+++ b/.github/workflows/third_party.yml
@@ -0,0 +1,409 @@
+# This workflow is a daily cron job,
+# running the tests of various third-party libraries that use us.
+# This helps us spot regressions early,
+# and helps flag when third-party libraries are making incorrect assumptions
+# that might cause them to break when we cut a new release.
+
+name: Third-party tests
+
+on:
+  schedule:
+    - cron: "30 2 * * *" # 02:30 UTC
+  pull_request:
+    paths:
+      - ".github/workflows/third_party.yml"
+  workflow_dispatch:
+
+permissions:
+  contents: read
+
+env:
+  PIP_DISABLE_PIP_VERSION_CHECK: 1
+  FORCE_COLOR: 1
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  pydantic:
+    name: pydantic tests
+    if: >-
+      # if 'schedule' was the trigger,
+      # don't run it on contributors' forks
+      ${{
+        github.event_name != 'schedule'
+        || (
+          github.repository == 'python/typing_extensions'
+          && github.event_name == 'schedule'
+        )
+      }}
+    strategy:
+      fail-fast: false
+      matrix:
+        # PyPy is deliberately omitted here,
+        # since pydantic's tests intermittently segfault on PyPy,
+        # and it's nothing to do with typing_extensions
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    steps:
+      - name: Checkout pydantic
+        uses: actions/checkout@v4
+        with:
+          repository: pydantic/pydantic
+      - name: Edit pydantic pyproject.toml
+        # pydantic's python-requires means pdm won't let us add typing-extensions-latest
+        # as a requirement unless we do this
+        run: sed -i 's/^requires-python = .*/requires-python = ">=3.8"/' pyproject.toml
+      - name: Checkout typing_extensions
+        uses: actions/checkout@v4
+        with:
+          path: typing-extensions-latest
+      - name: Setup pdm for pydantic tests
+        uses: pdm-project/setup-pdm@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+          allow-python-prereleases: true
+      - name: Add local version of typing_extensions as a dependency
+        run: pdm add ./typing-extensions-latest
+      - name: Install pydantic test dependencies
+        run: pdm install -G testing -G email
+      - name: List installed dependencies
+        run: pdm list -vv # pdm equivalent to `pip list`
+      - name: Run pydantic tests
+        run: pdm run pytest
+
+  typing_inspect:
+    name: typing_inspect tests
+    if: >-
+      # if 'schedule' was the trigger,
+      # don't run it on contributors' forks
+      ${{
+        github.event_name != 'schedule'
+        || (
+          github.repository == 'python/typing_extensions'
+          && github.event_name == 'schedule'
+        )
+      }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8", "3.9", "3.10", "3.11"]
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    steps:
+      - name: Checkout typing_inspect
+        uses: actions/checkout@v4
+        with:
+          repository: ilevkivskyi/typing_inspect
+          path: typing_inspect
+      - name: Checkout typing_extensions
+        uses: actions/checkout@v4
+        with:
+          path: typing-extensions-latest
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install uv
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
+      - name: Install typing_inspect test dependencies
+        run: |
+          cd typing_inspect
+          uv pip install --system -r test-requirements.txt --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
+      - name: Install typing_extensions latest
+        run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
+      - name: List all installed dependencies
+        run: uv pip freeze
+      - name: Run typing_inspect tests
+        run: |
+          cd typing_inspect
+          pytest
+
+  pyanalyze:
+    name: pyanalyze tests
+    if: >-
+      # if 'schedule' was the trigger,
+      # don't run it on contributors' forks
+      ${{
+        github.event_name != 'schedule'
+        || (
+          github.repository == 'python/typing_extensions'
+          && github.event_name == 'schedule'
+        )
+      }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    steps:
+      - name: Check out pyanalyze
+        uses: actions/checkout@v4
+        with:
+          repository: quora/pyanalyze
+          path: pyanalyze
+      - name: Checkout typing_extensions
+        uses: actions/checkout@v4
+        with:
+          path: typing-extensions-latest
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+          allow-prereleases: true
+      - name: Install uv
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
+      - name: Install pyanalyze test requirements
+        run: |
+          cd pyanalyze
+          uv pip install --system 'pyanalyze[tests] @ .' --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
+      - name: Install typing_extensions latest
+        run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
+      - name: List all installed dependencies
+        run: uv pip freeze
+      - name: Run pyanalyze tests
+        run: |
+          cd pyanalyze
+          pytest pyanalyze/
+
+  typeguard:
+    name: typeguard tests
+    if: >-
+      # if 'schedule' was the trigger,
+      # don't run it on contributors' forks
+      ${{
+        github.event_name != 'schedule'
+        || (
+          github.repository == 'python/typing_extensions'
+          && github.event_name == 'schedule'
+        )
+      }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "pypy3.10"]
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    steps:
+      - name: Check out typeguard
+        uses: actions/checkout@v4
+        with:
+          repository: agronholm/typeguard
+          path: typeguard
+      - name: Checkout typing_extensions
+        uses: actions/checkout@v4
+        with:
+          path: typing-extensions-latest
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+          allow-prereleases: true
+      - name: Install uv
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
+      - name: Install typeguard test requirements
+        run: |
+          cd typeguard
+          uv pip install --system "typeguard[test] @ ." --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
+      - name: Install typing_extensions latest
+        run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
+      - name: List all installed dependencies
+        run: uv pip freeze
+      - name: Run typeguard tests
+        run: |
+          cd typeguard
+          pytest
+
+  typed-argument-parser:
+    name: typed-argument-parser tests
+    if: >-
+      # if 'schedule' was the trigger,
+      # don't run it on contributors' forks
+      ${{
+        github.event_name != 'schedule'
+        || (
+          github.repository == 'python/typing_extensions'
+          && github.event_name == 'schedule'
+        )
+      }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    steps:
+      - name: Check out typed-argument-parser
+        uses: actions/checkout@v4
+        with:
+          repository: swansonk14/typed-argument-parser
+          path: typed-argument-parser
+      - name: Checkout typing_extensions
+        uses: actions/checkout@v4
+        with:
+          path: typing-extensions-latest
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install uv
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
+      - name: Configure git for typed-argument-parser tests
+        # typed-argument parser does this in their CI,
+        # and the tests fail unless we do this
+        run: |
+          git config --global user.email "[email protected]"
+          git config --global user.name "Your Name"
+      - name: Install typed-argument-parser test requirements
+        run: |
+          cd typed-argument-parser
+          uv pip install --system "typed-argument-parser @ ." --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
+          uv pip install --system pytest --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
+      - name: Install typing_extensions latest
+        run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
+      - name: List all installed dependencies
+        run: uv pip freeze
+      - name: Run typed-argument-parser tests
+        run: |
+          cd typed-argument-parser
+          pytest
+
+  mypy:
+    name: stubtest & mypyc tests
+    if: >-
+      # if 'schedule' was the trigger,
+      # don't run it on contributors' forks
+      ${{
+        github.event_name != 'schedule'
+        || (
+          github.repository == 'python/typing_extensions'
+          && github.event_name == 'schedule'
+        )
+      }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    steps:
+      - name: Checkout mypy for stubtest and mypyc tests
+        uses: actions/checkout@v4
+        with:
+          repository: python/mypy
+          path: mypy
+      - name: Checkout typing_extensions
+        uses: actions/checkout@v4
+        with:
+          path: typing-extensions-latest
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+          allow-prereleases: true
+      - name: Install uv
+        run: curl -LsSf https://astral.sh/uv/install.sh | sh
+      - name: Install mypy test requirements
+        run: |
+          cd mypy
+          uv pip install --system -r test-requirements.txt --exclude-newer $(git show -s --date=format:'%Y-%m-%dT%H:%M:%SZ' --format=%cd HEAD)
+          uv pip install --system -e .
+      - name: Install typing_extensions latest
+        run: uv pip install --system "typing-extensions @ ./typing-extensions-latest"
+      - name: List all installed dependencies
+        run: uv pip freeze
+      - name: Run stubtest & mypyc tests
+        run: |
+          cd mypy
+          pytest -n 2 ./mypy/test/teststubtest.py ./mypyc/test/test_run.py ./mypyc/test/test_external.py
+
+  cattrs:
+    name: cattrs tests
+    if: >-
+      # if 'schedule' was the trigger,
+      # don't run it on contributors' forks
+      ${{
+        github.event_name != 'schedule'
+        || (
+          github.repository == 'python/typing_extensions'
+          && github.event_name == 'schedule'
+        )
+      }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8", "3.9", "3.10", "3.11"]
+    runs-on: ubuntu-latest
+    timeout-minutes: 60
+    steps:
+      - name: Checkout cattrs
+        uses: actions/checkout@v4
+        with:
+          repository: python-attrs/cattrs
+      - name: Checkout typing_extensions
+        uses: actions/checkout@v4
+        with:
+          path: typing-extensions-latest
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install pdm for cattrs
+        run: pip install pdm
+      - name: Add latest typing-extensions as a dependency
+        run: |
+          pdm remove typing-extensions
+          pdm add --dev ./typing-extensions-latest
+      - name: Install cattrs test dependencies
+        run: pdm install --dev -G :all
+      - name: List all installed dependencies
+        run: pdm list -vv
+      - name: Run cattrs tests
+        run: pdm run pytest tests
+
+  create-issue-on-failure:
+    name: Create an issue if daily tests failed
+    runs-on: ubuntu-latest
+
+    needs:
+      - pydantic
+      - typing_inspect
+      - pyanalyze
+      - typeguard
+      - typed-argument-parser
+      - mypy
+      - cattrs
+
+    if: >-
+      ${{
+        github.repository == 'python/typing_extensions'
+        && always()
+        && github.event_name == 'schedule'
+        && (
+          needs.pydantic.result == 'failure'
+          || needs.typing_inspect.result == 'failure'
+          || needs.pyanalyze.result == 'failure'
+          || needs.typeguard.result == 'failure'
+          || needs.typed-argument-parser.result == 'failure'
+          || needs.mypy.result == 'failure'
+          || needs.cattrs.result == 'failure'
+        )
+      }}
+
+    permissions:
+      issues: write
+
+    steps:
+      - uses: actions/github-script@v7
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          script: |
+            await github.rest.issues.create({
+              owner: "python",
+              repo: "typing_extensions",
+              title: `Third-party tests failed on ${new Date().toDateString()}`,
+              body: "Runs listed here: https://github.com/python/typing_extensions/actions/workflows/third_party.yml",
+            })
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..ee36fe7
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,18 @@
+MANIFEST
+
+__pycache__/
+build/
+dist/
+tmp/
+venv*/
+
+.cache/
+.idea/
+.tox/
+.venv*/
+.vscode/
+.python-version
+
+*.swp
+*.pyc
+*.egg-info/
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 0000000..60419be
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,13 @@
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+version: 2
+
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.12"
+
+sphinx:
+  configuration: doc/conf.py
+
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..75d03cb
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,41 @@
+// Copyright 2024 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["external_python_typing_extensions_license"],
+}
+
+license {
+    name: "external_python_typing_extensions_license",
+    license_kinds: [
+        "SPDX-license-identifier-MIT",
+    ],
+    license_text: [
+        "LICENSE",
+    ],
+}
+
+filegroup {
+    name: "typing_extensions_srcs",
+    srcs: [
+        "src/typing_extensions.py",
+    ],
+    path: "src",
+}
+
+python_library {
+    name: "typing_extensions_py",
+    host_supported: true,
+    srcs: [":typing_extensions_srcs"],
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..90f5b68
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,478 @@
+# Release 4.12.2 (June 7, 2024)
+
+- Fix regression in v4.12.0 where specialization of certain
+ generics with an overridden `__eq__` method would raise errors.
+ Patch by Jelle Zijlstra.
+- Fix tests so they pass on 3.13.0b2
+
+# Release 4.12.1 (June 1, 2024)
+
+- Preliminary changes for compatibility with the draft implementation
+ of PEP 649 in Python 3.14. Patch by Jelle Zijlstra.
+- Fix regression in v4.12.0 where nested `Annotated` types would cause
+ `TypeError` to be raised if the nested `Annotated` type had unhashable
+ metadata. Patch by Alex Waygood.
+
+# Release 4.12.0 (May 23, 2024)
+
+This release is mostly the same as 4.12.0rc1 but fixes one more
+longstanding bug.
+
+- Fix incorrect behaviour of `typing_extensions.ParamSpec` on Python 3.8 and
+ 3.9 that meant that
+ `isinstance(typing_extensions.ParamSpec("P"), typing.TypeVar)` would have a
+ different result in some situations depending on whether or not a profiling
+ function had been set using `sys.setprofile`. Patch by Alex Waygood.
+
+# Release 4.12.0rc1 (May 16, 2024)
+
+This release focuses on compatibility with the upcoming release of
+Python 3.13. Most changes are related to the implementation of type
+parameter defaults (PEP 696).
+
+Thanks to all of the people who contributed patches, especially Alex
+Waygood, who did most of the work adapting typing-extensions to the
+CPython PEP 696 implementation.
+
+Full changelog:
+
+- Improve the implementation of type parameter defaults (PEP 696)
+ - Backport the `typing.NoDefault` sentinel object from Python 3.13.
+ TypeVars, ParamSpecs and TypeVarTuples without default values now have
+ their `__default__` attribute set to this sentinel value.
+ - TypeVars, ParamSpecs and TypeVarTuples now have a `has_default()`
+ method, matching `typing.TypeVar`, `typing.ParamSpec` and
+ `typing.TypeVarTuple` on Python 3.13+.
+ - TypeVars, ParamSpecs and TypeVarTuples with `default=None` passed to
+ their constructors now have their `__default__` attribute set to `None`
+ at runtime rather than `types.NoneType`.
+ - Fix most tests for `TypeVar`, `ParamSpec` and `TypeVarTuple` on Python
+ 3.13.0b1 and newer.
+ - Backport CPython PR [#118774](https://github.com/python/cpython/pull/118774),
+ allowing type parameters without default values to follow those with
+ default values in some type parameter lists. Patch by Alex Waygood,
+ backporting a CPython PR by Jelle Zijlstra.
+ - It is now disallowed to use a `TypeVar` with a default value after a
+ `TypeVarTuple` in a type parameter list. This matches the CPython
+ implementation of PEP 696 on Python 3.13+.
+ - Fix bug in PEP-696 implementation where a default value for a `ParamSpec`
+ would be cast to a tuple if a list was provided.
+ Patch by Alex Waygood.
+- Fix `Protocol` tests on Python 3.13.0a6 and newer. 3.13.0a6 adds a new
+ `__static_attributes__` attribute to all classes in Python,
+ which broke some assumptions made by the implementation of
+ `typing_extensions.Protocol`. Similarly, 3.13.0b1 adds the new
+ `__firstlineno__` attribute to all classes.
+- Fix `AttributeError` when using `typing_extensions.runtime_checkable`
+ in combination with `typing.Protocol` on Python 3.12.2 or newer.
+ Patch by Alex Waygood.
+- At runtime, `assert_never` now includes the repr of the argument
+ in the `AssertionError`. Patch by Hashem, backporting the original
+ fix https://github.com/python/cpython/pull/91720 by Jelle Zijlstra.
+- The second and third parameters of `typing_extensions.Generator`,
+ and the second parameter of `typing_extensions.AsyncGenerator`,
+ now default to `None`. This matches the behaviour of `typing.Generator`
+ and `typing.AsyncGenerator` on Python 3.13+.
+- `typing_extensions.ContextManager` and
+ `typing_extensions.AsyncContextManager` now have an optional second
+ parameter, which defaults to `Optional[bool]`. The new parameter
+ signifies the return type of the `__(a)exit__` method, matching
+ `typing.ContextManager` and `typing.AsyncContextManager` on Python
+ 3.13+.
+- Backport `types.CapsuleType` from Python 3.13.
+- Releases are now made using [Trusted Publishers](https://docs.pypi.org/trusted-publishers/)
+ improving the security of the release process. Patch by Jelle Zijlstra.
+
+# Release 4.12.0a1 and 4.12.0a2 (May 16, 2024)
+
+These releases primarily test a revised release workflow. If all goes
+well, release 4.12.0rc1 will follow soon.
+
+# Release 4.11.0 (April 5, 2024)
+
+This feature release provides improvements to various recently
+added features, most importantly type parameter defaults (PEP 696).
+
+There are no changes since 4.11.0rc1.
+
+# Release 4.11.0rc1 (March 24, 2024)
+
+- Fix tests on Python 3.13.0a5. Patch by Jelle Zijlstra.
+- Fix the runtime behavior of type parameters with defaults (PEP 696).
+ Patch by Nadir Chowdhury.
+- Fix minor discrepancy between error messages produced by `typing`
+ and `typing_extensions` on Python 3.10. Patch by Jelle Zijlstra.
+- When `include_extras=False`, `get_type_hints()` now strips `ReadOnly` from the annotation.
+
+# Release 4.10.0 (February 24, 2024)
+
+This feature release adds support for PEP 728 (TypedDict with extra
+items) and PEP 742 (``TypeIs``).
+
+There are no changes since 4.10.0rc1.
+
+# Release 4.10.0rc1 (February 17, 2024)
+
+- Add support for PEP 728, supporting the `closed` keyword argument and the
+ special `__extra_items__` key for TypedDict. Patch by Zixuan James Li.
+- Add support for PEP 742, adding `typing_extensions.TypeIs`. Patch
+ by Jelle Zijlstra.
+- Drop runtime error when a read-only `TypedDict` item overrides a mutable
+ one. Type checkers should still flag this as an error. Patch by Jelle
+ Zijlstra.
+- Speedup `issubclass()` checks against simple runtime-checkable protocols by
+ around 6% (backporting https://github.com/python/cpython/pull/112717, by Alex
+ Waygood).
+- Fix a regression in the implementation of protocols where `typing.Protocol`
+ classes that were not marked as `@runtime_checkable` would be unnecessarily
+ introspected, potentially causing exceptions to be raised if the protocol had
+ problematic members. Patch by Alex Waygood, backporting
+ https://github.com/python/cpython/pull/113401.
+
+# Release 4.9.0 (December 9, 2023)
+
+This feature release adds `typing_extensions.ReadOnly`, as specified
+by PEP 705, and makes various other improvements, especially to
+`@typing_extensions.deprecated()`.
+
+There are no changes since 4.9.0rc1.
+
+# Release 4.9.0rc1 (November 29, 2023)
+
+- Add support for PEP 705, adding `typing_extensions.ReadOnly`. Patch
+ by Jelle Zijlstra.
+- All parameters on `NewType.__call__` are now positional-only. This means that
+ the signature of `typing_extensions.NewType.__call__` now exactly matches the
+ signature of `typing.NewType.__call__`. Patch by Alex Waygood.
+- Fix bug with using `@deprecated` on a mixin class. Inheriting from a
+ deprecated class now raises a `DeprecationWarning`. Patch by Jelle Zijlstra.
+- `@deprecated` now gives a better error message if you pass a non-`str`
+ argument to the `msg` parameter. Patch by Alex Waygood.
+- `@deprecated` is now implemented as a class for better introspectability.
+ Patch by Jelle Zijlstra.
+- Exclude `__match_args__` from `Protocol` members.
+ Backport of https://github.com/python/cpython/pull/110683 by Nikita Sobolev.
+- When creating a `typing_extensions.NamedTuple` class, ensure `__set_name__`
+ is called on all objects that define `__set_name__` and exist in the values
+ of the `NamedTuple` class's class dictionary. Patch by Alex Waygood,
+ backporting https://github.com/python/cpython/pull/111876.
+- Improve the error message when trying to call `issubclass()` against a
+ `Protocol` that has non-method members. Patch by Alex Waygood (backporting
+ https://github.com/python/cpython/pull/112344, by Randolph Scholz).
+
+# Release 4.8.0 (September 17, 2023)
+
+No changes since 4.8.0rc1.
+
+# Release 4.8.0rc1 (September 7, 2023)
+
+- Add `typing_extensions.Doc`, as proposed by PEP 727. Patch by
+ Sebastián Ramírez.
+- Drop support for Python 3.7 (including PyPy-3.7). Patch by Alex Waygood.
+- Fix bug where `get_original_bases()` would return incorrect results when
+ called on a concrete subclass of a generic class. Patch by Alex Waygood
+ (backporting https://github.com/python/cpython/pull/107584, by James
+ Hilton-Balfe).
+- Fix bug where `ParamSpec(default=...)` would raise a `TypeError` on Python
+ versions <3.11. Patch by James Hilton-Balfe.
+
+# Release 4.7.1 (July 2, 2023)
+
+- Fix support for `TypedDict`, `NamedTuple` and `is_protocol` on PyPy-3.7 and
+ PyPy-3.8. Patch by Alex Waygood. Note that PyPy-3.7 and PyPy-3.8 are unsupported
+ by the PyPy project. The next feature release of typing-extensions will
+ drop support for PyPy-3.7 and may also drop support for PyPy-3.8.
+
+# Release 4.7.0 (June 28, 2023)
+
+- This is expected to be the last feature release supporting Python 3.7,
+ which reaches its end of life on June 27, 2023. Version 4.8.0 will support
+ only Python 3.8.0 and up.
+- Fix bug where a `typing_extensions.Protocol` class that had one or more
+ non-callable members would raise `TypeError` when `issubclass()`
+ was called against it, even if it defined a custom `__subclasshook__`
+ method. The correct behaviour -- which has now been restored -- is not to
+ raise `TypeError` in these situations if a custom `__subclasshook__` method
+ is defined. Patch by Alex Waygood (backporting
+ https://github.com/python/cpython/pull/105976).
+
+# Release 4.7.0rc1 (June 21, 2023)
+
+- Add `typing_extensions.get_protocol_members` and
+ `typing_extensions.is_protocol` (backport of CPython PR #104878).
+ Patch by Jelle Zijlstra.
+- `typing_extensions` now re-exports all names in the standard library's
+ `typing` module, except the deprecated `ByteString`. Patch by Jelle
+ Zijlstra.
+- Due to changes in the implementation of `typing_extensions.Protocol`,
+ `typing.runtime_checkable` can now be used on `typing_extensions.Protocol`
+ (previously, users had to use `typing_extensions.runtime_checkable` if they
+ were using `typing_extensions.Protocol`).
+- Align the implementation of `TypedDict` with the implementation in the
+ standard library on Python 3.9 and higher.
+ `typing_extensions.TypedDict` is now a function instead of a class. The
+ private functions `_check_fails`, `_dict_new`, and `_typeddict_new`
+ have been removed. `is_typeddict` now returns `False` when called with
+ `TypedDict` itself as the argument. Patch by Jelle Zijlstra.
+- Declare support for Python 3.12. Patch by Jelle Zijlstra.
+- Fix tests on Python 3.13, which removes support for creating
+ `TypedDict` classes through the keyword-argument syntax. Patch by
+ Jelle Zijlstra.
+- Fix a regression introduced in v4.6.3 that meant that
+ ``issubclass(object, typing_extensions.Protocol)`` would erroneously raise
+ ``TypeError``. Patch by Alex Waygood (backporting the CPython PR
+ https://github.com/python/cpython/pull/105239).
+- Allow `Protocol` classes to inherit from `typing_extensions.Buffer` or
+ `collections.abc.Buffer`. Patch by Alex Waygood (backporting
+ https://github.com/python/cpython/pull/104827, by Jelle Zijlstra).
+- Allow classes to inherit from both `typing.Protocol` and `typing_extensions.Protocol`
+ simultaneously. Since v4.6.0, this caused `TypeError` to be raised due to a
+ metaclass conflict. Patch by Alex Waygood.
+- Backport several deprecations from CPython relating to unusual ways to
+ create `TypedDict`s and `NamedTuple`s. CPython PRs #105609 and #105780
+ by Alex Waygood; `typing_extensions` backport by Jelle Zijlstra.
+ - Creating a `NamedTuple` using the functional syntax with keyword arguments
+ (`NT = NamedTuple("NT", a=int)`) is now deprecated.
+ - Creating a `NamedTuple` with zero fields using the syntax `NT = NamedTuple("NT")`
+ or `NT = NamedTuple("NT", None)` is now deprecated.
+ - Creating a `TypedDict` with zero fields using the syntax `TD = TypedDict("TD")`
+ or `TD = TypedDict("TD", None)` is now deprecated.
+- Fix bug on Python 3.7 where a protocol `X` that had a member `a` would not be
+ considered an implicit subclass of an unrelated protocol `Y` that only has a
+ member `a`. Where the members of `X` are a superset of the members of `Y`,
+ `X` should always be considered a subclass of `Y` iff `Y` is a
+ runtime-checkable protocol that only has callable members. Patch by Alex
+ Waygood (backporting CPython PR
+ https://github.com/python/cpython/pull/105835).
+
+# Release 4.6.3 (June 1, 2023)
+
+- Fix a regression introduced in v4.6.0 in the implementation of
+ runtime-checkable protocols. The regression meant
+ that doing `class Foo(X, typing_extensions.Protocol)`, where `X` was a class that
+ had `abc.ABCMeta` as its metaclass, would then cause subsequent
+ `isinstance(1, X)` calls to erroneously raise `TypeError`. Patch by
+ Alex Waygood (backporting the CPython PR
+ https://github.com/python/cpython/pull/105152).
+- Sync the repository's LICENSE file with that of CPython.
+ `typing_extensions` is distributed under the same license as
+ CPython itself.
+- Skip a problematic test on Python 3.12.0b1. The test fails on 3.12.0b1 due to
+ a bug in CPython, which will be fixed in 3.12.0b2. The
+ `typing_extensions` test suite now passes on 3.12.0b1.
+
+# Release 4.6.2 (May 25, 2023)
+
+- Fix use of `@deprecated` on classes with `__new__` but no `__init__`.
+ Patch by Jelle Zijlstra.
+- Fix regression in version 4.6.1 where comparing a generic class against a
+ runtime-checkable protocol using `isinstance()` would cause `AttributeError`
+ to be raised if using Python 3.7.
+
+# Release 4.6.1 (May 23, 2023)
+
+- Change deprecated `@runtime` to formal API `@runtime_checkable` in the error
+ message. Patch by Xuehai Pan.
+- Fix regression in 4.6.0 where attempting to define a `Protocol` that was
+ generic over a `ParamSpec` or a `TypeVarTuple` would cause `TypeError` to be
+ raised. Patch by Alex Waygood.
+
+# Release 4.6.0 (May 22, 2023)
+
+- `typing_extensions` is now documented at
+ https://typing-extensions.readthedocs.io/en/latest/. Patch by Jelle Zijlstra.
+- Add `typing_extensions.Buffer`, a marker class for buffer types, as proposed
+ by PEP 688. Equivalent to `collections.abc.Buffer` in Python 3.12. Patch by
+ Jelle Zijlstra.
+- Backport two CPython PRs fixing various issues with `typing.Literal`:
+ https://github.com/python/cpython/pull/23294 and
+ https://github.com/python/cpython/pull/23383. Both CPython PRs were
+ originally by Yurii Karabas, and both were backported to Python >=3.9.1, but
+ no earlier. Patch by Alex Waygood.
+
+ A side effect of one of the changes is that equality comparisons of `Literal`
+ objects will now raise a `TypeError` if one of the `Literal` objects being
+ compared has a mutable parameter. (Using mutable parameters with `Literal` is
+ not supported by PEP 586 or by any major static type checkers.)
+- `Literal` is now reimplemented on all Python versions <= 3.10.0. The
+ `typing_extensions` version does not suffer from the bug that was fixed in
+ https://github.com/python/cpython/pull/29334. (The CPython bugfix was
+ backported to CPython 3.10.1 and 3.9.8, but no earlier.)
+- Backport [CPython PR 26067](https://github.com/python/cpython/pull/26067)
+ (originally by Yurii Karabas), ensuring that `isinstance()` calls on
+ protocols raise `TypeError` when the protocol is not decorated with
+ `@runtime_checkable`. Patch by Alex Waygood.
+- Backport several significant performance improvements to runtime-checkable
+ protocols that have been made in Python 3.12 (see
+ https://github.com/python/cpython/issues/74690 for details). Patch by Alex
+ Waygood.
+
+ A side effect of one of the performance improvements is that the members of
+ a runtime-checkable protocol are now considered “frozen” at runtime as soon
+ as the class has been created. Monkey-patching attributes onto a
+ runtime-checkable protocol will still work, but will have no impact on
+ `isinstance()` checks comparing objects to the protocol. See
+ ["What's New in Python 3.12"](https://docs.python.org/3.12/whatsnew/3.12.html#typing)
+ for more details.
+- `isinstance()` checks against runtime-checkable protocols now use
+ `inspect.getattr_static()` rather than `hasattr()` to lookup whether
+ attributes exist (backporting https://github.com/python/cpython/pull/103034).
+ This means that descriptors and `__getattr__` methods are no longer
+ unexpectedly evaluated during `isinstance()` checks against runtime-checkable
+ protocols. However, it may also mean that some objects which used to be
+ considered instances of a runtime-checkable protocol on older versions of
+ `typing_extensions` may no longer be considered instances of that protocol
+ using the new release, and vice versa. Most users are unlikely to be affected
+ by this change. Patch by Alex Waygood.
+- Backport the ability to define `__init__` methods on Protocol classes, a
+ change made in Python 3.11 (originally implemented in
+ https://github.com/python/cpython/pull/31628 by Adrian Garcia Badaracco).
+ Patch by Alex Waygood.
+- Speedup `isinstance(3, typing_extensions.SupportsIndex)` by >10x on Python
+ <3.12. Patch by Alex Waygood.
+- Add `typing_extensions` versions of `SupportsInt`, `SupportsFloat`,
+ `SupportsComplex`, `SupportsBytes`, `SupportsAbs` and `SupportsRound`. These
+ have the same semantics as the versions from the `typing` module, but
+ `isinstance()` checks against the `typing_extensions` versions are >10x faster
+ at runtime on Python <3.12. Patch by Alex Waygood.
+- Add `__orig_bases__` to non-generic TypedDicts, call-based TypedDicts, and
+ call-based NamedTuples. Other TypedDicts and NamedTuples already had the attribute.
+ Patch by Adrian Garcia Badaracco.
+- Add `typing_extensions.get_original_bases`, a backport of
+ [`types.get_original_bases`](https://docs.python.org/3.12/library/types.html#types.get_original_bases),
+ introduced in Python 3.12 (CPython PR
+ https://github.com/python/cpython/pull/101827, originally by James
+ Hilton-Balfe). Patch by Alex Waygood.
+
+ This function should always produce correct results when called on classes
+ constructed using features from `typing_extensions`. However, it may
+ produce incorrect results when called on some `NamedTuple` or `TypedDict`
+ classes that use `typing.{NamedTuple,TypedDict}` on Python <=3.11.
+- Constructing a call-based `TypedDict` using keyword arguments for the fields
+ now causes a `DeprecationWarning` to be emitted. This matches the behaviour
+ of `typing.TypedDict` on 3.11 and 3.12.
+- Backport the implementation of `NewType` from 3.10 (where it is implemented
+ as a class rather than a function). This allows user-defined `NewType`s to be
+ pickled. Patch by Alex Waygood.
+- Fix tests and import on Python 3.12, where `typing.TypeVar` can no longer be
+ subclassed. Patch by Jelle Zijlstra.
+- Add `typing_extensions.TypeAliasType`, a backport of `typing.TypeAliasType`
+ from PEP 695. Patch by Jelle Zijlstra.
+- Backport changes to the repr of `typing.Unpack` that were made in order to
+ implement [PEP 692](https://peps.python.org/pep-0692/) (backport of
+ https://github.com/python/cpython/pull/104048). Patch by Alex Waygood.
+
+# Release 4.5.0 (February 14, 2023)
+
+- Runtime support for PEP 702, adding `typing_extensions.deprecated`. Patch
+ by Jelle Zijlstra.
+- Add a better default value for the TypeVar `default` parameter (PEP 696), enabling
+ a runtime check of whether `None` was passed as the default. Patch by Marc Mueller (@cdce8p).
+- The `@typing_extensions.override` decorator now sets the `.__override__`
+ attribute. Patch by Steven Troxler.
+- Fix `get_type_hints()` on cross-module inherited `TypedDict` in 3.9 and 3.10.
+ Patch by Carl Meyer.
+- Add `frozen_default` parameter on `dataclass_transform`. Patch by Erik De Bonte.
+
+# Release 4.4.0 (October 6, 2022)
+
+- Add `typing_extensions.Any`, a backport of Python 3.11's `Any` class, which is
+ subclassable at runtime (backport from python/cpython#31841, by Shantanu
+ and Jelle Zijlstra). Patch by James Hilton-Balfe (@Gobot1234).
+- Add initial support for TypeVarLike `default` parameter, PEP 696.
+ Patch by Marc Mueller (@cdce8p).
+- Runtime support for PEP 698, adding `typing_extensions.override`. Patch by
+ Jelle Zijlstra.
+- Add the `infer_variance` parameter to `TypeVar`, as specified in PEP 695.
+ Patch by Jelle Zijlstra.
+
+# Release 4.3.0 (July 1, 2022)
+
+- Add `typing_extensions.NamedTuple`, allowing for generic `NamedTuple`s on
+ Python <3.11 (backport from python/cpython#92027, by Serhiy Storchaka). Patch
+ by Alex Waygood (@AlexWaygood).
+- Adjust `typing_extensions.TypedDict` to allow for generic `TypedDict`s on
+ Python <3.11 (backport from python/cpython#27663, by Samodya Abey). Patch by
+ Alex Waygood (@AlexWaygood).
+
+# Release 4.2.0 (April 17, 2022)
+
+- Re-export `typing.Unpack` and `typing.TypeVarTuple` on Python 3.11.
+- Add `ParamSpecArgs` and `ParamSpecKwargs` to `__all__`.
+- Improve "accepts only single type" error messages.
+- Improve the distributed package. Patch by Marc Mueller (@cdce8p).
+- Update `typing_extensions.dataclass_transform` to rename the
+ `field_descriptors` parameter to `field_specifiers` and accept
+ arbitrary keyword arguments.
+- Add `typing_extensions.get_overloads` and
+ `typing_extensions.clear_overloads`, and add registry support to
+ `typing_extensions.overload`. Backport from python/cpython#89263.
+- Add `typing_extensions.assert_type`. Backport from bpo-46480.
+- Drop support for Python 3.6. Original patch by Adam Turner (@AA-Turner).
+
+# Release 4.1.1 (February 13, 2022)
+
+- Fix importing `typing_extensions` on Python 3.7.0 and 3.7.1. Original
+ patch by Nikita Sobolev (@sobolevn).
+
+# Release 4.1.0 (February 12, 2022)
+
+- Runtime support for PEP 646, adding `typing_extensions.TypeVarTuple`
+ and `typing_extensions.Unpack`.
+- Add interaction of `Required` and `NotRequired` with `__required_keys__`,
+ `__optional_keys__` and `get_type_hints()`. Patch by David Cabot (@d-k-bo).
+- Runtime support for PEP 675 and `typing_extensions.LiteralString`.
+- Add `Never` and `assert_never`. Backport from bpo-46475.
+- `ParamSpec` args and kwargs are now equal to themselves. Backport from
+ bpo-46676. Patch by Gregory Beauregard (@GBeauregard).
+- Add `reveal_type`. Backport from bpo-46414.
+- Runtime support for PEP 681 and `typing_extensions.dataclass_transform`.
+- `Annotated` can now wrap `ClassVar` and `Final`. Backport from
+ bpo-46491. Patch by Gregory Beauregard (@GBeauregard).
+- Add missed `Required` and `NotRequired` to `__all__`. Patch by
+ Yuri Karabas (@uriyyo).
+- The `@final` decorator now sets the `__final__` attribute on the
+ decorated object to allow runtime introspection. Backport from
+ bpo-46342.
+- Add `is_typeddict`. Patch by Chris Moradi (@chrismoradi) and James
+ Hilton-Balfe (@Gobot1234).
+
+# Release 4.0.1 (November 30, 2021)
+
+- Fix broken sdist in release 4.0.0. Patch by Adam Turner (@AA-Turner).
+- Fix equality comparison for `Required` and `NotRequired`. Patch by
+ Jelle Zijlstra (@jellezijlstra).
+- Fix usage of `Self` as a type argument. Patch by Chris Wesseling
+ (@CharString) and James Hilton-Balfe (@Gobot1234).
+
+# Release 4.0.0 (November 14, 2021)
+
+- Starting with version 4.0.0, typing_extensions uses Semantic Versioning.
+ See the README for more information.
+- Dropped support for Python versions 3.5 and older, including Python 2.7.
+- Simplified backports for Python 3.6.0 and newer. Patch by Adam Turner (@AA-Turner).
+
+## Added in version 4.0.0
+
+- Runtime support for PEP 673 and `typing_extensions.Self`. Patch by
+ James Hilton-Balfe (@Gobot1234).
+- Runtime support for PEP 655 and `typing_extensions.Required` and `NotRequired`.
+ Patch by David Foster (@davidfstr).
+
+## Removed in version 4.0.0
+
+The following non-exported but non-private names have been removed as they are
+unneeded for supporting Python 3.6 and newer.
+
+- TypingMeta
+- OLD_GENERICS
+- SUBS_TREE
+- HAVE_ANNOTATED
+- HAVE_PROTOCOLS
+- V_co
+- VT_co
+
+# Previous releases
+
+Prior to release 4.0.0 we did not provide a changelog. Please check
+the Git history for details.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..1b030d5
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,70 @@
+Code in this repository should follow CPython's style guidelines and
+contributors need to sign the PSF Contributor Agreement.
+
+# typing\_extensions
+
+The `typing_extensions` module provides a way to access new features from the standard
+library `typing` module in older versions of Python. For example, Python 3.10 adds
+`typing.TypeGuard`, but users of older versions of Python can use `typing_extensions` to
+use `TypeGuard` in their code even if they are unable to upgrade to Python 3.10.
+
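+As a quick, illustrative sketch (the function and names below are invented for
+this example, not taken from the repository), such code simply imports the
+backport instead of `typing`:
+
+```python
+from typing import List
+
+# On Python 3.10+ this re-exports typing.TypeGuard;
+# on older versions it provides the backported implementation.
+from typing_extensions import TypeGuard
+
+def is_str_list(values: List[object]) -> TypeGuard[List[str]]:
+    return all(isinstance(v, str) for v in values)
+```
+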
+If you contribute the runtime implementation of a new `typing` feature to CPython, you
+are encouraged to also implement the feature in `typing_extensions`. Because the runtime
+implementation of much of the infrastructure in the `typing` module has changed over
+time, this may require different code for some older Python versions.
+
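+As a rough sketch of that pattern (simplified; the real module's code is more
+involved), a backport typically re-exports the `typing` object when it exists
+and otherwise defines its own implementation:
+
+```python
+import sys
+import typing
+
+if sys.version_info >= (3, 11):
+    # The feature exists in the standard library: just re-export it.
+    assert_never = typing.assert_never
+else:
+    def assert_never(arg, /):
+        # Simplified backport: fail loudly if an "unreachable" branch is hit.
+        raise AssertionError(f"Expected code to be unreachable, but got: {arg!r}")
+```
+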
+`typing_extensions` may also include experimental features that are not yet part of the
+standard library, so that users can experiment with them before they are added to the
+standard library. Such features should already be specified in a PEP or merged into
+CPython's `main` branch.
+
+# Versioning scheme
+
+Starting with version 4.0.0, `typing_extensions` uses
+[Semantic Versioning](https://semver.org/). See the documentation
+for more detail.
+
+# Type stubs
+
+A stub file for `typing_extensions` is maintained
+[in typeshed](https://github.com/python/typeshed/blob/main/stdlib/typing_extensions.pyi).
+Because of the special status that `typing_extensions` holds in the typing ecosystem,
+the stubs are placed in the standard library in typeshed and distributed as
+part of the stubs bundled with individual type checkers.
+
+# Running tests
+
+Testing `typing_extensions` can be tricky because many development tools depend on
+`typing_extensions`, so you may end up testing some installed version of the library,
+rather than your local code.
+
+The simplest way to run the tests locally is:
+
+- `cd src/`
+- `python test_typing_extensions.py`
+
+Alternatively, you can invoke `unittest` explicitly:
+
+- `python -m unittest test_typing_extensions.py`
+
+Running these commands in the `src/` directory ensures that the local file
+`typing_extensions.py` is used, instead of any other version of the library you
+may have installed.
+
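+If you want to confirm which copy is being tested, a small sanity check (purely
+illustrative, not part of the test suite) can be run from `src/` first:
+
+```python
+import os
+import typing_extensions
+
+# The import should resolve to ./typing_extensions.py, not an installed copy.
+local = os.path.join(os.getcwd(), "typing_extensions.py")
+assert os.path.realpath(typing_extensions.__file__) == os.path.realpath(local), (
+    f"Testing {typing_extensions.__file__} instead of the local checkout"
+)
+```
+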
+# Workflow for PyPI releases
+
+- Make sure you follow the versioning policy in the documentation
+ (e.g., release candidates before any feature release)
+
+- Ensure that GitHub Actions reports no errors.
+
+- Update the version number in `typing_extensions/pyproject.toml` and in
+ `typing_extensions/CHANGELOG.md`.
+
+- Create a new GitHub release at https://github.com/python/typing_extensions/releases/new.
+ Details:
+ - The tag should be just the version number, e.g. `4.1.1`.
+ - Copy the release notes from `CHANGELOG.md`.
+
+- Release automation will finish the release. You'll have to manually
+ approve the last step before upload.
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..30a1c4b
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,484 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations, which became
+Zope Corporation. In 2001, the Python Software Foundation (PSF, see
+https://www.python.org/psf/) was formed, a non-profit organization
+created specifically to own Python-related Intellectual Property.
+Zope Corporation was a sponsoring member of the PSF.
+
+All Python releases are Open Source (see https://opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+    Release         Derived     Year        Owner       GPL-
+                    from                                compatible? (1)
+
+    0.9.0 thru 1.2              1991-1995   CWI         yes
+    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
+    1.6             1.5.2       2000        CNRI        no
+    2.0             1.6         2000        BeOpen.com  no
+    1.6.1           1.6         2001        CNRI        yes (2)
+    2.1             2.0+1.6.1   2001        PSF         no
+    2.0.1           2.0+1.6.1   2001        PSF         yes
+    2.1.1           2.1+2.0.1   2001        PSF         yes
+    2.1.2           2.1.1       2002        PSF         yes
+    2.1.3           2.1.2       2002        PSF         yes
+    2.2 and above   2.1.1       2001-now    PSF         yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+Python software and documentation are licensed under the
+Python Software Foundation License Version 2.
+
+Starting with Python 3.8.6, examples, recipes, and other code in
+the documentation are dual licensed under the PSF License Version 2
+and the Zero-Clause BSD license.
+
+Some software incorporated into Python is under different licenses.
+The licenses are listed with code falling under that license.
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
+----------------------------------------------------------------------
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
+APACHE LICENSE FOR ANDROID BLUEPRINT FILE
+-----------------------------------------
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..75ee9ef
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,16 @@
+name: "typing_extensions"
+description:
+ "Static Typing for Python"
+
+third_party {
+  homepage: "https://github.com/python/typing_extensions"
+ identifier {
+ type: "Git"
+ value: "https://github.com/python/typing_extensions"
+ primary_source: true
+ version: "4.12.1"
+ }
+ version: "4.12.1"
+ last_upgrade_date { year: 2024 month: 6 day: 21 }
+ license_type: NOTICE
+}
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..2e8f086
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1 @@
+include platform/system/core:main:/janitors/OWNERS
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..1eddb2a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,37 @@
+# Typing Extensions
+
+[](https://gitter.im/python/typing)
+
+[Documentation](https://typing-extensions.readthedocs.io/en/latest/#) –
+[PyPI](https://pypi.org/project/typing-extensions/)
+
+## Overview
+
+The `typing_extensions` module serves two related purposes:
+
+- Enable use of new type system features on older Python versions. For example,
+ `typing.TypeGuard` is new in Python 3.10, but `typing_extensions` allows
+ users on previous Python versions to use it too.
+- Enable experimentation with new type system PEPs before they are accepted and
+ added to the `typing` module.
+
+`typing_extensions` is treated specially by static type checkers such as
+mypy and pyright. Objects defined in `typing_extensions` are treated the same
+way as equivalent forms in `typing`.
+
+`typing_extensions` uses
+[Semantic Versioning](https://semver.org/). The
+major version will be incremented only for backwards-incompatible changes.
+Therefore, it's safe to depend
+on `typing_extensions` like this: `typing_extensions >=x.y, <(x+1)`,
+where `x.y` is the first version that includes all features you need.
+
+## Included items
+
+See [the documentation](https://typing-extensions.readthedocs.io/en/latest/#) for a
+complete listing of module contents.
+
+## Contributing
+
+See [CONTRIBUTING.md](https://github.com/python/typing_extensions/blob/main/CONTRIBUTING.md)
+for how to contribute to `typing_extensions`.
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000..efd1d6a
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,10 @@
+# Security Policy
+
+## Supported Versions
+
+Only the latest release is supported.
+
+## Reporting a Vulnerability
+
+To report an issue, go to https://github.com/python/typing_extensions/security.
+We commit to responding to any issue within 14 days and promptly releasing any fixes.
diff --git a/doc/.gitignore b/doc/.gitignore
new file mode 100644
index 0000000..69fa449
--- /dev/null
+++ b/doc/.gitignore
@@ -0,0 +1 @@
+_build/
diff --git a/doc/Makefile b/doc/Makefile
new file mode 100644
index 0000000..d4bb2cb
--- /dev/null
+++ b/doc/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?=
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = .
+BUILDDIR = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/doc/_extensions/__init__.py b/doc/_extensions/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/doc/_extensions/__init__.py
diff --git a/doc/_extensions/gh_link.py b/doc/_extensions/gh_link.py
new file mode 100644
index 0000000..3442dbd
--- /dev/null
+++ b/doc/_extensions/gh_link.py
@@ -0,0 +1,29 @@
+from docutils import nodes
+
+
+def setup(app):
+ app.add_role(
+ "pr", autolink("https://github.com/python/typing_extensions/pull/{}", "PR #")
+ )
+ app.add_role(
+ "pr-cpy", autolink("https://github.com/python/cpython/pull/{}", "CPython PR #")
+ )
+ app.add_role(
+ "issue",
+ autolink("https://github.com/python/typing_extensions/issues/{}", "issue #"),
+ )
+ app.add_role(
+ "issue-cpy",
+ autolink("https://github.com/python/cpython/issues/{}", "CPython issue #"),
+ )
+
+
+def autolink(pattern: str, prefix: str):
+ def role(name, rawtext, text: str, lineno, inliner, options=None, content=None):
+ if options is None:
+ options = {}
+ url = pattern.format(text)
+ node = nodes.reference(rawtext, f"{prefix}{text}", refuri=url, **options)
+ return [node], []
+
+ return role
diff --git a/doc/conf.py b/doc/conf.py
new file mode 100644
index 0000000..4227360
--- /dev/null
+++ b/doc/conf.py
@@ -0,0 +1,50 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+import os.path
+import sys
+
+from docutils.nodes import Element
+from sphinx.writers.html5 import HTML5Translator
+
+sys.path.insert(0, os.path.abspath('.'))
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = 'typing_extensions'
+copyright = '2023, Guido van Rossum and others'
+author = 'Guido van Rossum and others'
+release = '4.6.0'
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = ['sphinx.ext.intersphinx', '_extensions.gh_link']
+
+templates_path = ['_templates']
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+intersphinx_mapping = {'py': ('https://docs.python.org/3.12', None)}
+
+add_module_names = False
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = 'alabaster'
+
+
+class MyTranslator(HTML5Translator):
+ """Adds a link target to name without `typing_extensions.` prefix."""
+ def visit_desc_signature(self, node: Element) -> None:
+ desc_name = node.get("fullname")
+ if desc_name:
+ self.body.append(f'<span id="{desc_name}"></span>')
+ super().visit_desc_signature(node)
+
+
+def setup(app):
+ app.set_translator('html', MyTranslator)
diff --git a/doc/index.rst b/doc/index.rst
new file mode 100644
index 0000000..3f0d2d4
--- /dev/null
+++ b/doc/index.rst
@@ -0,0 +1,1238 @@
+.. module:: typing_extensions
+
+Welcome to typing_extensions's documentation!
+=============================================
+
+``typing_extensions`` complements the standard-library :py:mod:`typing` module,
+providing runtime support for type hints as specified by :pep:`484` and subsequent
+PEPs. The module serves two related purposes:
+
+- Enable use of new type system features on older Python versions. For example,
+ :py:data:`typing.TypeGuard` is new in Python 3.10, but ``typing_extensions`` allows
+ users on previous Python versions to use it too.
+- Enable experimentation with type system features proposed in new PEPs before they are accepted and
+ added to the :py:mod:`typing` module.
+
+New features may be added to ``typing_extensions`` as soon as they are specified
+in a PEP that has been added to the `python/peps <https://github.com/python/peps>`_
+repository. If the PEP is accepted, the feature will then be added to the
+:py:mod:`typing` module for the next CPython release. No typing PEP that
+affected ``typing_extensions`` has been rejected so far, so we haven't yet
+figured out how to deal with that possibility.
+
+Bugfixes and new typing features that don't require a PEP may be added to
+``typing_extensions`` once they are merged into CPython's main branch.
+
+``typing_extensions`` also re-exports all names from the :py:mod:`typing` module,
+including those that have always been present in the module. This allows users to
+import names from ``typing_extensions`` without having to remember exactly when
+each object was added to :py:mod:`typing`. There are a few exceptions:
+:py:class:`typing.ByteString`, which is deprecated and due to be removed in Python
+3.14, is not re-exported. Similarly, the ``typing.io`` and ``typing.re`` submodules,
+which are removed in Python 3.13, are excluded.
+
+Versioning and backwards compatibility
+--------------------------------------
+
+Starting with version 4.0.0, ``typing_extensions`` uses
+`Semantic Versioning <https://semver.org>`_. A changelog is
+maintained `on GitHub <https://github.com/python/typing_extensions/blob/main/CHANGELOG.md>`_.
+
+The major version is incremented for all backwards-incompatible changes.
+Therefore, it's safe to depend
+on ``typing_extensions`` like this: ``typing_extensions >=x.y, <(x+1)``,
+where ``x.y`` is the first version that includes all features you need.
+In view of the wide usage of ``typing_extensions`` across the ecosystem,
+we are highly hesitant to break backwards compatibility, and we do not
+expect to increase the major version number in the foreseeable future.
+
+Feature releases, with version numbers of the form 4.N.0, are made at
+irregular intervals when enough new features accumulate. Before a
+feature release, at least one release candidate (with a version number
+of the form 4.N.0rc1) should be released to give downstream users time
+to test. After at least a week of testing, the new feature version
+may then be released. If necessary, additional release candidates can
+be added.
+
+Bugfix releases, with version numbers of the form 4.N.1 or higher,
+may be made if bugs are discovered after a feature release.
+
+We provide no backward compatibility guarantees for prereleases (e.g.,
+release candidates) and for unreleased code in our Git repository.
+
+Before version 4.0.0, the versioning scheme loosely followed the Python
+version from which features were backported; for example,
+``typing_extensions`` 3.10.0.0 was meant to reflect ``typing`` as of
+Python 3.10.0. During this period, no changelog was maintained.
+
+Runtime use of types
+~~~~~~~~~~~~~~~~~~~~
+
+We aim for complete backwards compatibility in terms of the names we export:
+code like ``from typing_extensions import X`` that works on one
+typing-extensions release will continue to work on the next.
+It is more difficult to maintain compatibility for users that introspect
+types at runtime, as almost any detail can potentially break compatibility.
+Users who introspect types should follow these guidelines to minimize
+the risk of compatibility issues:
+
+- Always check for both the :mod:`typing` and ``typing_extensions`` versions
+ of objects, even if they are currently the same on some Python version.
+ Future ``typing_extensions`` releases may re-export a separate version of
+ the object to backport some new feature or bugfix.
+- Use public APIs like :func:`get_origin` and :func:`get_original_bases` to
+ access internal information about types, instead of accessing private
+ attributes directly. If some information is not available through a public
+ attribute, consider opening an issue in CPython to add such an API.
+
+Here is an example recipe for a general-purpose function that could be used for
+reasonably performant runtime introspection of typing objects. The function
+will be resilient against any potential changes in ``typing_extensions`` that
+alter whether an object is reimplemented in ``typing_extensions``, rather than
+simply being re-exported from the :mod:`typing` module::
+
+ import functools
+ import typing
+ import typing_extensions
+ from typing import Tuple, Any
+
+ # Use an unbounded cache for this function, for optimal performance
+ @functools.lru_cache(maxsize=None)
+ def get_typing_objects_by_name_of(name: str) -> Tuple[Any, ...]:
+ result = tuple(
+ getattr(module, name)
+ # You could potentially also include mypy_extensions here,
+ # if your library supports mypy_extensions
+ for module in (typing, typing_extensions)
+ if hasattr(module, name)
+ )
+ if not result:
+ raise ValueError(
+ f"Neither typing nor typing_extensions has an object called {name!r}"
+ )
+ return result
+
+
+ # Use a cache here as well, but make it a bounded cache
+ # (the default cache size is 128)
+ @functools.lru_cache()
+ def is_typing_name(obj: object, name: str) -> bool:
+ return any(obj is thing for thing in get_typing_objects_by_name_of(name))
+
+Example usage::
+
+ >>> import typing, typing_extensions
+ >>> from functools import partial
+ >>> from typing_extensions import get_origin
+ >>> is_literal = partial(is_typing_name, name="Literal")
+ >>> is_literal(typing.Literal)
+ True
+ >>> is_literal(typing_extensions.Literal)
+ True
+ >>> is_literal(typing.Any)
+ False
+ >>> is_literal(get_origin(typing.Literal[42]))
+ True
+ >>> is_literal(get_origin(typing_extensions.Final[42]))
+ False
+
+Python version support
+----------------------
+
+``typing_extensions`` currently supports Python versions 3.8 and higher. In the future,
+support for a given Python version will be dropped some time after that version
+reaches its end of life.
+
+Module contents
+---------------
+
+As most of the features in ``typing_extensions`` exist in :py:mod:`typing`
+in newer versions of Python, the documentation here is brief and focuses
+on aspects that are specific to ``typing_extensions``, such as limitations
+on specific Python versions.
+
+Special typing primitives
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. data:: Annotated
+
+ See :py:data:`typing.Annotated` and :pep:`593`. In ``typing`` since 3.9.
+
+ .. versionchanged:: 4.1.0
+
+ ``Annotated`` can now wrap :data:`ClassVar` and :data:`Final`.
+
+.. data:: Any
+
+ See :py:data:`typing.Any`.
+
+ Since Python 3.11, ``typing.Any`` can be used as a base class.
+ ``typing_extensions.Any`` supports this feature on older versions.
+
+ .. versionadded:: 4.4.0
+
+ Added to support inheritance from ``Any``.
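+
+   For example, the following (invented) class definition works on all
+   supported Python versions when ``Any`` is imported from
+   ``typing_extensions``::
+
+      from typing_extensions import Any
+
+      class MockAnything(Any):
+          """A stand-in object that a type checker treats as Any."""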
+
+.. data:: Concatenate
+
+ See :py:data:`typing.Concatenate` and :pep:`612`. In ``typing`` since 3.10.
+
+ The backport does not support certain operations involving ``...`` as
+ a parameter; see :issue:`48` and :issue:`110` for details.
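+
+   A sketch of a typical supported use, with an invented ``with_lock``
+   decorator that supplies a :py:class:`threading.Lock` as the first
+   argument::
+
+      from threading import Lock
+      from typing import Callable
+      from typing_extensions import Concatenate, ParamSpec
+
+      P = ParamSpec("P")
+
+      def with_lock(func: Callable[Concatenate[Lock, P], int]) -> Callable[P, int]:
+          ...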
+
+.. data:: Final
+
+ See :py:data:`typing.Final` and :pep:`591`. In ``typing`` since 3.8.
+
+.. data:: Literal
+
+ See :py:data:`typing.Literal` and :pep:`586`. In ``typing`` since 3.8.
+
+ :py:data:`typing.Literal` does not flatten or deduplicate parameters on Python <3.9.1, and a
+ caching bug was fixed in 3.10.1/3.9.8. The ``typing_extensions`` version
+ flattens and deduplicates parameters on all Python versions, and the caching
+ bug is also fixed on all versions.
+
+ .. versionchanged:: 4.6.0
+
+ Backported the bug fixes from :pr-cpy:`29334`, :pr-cpy:`23294`, and :pr-cpy:`23383`.
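+
+   For example, with the backported :func:`get_args`::
+
+      from typing_extensions import Literal, get_args
+
+      # Nested parameters are flattened and duplicates are removed
+      # on all supported Python versions.
+      get_args(Literal[1, 2, Literal[2, 3]])  # (1, 2, 3)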
+
+.. data:: LiteralString
+
+ See :py:data:`typing.LiteralString` and :pep:`675`. In ``typing`` since 3.11.
+
+ .. versionadded:: 4.1.0
+
+.. class:: NamedTuple
+
+ See :py:class:`typing.NamedTuple`.
+
+ ``typing_extensions`` backports several changes
+ to ``NamedTuple`` on Python 3.11 and lower: in 3.11,
+ support for generic ``NamedTuple``\ s was added, and
+ in 3.12, the ``__orig_bases__`` attribute was added.
+
+ .. versionadded:: 4.3.0
+
+ Added to provide support for generic ``NamedTuple``\ s.
+
+ .. versionchanged:: 4.6.0
+
+ Support for the ``__orig_bases__`` attribute was added.
+
+ .. versionchanged:: 4.7.0
+
+ The undocumented keyword argument syntax for creating NamedTuple classes
+ (``NT = NamedTuple("NT", x=int)``) is deprecated, and will be disallowed
+ in Python 3.15. Use the class-based syntax or the functional syntax instead.
+
+ .. versionchanged:: 4.7.0
+
+ When using the functional syntax to create a NamedTuple class, failing to
+ pass a value to the 'fields' parameter (``NT = NamedTuple("NT")``) is
+ deprecated. Passing ``None`` to the 'fields' parameter
+ (``NT = NamedTuple("NT", None)``) is also deprecated. Both will be
+ disallowed in Python 3.15. To create a NamedTuple class with zero fields,
+ use ``class NT(NamedTuple): pass`` or ``NT = NamedTuple("NT", [])``.
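+
+   A small sketch of a generic ``NamedTuple`` (the ``Pair`` class is invented
+   for illustration)::
+
+      from typing import Generic, TypeVar
+      from typing_extensions import NamedTuple
+
+      T = TypeVar("T")
+
+      class Pair(NamedTuple, Generic[T]):
+          first: T
+          second: T
+
+      # Subscription works even on Python versions before 3.11:
+      IntPair = Pair[int]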
+
+
+.. data:: Never
+
+ See :py:data:`typing.Never`. In ``typing`` since 3.11.
+
+ .. versionadded:: 4.1.0
+
+.. class:: NewType(name, tp)
+
+ See :py:class:`typing.NewType`. In ``typing`` since 3.5.2.
+
+ Instances of ``NewType`` were made picklable in 3.10 and an error message was
+ improved in 3.11; ``typing_extensions`` backports these changes.
+
+ .. versionchanged:: 4.6.0
+
+ The improvements from Python 3.10 and 3.11 were backported.
+
+.. data:: NoDefault
+
+ See :py:class:`typing.NoDefault`. In ``typing`` since 3.13.0.
+
+ .. versionadded:: 4.12.0
+
+.. data:: NotRequired
+
+ See :py:data:`typing.NotRequired` and :pep:`655`. In ``typing`` since 3.11.
+
+ .. versionadded:: 4.0.0
+
+.. class:: ParamSpec(name, *, default=NoDefault)
+
+ See :py:class:`typing.ParamSpec` and :pep:`612`. In ``typing`` since 3.10.
+
+ The ``typing_extensions`` version adds support for the
+ ``default=`` argument from :pep:`696`.
+
+ On older Python versions, ``typing_extensions.ParamSpec`` may not work
+ correctly with introspection tools like :func:`get_args` and
+ :func:`get_origin`. Certain special cases in user-defined
+ :py:class:`typing.Generic`\ s are also not available (e.g., see :issue:`126`).
+
+ .. versionchanged:: 4.4.0
+
+ Added support for the ``default=`` argument.
+
+ .. versionchanged:: 4.6.0
+
+ The implementation was changed for compatibility with Python 3.12.
+
+ .. versionchanged:: 4.8.0
+
+ Passing an ellipsis literal (``...``) to *default* now works on Python
+ 3.10 and lower.
+
+ .. versionchanged:: 4.12.0
+
+ The :attr:`!__default__` attribute is now set to ``None`` if
+ ``default=None`` is passed, and to :data:`NoDefault` if no value is passed.
+
+ Previously, passing ``None`` would result in :attr:`!__default__` being set
+ to :py:class:`types.NoneType`, and passing no value for the parameter would
+ result in :attr:`!__default__` being set to ``None``.
+
+ .. versionchanged:: 4.12.0
+
+ ParamSpecs now have a ``has_default()`` method, for compatibility
+ with :py:class:`typing.ParamSpec` on Python 3.13+.
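+
+   A minimal sketch of the ``default=`` argument (the names are invented)::
+
+      from typing_extensions import ParamSpec
+
+      P = ParamSpec("P", default=[int, str])
+      P.has_default()  # True
+
+      Q = ParamSpec("Q")
+      Q.has_default()  # False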
+
+.. class:: ParamSpecArgs
+
+.. class:: ParamSpecKwargs
+
+ See :py:class:`typing.ParamSpecArgs` and :py:class:`typing.ParamSpecKwargs`.
+ In ``typing`` since 3.10.
+
+.. class:: Protocol
+
+ See :py:class:`typing.Protocol` and :pep:`544`. In ``typing`` since 3.8.
+
+ Python 3.12 improves the performance of runtime-checkable protocols;
+ ``typing_extensions`` backports this improvement.
+
+ .. versionchanged:: 4.6.0
+
+ Backported the ability to define ``__init__`` methods on Protocol classes.
+
+ .. versionchanged:: 4.6.0
+
+ Backported changes to runtime-checkable protocols from Python 3.12,
+ including :pr-cpy:`103034` and :pr-cpy:`26067`.
+
+ .. versionchanged:: 4.7.0
+
+ Classes can now inherit from both :py:class:`typing.Protocol` and
+ ``typing_extensions.Protocol`` simultaneously. Previously, this led to
+ :py:exc:`TypeError` being raised due to a metaclass conflict.
+
+ It is recommended to avoid doing this if possible. Not all features and
+ bugfixes that ``typing_extensions.Protocol`` backports from newer Python
+ versions are guaranteed to work if :py:class:`typing.Protocol` is also
+ present in a protocol class's :py:term:`method resolution order`. See
+ :issue:`245` for some examples.
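+
+   For illustration, a runtime-checkable protocol defined with the backport
+   (the protocol name is invented)::
+
+      import io
+
+      from typing_extensions import Protocol, runtime_checkable
+
+      @runtime_checkable
+      class SupportsClose(Protocol):
+          def close(self) -> None: ...
+
+      isinstance(io.StringIO(), SupportsClose)  # True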
+
+.. data:: ReadOnly
+
+ See :pep:`705`. Indicates that a :class:`TypedDict` item may not be modified.
+
+ .. versionadded:: 4.9.0
+
+.. data:: Required
+
+ See :py:data:`typing.Required` and :pep:`655`. In ``typing`` since 3.11.
+
+ .. versionadded:: 4.0.0
+
+.. data:: Self
+
+ See :py:data:`typing.Self` and :pep:`673`. In ``typing`` since 3.11.
+
+ .. versionadded:: 4.0.0
+
+.. data:: TypeAlias
+
+ See :py:data:`typing.TypeAlias` and :pep:`613`. In ``typing`` since 3.10.
+
+.. class:: TypeAliasType(name, value, *, type_params=())
+
+ See :py:class:`typing.TypeAliasType` and :pep:`695`. In ``typing`` since 3.12.
+
+ .. versionadded:: 4.6.0
+
+.. data:: TypeGuard
+
+ See :py:data:`typing.TypeGuard` and :pep:`647`. In ``typing`` since 3.10.
+
+.. data:: TypeIs
+
+ See :pep:`742`. Similar to :data:`TypeGuard`, but allows more type narrowing.
+
+ .. versionadded:: 4.10.0
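+
+   A small sketch (the function names are invented); unlike :data:`TypeGuard`,
+   a type checker can also narrow the negative branch::
+
+      from typing import Union
+      from typing_extensions import TypeIs
+
+      def is_str(x: object) -> TypeIs[str]:
+          return isinstance(x, str)
+
+      def handle(x: Union[int, str]) -> None:
+          if is_str(x):
+              ...  # x is narrowed to str here
+          else:
+              ...  # x is narrowed to int here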
+
+.. class:: TypedDict(dict, total=True)
+
+ See :py:class:`typing.TypedDict` and :pep:`589`. In ``typing`` since 3.8.
+
+ ``typing_extensions`` backports various bug fixes and improvements
+ to ``TypedDict`` on Python 3.11 and lower.
+   :py:class:`typing.TypedDict` does not store runtime information
+ about which (if any) keys are non-required in Python 3.8, and does not
+ honor the ``total`` keyword with old-style ``TypedDict()`` in Python
+ 3.9.0 and 3.9.1. :py:class:`typing.TypedDict` also does not support multiple inheritance
+ with :py:class:`typing.Generic` on Python <3.11, and :py:class:`typing.TypedDict` classes do not
+ consistently have the ``__orig_bases__`` attribute on Python <3.12. The
+ ``typing_extensions`` backport provides all of these features and bugfixes on
+ all Python versions.
+
+ Historically, ``TypedDict`` has supported an alternative creation syntax
+ where the fields are supplied as keyword arguments (e.g.,
+ ``TypedDict("TD", a=int, b=str)``). In CPython, this feature was deprecated
+ in Python 3.11 and removed in Python 3.13. ``typing_extensions.TypedDict``
+ raises a :py:exc:`DeprecationWarning` when this syntax is used in Python 3.12
+ or lower and fails with a :py:exc:`TypeError` in Python 3.13 and higher.
+
+ ``typing_extensions`` supports the experimental :data:`ReadOnly` qualifier
+ proposed by :pep:`705`. It is reflected in the following attributes:
+
+ .. attribute:: __readonly_keys__
+
+ A :py:class:`frozenset` containing the names of all read-only keys. Keys
+ are read-only if they carry the :data:`ReadOnly` qualifier.
+
+ .. versionadded:: 4.9.0
+
+ .. attribute:: __mutable_keys__
+
+ A :py:class:`frozenset` containing the names of all mutable keys. Keys
+ are mutable if they do not carry the :data:`ReadOnly` qualifier.
+
+ .. versionadded:: 4.9.0
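+
+   For instance, with the invented ``Movie`` class below::
+
+      from typing_extensions import ReadOnly, TypedDict
+
+      class Movie(TypedDict):
+          title: ReadOnly[str]
+          year: int
+
+      Movie.__readonly_keys__  # frozenset({'title'})
+      Movie.__mutable_keys__   # frozenset({'year'})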
+
+ The experimental ``closed`` keyword argument and the special key
+ ``__extra_items__`` proposed in :pep:`728` are supported.
+
+ When ``closed`` is unspecified or ``closed=False`` is given,
+ ``__extra_items__`` behaves like a regular key. Otherwise, this becomes a
+ special key that does not show up in ``__readonly_keys__``,
+   ``__mutable_keys__``, ``__required_keys__``, ``__optional_keys__``, or
+ ``__annotations__``.
+
+ For runtime introspection, two attributes can be looked at:
+
+ .. attribute:: __closed__
+
+ A boolean flag indicating whether the current ``TypedDict`` is
+ considered closed. This is not inherited by the ``TypedDict``'s
+ subclasses.
+
+ .. versionadded:: 4.10.0
+
+ .. attribute:: __extra_items__
+
+ The type annotation of the extra items allowed on the ``TypedDict``.
+      This attribute defaults to ``None`` when the ``TypedDict`` itself and
+      all of its bases are non-closed. This default is different from
+      ``type(None)``, which represents an explicit ``__extra_items__: None``
+      declared on a closed ``TypedDict``.
+
+ If ``__extra_items__`` is not defined or inherited on a closed
+ ``TypedDict``, this defaults to ``Never``.
+
+ .. versionadded:: 4.10.0
+
+ .. versionchanged:: 4.3.0
+
+ Added support for generic ``TypedDict``\ s.
+
+ .. versionchanged:: 4.6.0
+
+ A :py:exc:`DeprecationWarning` is now emitted when a call-based
+ ``TypedDict`` is constructed using keyword arguments.
+
+ .. versionchanged:: 4.6.0
+
+ Support for the ``__orig_bases__`` attribute was added.
+
+ .. versionchanged:: 4.7.0
+
+ ``TypedDict`` is now a function rather than a class.
+ This brings ``typing_extensions.TypedDict`` closer to the implementation
+      of :py:class:`typing.TypedDict` on Python 3.9 and higher.
+
+ .. versionchanged:: 4.7.0
+
+ When using the functional syntax to create a TypedDict class, failing to
+ pass a value to the 'fields' parameter (``TD = TypedDict("TD")``) is
+ deprecated. Passing ``None`` to the 'fields' parameter
+ (``TD = TypedDict("TD", None)``) is also deprecated. Both will be
+ disallowed in Python 3.15. To create a TypedDict class with 0 fields,
+ use ``class TD(TypedDict): pass`` or ``TD = TypedDict("TD", {})``.
+
+ .. versionchanged:: 4.9.0
+
+ Support for the :data:`ReadOnly` qualifier was added.
+
+ .. versionchanged:: 4.10.0
+
+      Support was added for the keyword argument ``closed`` and, when
+      ``closed=True`` is given, for the special key ``__extra_items__``.
+
+.. class:: TypeVar(name, *constraints, bound=None, covariant=False,
+ contravariant=False, infer_variance=False, default=NoDefault)
+
+ See :py:class:`typing.TypeVar`.
+
+ The ``typing_extensions`` version adds support for the
+ ``default=`` argument from :pep:`696`, as well as the
+ ``infer_variance=`` argument from :pep:`695` (also available
+ in Python 3.12).
+
+ .. versionadded:: 4.4.0
+
+ Added in order to support the new ``default=`` and
+ ``infer_variance=`` arguments.
+
+ .. versionchanged:: 4.6.0
+
+ The implementation was changed for compatibility with Python 3.12.
+
+ .. versionchanged:: 4.12.0
+
+ The :attr:`!__default__` attribute is now set to ``None`` if
+ ``default=None`` is passed, and to :data:`NoDefault` if no value is passed.
+
+ Previously, passing ``None`` would result in :attr:`!__default__` being set
+ to :py:class:`types.NoneType`, and passing no value for the parameter would
+ result in :attr:`!__default__` being set to ``None``.
+
+ .. versionchanged:: 4.12.0
+
+ TypeVars now have a ``has_default()`` method, for compatibility
+ with :py:class:`typing.TypeVar` on Python 3.13+.
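+
+   A minimal sketch of the extra arguments (the name ``T`` is arbitrary)::
+
+      from typing_extensions import TypeVar
+
+      T = TypeVar("T", default=int, infer_variance=True)
+      T.has_default()  # True
+      T.__default__    # int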
+
+.. class:: TypeVarTuple(name, *, default=NoDefault)
+
+ See :py:class:`typing.TypeVarTuple` and :pep:`646`. In ``typing`` since 3.11.
+
+ The ``typing_extensions`` version adds support for the
+ ``default=`` argument from :pep:`696`.
+
+ .. versionadded:: 4.1.0
+
+ .. versionchanged:: 4.4.0
+
+ Added support for the ``default=`` argument.
+
+ .. versionchanged:: 4.6.0
+
+ The implementation was changed for compatibility with Python 3.12.
+
+ .. versionchanged:: 4.12.0
+
+ The :attr:`!__default__` attribute is now set to ``None`` if
+ ``default=None`` is passed, and to :data:`NoDefault` if no value is passed.
+
+ Previously, passing ``None`` would result in :attr:`!__default__` being set
+ to :py:class:`types.NoneType`, and passing no value for the parameter would
+ result in :attr:`!__default__` being set to ``None``.
+
+ .. versionchanged:: 4.12.0
+
+ TypeVarTuples now have a ``has_default()`` method, for compatibility
+ with :py:class:`typing.TypeVarTuple` on Python 3.13+.
+
+ .. versionchanged:: 4.12.0
+
+      It is now disallowed to use a ``TypeVar`` with a default value after a
+      ``TypeVarTuple`` in a type parameter list. This matches the CPython
+ implementation of PEP 696 on Python 3.13+.
+
+.. data:: Unpack
+
+ See :py:data:`typing.Unpack` and :pep:`646`. In ``typing`` since 3.11.
+
+ In Python 3.12, the ``repr()`` was changed as a result of :pep:`692`.
+ ``typing_extensions`` backports this change.
+
+ Generic type aliases involving ``Unpack`` may not work correctly on
+ Python 3.10 and lower; see :issue:`103` for details.
+
+ .. versionadded:: 4.1.0
+
+ .. versionchanged:: 4.6.0
+
+ Backport ``repr()`` changes from Python 3.12.
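+
+   A short sketch of annotating ``*args`` with ``Unpack`` (the names are
+   invented)::
+
+      from typing_extensions import TypeVarTuple, Unpack
+
+      Ts = TypeVarTuple("Ts")
+
+      def log_call(*args: Unpack[Ts]) -> None:
+          ...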
+
+Abstract Base Classes
+~~~~~~~~~~~~~~~~~~~~~
+
+.. class:: Buffer
+
+ See :py:class:`collections.abc.Buffer`. Added to the standard library
+ in Python 3.12.
+
+ .. versionadded:: 4.6.0
+
+Protocols
+~~~~~~~~~
+
+.. class:: SupportsAbs
+
+ See :py:class:`typing.SupportsAbs`.
+
+ ``typing_extensions`` backports a more performant version of this
+ protocol on Python 3.11 and lower.
+
+ .. versionadded:: 4.6.0
+
+.. class:: SupportsBytes
+
+ See :py:class:`typing.SupportsBytes`.
+
+ ``typing_extensions`` backports a more performant version of this
+ protocol on Python 3.11 and lower.
+
+ .. versionadded:: 4.6.0
+
+.. class:: SupportsComplex
+
+ See :py:class:`typing.SupportsComplex`.
+
+ ``typing_extensions`` backports a more performant version of this
+ protocol on Python 3.11 and lower.
+
+ .. versionadded:: 4.6.0
+
+.. class:: SupportsFloat
+
+ See :py:class:`typing.SupportsFloat`.
+
+ ``typing_extensions`` backports a more performant version of this
+ protocol on Python 3.11 and lower.
+
+ .. versionadded:: 4.6.0
+
+.. class:: SupportsIndex
+
+ See :py:class:`typing.SupportsIndex`. In ``typing`` since 3.8.
+
+ ``typing_extensions`` backports a more performant version of this
+ protocol on Python 3.11 and lower.
+
+ .. versionchanged:: 4.6.0
+
+ Backported the performance improvements from Python 3.12.
+
+.. class:: SupportsInt
+
+ See :py:class:`typing.SupportsInt`.
+
+ ``typing_extensions`` backports a more performant version of this
+ protocol on Python 3.11 and lower.
+
+ .. versionadded:: 4.6.0
+
+.. class:: SupportsRound
+
+ See :py:class:`typing.SupportsRound`.
+
+ ``typing_extensions`` backports a more performant version of this
+ protocol on Python 3.11 and lower.
+
+ .. versionadded:: 4.6.0
+
+Decorators
+~~~~~~~~~~
+
+.. decorator:: dataclass_transform(*, eq_default=False, order_default=False,
+ kw_only_default=False, frozen_default=False,
+ field_specifiers=(), **kwargs)
+
+ See :py:func:`typing.dataclass_transform` and :pep:`681`. In ``typing`` since 3.11.
+
+ Python 3.12 adds the ``frozen_default`` parameter; ``typing_extensions``
+ backports this parameter.
+
+ .. versionadded:: 4.1.0
+
+ .. versionchanged:: 4.2.0
+
+ The ``field_descriptors`` parameter was renamed to ``field_specifiers``.
+ For compatibility, the decorator now accepts arbitrary keyword arguments.
+
+ .. versionchanged:: 4.5.0
+
+ The ``frozen_default`` parameter was added.
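+
+   A sketch of how the decorator is typically applied; ``create_model`` is a
+   hypothetical decorator whose runtime behavior is left out::
+
+      from typing import Type, TypeVar
+      from typing_extensions import dataclass_transform
+
+      T = TypeVar("T")
+
+      @dataclass_transform(frozen_default=True)
+      def create_model(cls: Type[T]) -> Type[T]:
+          # A real implementation would synthesize __init__, __eq__, etc.
+          return cls
+
+      @create_model
+      class Point:
+          x: int
+          y: int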
+
+.. decorator:: deprecated(msg, *, category=DeprecationWarning, stacklevel=1)
+
+ See :pep:`702`. In the :mod:`warnings` module since Python 3.13.
+
+ .. versionadded:: 4.5.0
+
+ .. versionchanged:: 4.9.0
+
+ Inheriting from a deprecated class now also raises a runtime
+ :py:exc:`DeprecationWarning`.
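+
+   For example (``old_api`` and the message are invented)::
+
+      from typing_extensions import deprecated
+
+      @deprecated("Use the replacement API instead")
+      def old_api() -> None: ...
+
+      old_api()  # warns at runtime; also flagged by type checkers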
+
+.. decorator:: final
+
+ See :py:func:`typing.final` and :pep:`591`. In ``typing`` since 3.8.
+
+ Since Python 3.11, this decorator supports runtime introspection
+ by setting the ``__final__`` attribute wherever possible; ``typing_extensions.final``
+ backports this feature.
+
+ .. versionchanged:: 4.1.0
+
+ The decorator now attempts to set the ``__final__`` attribute on decorated objects.
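+
+   For illustration (the class name is invented)::
+
+      from typing_extensions import final
+
+      @final
+      class Settings:
+          ...
+
+      Settings.__final__  # True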
+
+.. decorator:: overload
+
+ See :py:func:`typing.overload`.
+
+ Since Python 3.11, this decorator supports runtime introspection
+ through :func:`get_overloads`; ``typing_extensions.overload``
+ backports this feature.
+
+ .. versionchanged:: 4.2.0
+
+ Introspection support via :func:`get_overloads` was added.
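+
+   A small sketch of runtime introspection with :func:`get_overloads`
+   (``process`` is an invented example)::
+
+      from typing_extensions import get_overloads, overload
+
+      @overload
+      def process(x: int) -> int: ...
+      @overload
+      def process(x: str) -> str: ...
+      def process(x):
+          return x
+
+      len(get_overloads(process))  # 2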
+
+.. decorator:: override
+
+ See :py:func:`typing.override` and :pep:`698`. In ``typing`` since 3.12.
+
+ .. versionadded:: 4.4.0
+
+ .. versionchanged:: 4.5.0
+
+ The decorator now attempts to set the ``__override__`` attribute on the decorated
+ object.
+
+.. decorator:: runtime_checkable
+
+ See :py:func:`typing.runtime_checkable`. In ``typing`` since 3.8.
+
+ In Python 3.12, the performance of runtime-checkable protocols was
+ improved, and ``typing_extensions`` backports these performance
+ improvements.
+
+Functions
+~~~~~~~~~
+
+.. function:: assert_never(arg)
+
+ See :py:func:`typing.assert_never`. In ``typing`` since 3.11.
+
+ .. versionadded:: 4.1.0
+
+.. function:: assert_type(val, typ)
+
+ See :py:func:`typing.assert_type`. In ``typing`` since 3.11.
+
+ .. versionadded:: 4.2.0
+
+.. function:: clear_overloads()
+
+ See :py:func:`typing.clear_overloads`. In ``typing`` since 3.11.
+
+ .. versionadded:: 4.2.0
+
+.. function:: get_args(tp)
+
+ See :py:func:`typing.get_args`. In ``typing`` since 3.8.
+
+ This function was changed in 3.9 and 3.10 to deal with :data:`Annotated`
+ and :class:`ParamSpec` correctly; ``typing_extensions`` backports these
+ fixes.
+
+.. function:: get_origin(tp)
+
+ See :py:func:`typing.get_origin`. In ``typing`` since 3.8.
+
+ This function was changed in 3.9 and 3.10 to deal with :data:`Annotated`
+ and :class:`ParamSpec` correctly; ``typing_extensions`` backports these
+ fixes.
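+
+   For example, together with :func:`get_args`::
+
+      from typing_extensions import Annotated, ParamSpec, get_args, get_origin
+
+      P = ParamSpec("P")
+
+      get_args(Annotated[int, "metadata"])  # (int, 'metadata')
+      get_origin(P.args)                    # P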
+
+.. function:: get_original_bases(cls)
+
+ See :py:func:`types.get_original_bases`. Added to the standard library
+ in Python 3.12.
+
+ This function should always produce correct results when called on classes
+ constructed using features from ``typing_extensions``. However, it may
+ produce incorrect results when called on some :py:class:`NamedTuple` or
+ :py:class:`TypedDict` classes on Python <=3.11.
+
+ .. versionadded:: 4.6.0
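+
+   For example (``Base`` and ``Derived`` are invented classes)::
+
+      from typing import Generic, TypeVar
+      from typing_extensions import get_original_bases
+
+      T = TypeVar("T")
+
+      class Base(Generic[T]): ...
+      class Derived(Base[int]): ...
+
+      get_original_bases(Derived)  # (Base[int],)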
+
+.. function:: get_overloads(func)
+
+ See :py:func:`typing.get_overloads`. In ``typing`` since 3.11.
+
+ Before Python 3.11, this works only with overloads created through
+ :func:`overload`, not with :py:func:`typing.overload`.
+
+ .. versionadded:: 4.2.0
+
+.. function:: get_protocol_members(tp)
+
+ Return the set of members defined in a :class:`Protocol`. This works with protocols
+ defined using either :class:`typing.Protocol` or :class:`typing_extensions.Protocol`.
+
+ ::
+
+ >>> from typing_extensions import Protocol, get_protocol_members
+ >>> class P(Protocol):
+ ... def a(self) -> str: ...
+ ... b: int
+ >>> get_protocol_members(P)
+ frozenset({'a', 'b'})
+
+ Raise :py:exc:`TypeError` for arguments that are not Protocols.
+
+ .. versionadded:: 4.7.0
+
+.. function:: get_type_hints(obj, globalns=None, localns=None, include_extras=False)
+
+ See :py:func:`typing.get_type_hints`.
+
+ In Python 3.11, this function was changed to support the new
+ :py:data:`typing.Required` and :py:data:`typing.NotRequired`.
+ ``typing_extensions`` backports these fixes.
+
+ .. versionchanged:: 4.1.0
+
+ Interaction with :data:`Required` and :data:`NotRequired`.
+
+ .. versionchanged:: 4.11.0
+
+      When ``include_extras=False``, ``get_type_hints()`` now strips
+ :data:`ReadOnly` from the annotation.
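+
+   A short sketch with an invented ``Movie`` class::
+
+      from typing_extensions import NotRequired, TypedDict, get_type_hints
+
+      class Movie(TypedDict):
+          title: str
+          year: NotRequired[int]
+
+      get_type_hints(Movie)                       # qualifiers like NotRequired are stripped
+      get_type_hints(Movie, include_extras=True)  # qualifiers are preserved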
+
+.. function:: is_protocol(tp)
+
+ Determine if a type is a :class:`Protocol`. This works with protocols
+ defined using either :py:class:`typing.Protocol` or :class:`typing_extensions.Protocol`.
+
+ For example::
+
+ class P(Protocol):
+ def a(self) -> str: ...
+ b: int
+
+ is_protocol(P) # => True
+ is_protocol(int) # => False
+
+ .. versionadded:: 4.7.0
+
+.. function:: is_typeddict(tp)
+
+ See :py:func:`typing.is_typeddict`. In ``typing`` since 3.10.
+
+ On versions where :class:`TypedDict` is not the same as
+ :py:class:`typing.TypedDict`, this function recognizes
+ ``TypedDict`` classes created through either mechanism.
+
+ .. versionadded:: 4.1.0
+
+ .. versionchanged:: 4.7.0
+
+ :func:`is_typeddict` now returns ``False`` when called with
+ :data:`TypedDict` itself as the argument, consistent with the
+ behavior of :py:func:`typing.is_typeddict`.
+
+.. function:: reveal_type(obj)
+
+ See :py:func:`typing.reveal_type`. In ``typing`` since 3.11.
+
+ .. versionadded:: 4.1.0
+
+
+Annotation metadata
+~~~~~~~~~~~~~~~~~~~
+
+.. class:: Doc(documentation, /)
+
+ Define the documentation of a type annotation using :data:`Annotated`, to be
+ used in class attributes, function and method parameters, return values,
+ and variables.
+
+ The value should be a positional-only string literal to allow static tools
+ like editors and documentation generators to use it.
+
+ This complements docstrings.
+
+ The string value passed is available in the attribute ``documentation``.
+
+ Example::
+
+ >>> from typing_extensions import Annotated, Doc
+ >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
+
+ .. versionadded:: 4.8.0
+
+ See :pep:`727`.
+
+ .. attribute:: documentation
+
+ The documentation string passed to :class:`Doc`.
+
+
+Capsule objects
+~~~~~~~~~~~~~~~
+
+.. class:: CapsuleType
+
+ The type of :py:ref:`capsule objects <capsules>`.
+   See :py:class:`types.CapsuleType`; in the standard library since Python 3.13.
+
+ Note that this may not exist on all implementations of Python; it is only
+ guaranteed to exist on CPython.
+
+ .. versionadded:: 4.12.0
+
+
+Pure aliases
+~~~~~~~~~~~~
+
+Most of these are simply re-exported from the :mod:`typing` module on all supported
+versions of Python, but all are listed here for completeness.
+
+.. class:: AbstractSet
+
+ See :py:class:`typing.AbstractSet`.
+
+ .. versionadded:: 4.7.0
+
+.. data:: AnyStr
+
+ See :py:data:`typing.AnyStr`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: AsyncContextManager
+
+ See :py:class:`typing.AsyncContextManager`. In ``typing`` since 3.5.4 and 3.6.2.
+
+ .. versionchanged:: 4.12.0
+
+ ``AsyncContextManager`` now has an optional second parameter, defaulting to
+ ``Optional[bool]``, signifying the return type of the ``__aexit__`` method.
+
+.. class:: AsyncGenerator
+
+ See :py:class:`typing.AsyncGenerator`. In ``typing`` since 3.6.1.
+
+ .. versionchanged:: 4.12.0
+
+ The second type parameter is now optional (it defaults to ``None``).
+
+.. class:: AsyncIterable
+
+ See :py:class:`typing.AsyncIterable`. In ``typing`` since 3.5.2.
+
+.. class:: AsyncIterator
+
+ See :py:class:`typing.AsyncIterator`. In ``typing`` since 3.5.2.
+
+.. class:: Awaitable
+
+ See :py:class:`typing.Awaitable`. In ``typing`` since 3.5.2.
+
+.. class:: BinaryIO
+
+ See :py:class:`typing.BinaryIO`.
+
+ .. versionadded:: 4.7.0
+
+.. data:: Callable
+
+ See :py:data:`typing.Callable`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: ChainMap
+
+ See :py:class:`typing.ChainMap`. In ``typing`` since 3.5.4 and 3.6.1.
+
+.. data:: ClassVar
+
+ See :py:data:`typing.ClassVar` and :pep:`526`. In ``typing`` since 3.5.3.
+
+.. class:: Collection
+
+ See :py:class:`typing.Collection`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Container
+
+ See :py:class:`typing.Container`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: ContextManager
+
+ See :py:class:`typing.ContextManager`. In ``typing`` since 3.5.4.
+
+ .. versionchanged:: 4.12.0
+
+ ``ContextManager`` now has an optional second parameter, defaulting to
+ ``Optional[bool]``, signifying the return type of the ``__exit__`` method.
+
+.. class:: Coroutine
+
+ See :py:class:`typing.Coroutine`. In ``typing`` since 3.5.3.
+
+.. class:: Counter
+
+ See :py:class:`typing.Counter`. In ``typing`` since 3.5.4 and 3.6.1.
+
+.. class:: DefaultDict
+
+ See :py:class:`typing.DefaultDict`. In ``typing`` since 3.5.2.
+
+.. class:: Deque
+
+ See :py:class:`typing.Deque`. In ``typing`` since 3.5.4 and 3.6.1.
+
+.. class:: Dict
+
+ See :py:class:`typing.Dict`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: ForwardRef
+
+ See :py:class:`typing.ForwardRef`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: FrozenSet
+
+ See :py:class:`typing.FrozenSet`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Generator
+
+ See :py:class:`typing.Generator`.
+
+ .. versionadded:: 4.7.0
+
+ .. versionchanged:: 4.12.0
+
+ The second type and third type parameters are now optional
+ (they both default to ``None``).
+
+.. class:: Generic
+
+ See :py:class:`typing.Generic`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Hashable
+
+ See :py:class:`typing.Hashable`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: IO
+
+ See :py:class:`typing.IO`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: ItemsView
+
+ See :py:class:`typing.ItemsView`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Iterable
+
+ See :py:class:`typing.Iterable`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Iterator
+
+ See :py:class:`typing.Iterator`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: KeysView
+
+ See :py:class:`typing.KeysView`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: List
+
+ See :py:class:`typing.List`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Mapping
+
+ See :py:class:`typing.Mapping`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: MappingView
+
+ See :py:class:`typing.MappingView`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Match
+
+ See :py:class:`typing.Match`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: MutableMapping
+
+ See :py:class:`typing.MutableMapping`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: MutableSequence
+
+ See :py:class:`typing.MutableSequence`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: MutableSet
+
+ See :py:class:`typing.MutableSet`.
+
+ .. versionadded:: 4.7.0
+
+.. data:: NoReturn
+
+ See :py:data:`typing.NoReturn`. In ``typing`` since 3.5.4 and 3.6.2.
+
+.. data:: Optional
+
+ See :py:data:`typing.Optional`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: OrderedDict
+
+ See :py:class:`typing.OrderedDict`. In ``typing`` since 3.7.2.
+
+.. class:: Pattern
+
+ See :py:class:`typing.Pattern`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Reversible
+
+ See :py:class:`typing.Reversible`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Sequence
+
+ See :py:class:`typing.Sequence`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Set
+
+ See :py:class:`typing.Set`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Sized
+
+ See :py:class:`typing.Sized`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Text
+
+ See :py:class:`typing.Text`. In ``typing`` since 3.5.2.
+
+.. class:: TextIO
+
+ See :py:class:`typing.TextIO`.
+
+ .. versionadded:: 4.7.0
+
+.. data:: Tuple
+
+ See :py:data:`typing.Tuple`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: Type
+
+ See :py:class:`typing.Type`. In ``typing`` since 3.5.2.
+
+.. data:: TYPE_CHECKING
+
+ See :py:data:`typing.TYPE_CHECKING`. In ``typing`` since 3.5.2.
+
+.. data:: Union
+
+ See :py:data:`typing.Union`.
+
+ .. versionadded:: 4.7.0
+
+.. class:: ValuesView
+
+ See :py:class:`typing.ValuesView`.
+
+ .. versionadded:: 4.7.0
+
+.. function:: cast
+
+ See :py:func:`typing.cast`.
+
+ .. versionadded:: 4.7.0
+
+.. decorator:: no_type_check
+
+ See :py:func:`typing.no_type_check`.
+
+ .. versionadded:: 4.7.0
+
+.. decorator:: no_type_check_decorator
+
+ See :py:func:`typing.no_type_check_decorator`.
+
+ .. versionadded:: 4.7.0
+
+Security
+--------
+
+``typing_extensions`` is among the most widely used packages in the
+Python ecosystem. Therefore, we take security seriously and strive
+to use a transparent, secure release process.
+
+We commit to the following in order to keep the package secure in the
+future:
+
+* ``typing_extensions`` will never include any native extensions, only
+ pure Python code.
+* ``typing_extensions`` will not have any third-party dependencies.
+* We will follow best practices for a secure release process.
+
+If you have any feedback on our security process, please `open an issue
+<https://github.com/python/typing_extensions/issues/new>`__. To report
+an issue privately, use `GitHub's private reporting feature
+<https://github.com/python/typing_extensions/security>`__.
diff --git a/doc/make.bat b/doc/make.bat
new file mode 100644
index 0000000..32bb245
--- /dev/null
+++ b/doc/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.https://www.sphinx-doc.org/
+ exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..3388d55
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,101 @@
+# Build system requirements.
+[build-system]
+requires = ["flit_core >=3.4,<4"]
+build-backend = "flit_core.buildapi"
+
+# Project metadata
+[project]
+name = "typing_extensions"
+version = "4.12.2"
+description = "Backported and Experimental Type Hints for Python 3.8+"
+readme = "README.md"
+requires-python = ">=3.8"
+license = { file = "LICENSE" }
+keywords = [
+ "annotations",
+ "backport",
+ "checker",
+ "checking",
+ "function",
+ "hinting",
+ "hints",
+ "type",
+ "typechecking",
+ "typehinting",
+ "typehints",
+ "typing",
+]
+# Classifiers list: https://pypi.org/classifiers/
+classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Environment :: Console",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Python Software Foundation License",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3 :: Only",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Topic :: Software Development",
+]
+
+[project.urls]
+Home = "https://github.com/python/typing_extensions"
+Repository = "https://github.com/python/typing_extensions"
+Changes = "https://github.com/python/typing_extensions/blob/main/CHANGELOG.md"
+Documentation = "https://typing-extensions.readthedocs.io/"
+"Bug Tracker" = "https://github.com/python/typing_extensions/issues"
+"Q & A" = "https://github.com/python/typing/discussions"
+
+# Project metadata -- authors. Flit stores this as a list of dicts, so it can't
+# be inline above.
+[[project.authors]]
+name = "Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Michael Lee"
+email = "[email protected]"
+
+[tool.flit.sdist]
+include = ["CHANGELOG.md", "README.md", "tox.ini", "*/*test*.py"]
+exclude = []
+
+[tool.ruff]
+line-length = 90
+target-version = "py38"
+
+[tool.ruff.lint]
+select = [
+ "B",
+ "C4",
+ "E",
+ "F",
+ "I",
+ "ISC001",
+ "PGH004",
+ "RUF",
+ "SIM201",
+ "SIM202",
+ "UP",
+ "W",
+]
+
+# Ignore various "modernization" rules that tell you off for importing/using
+# deprecated things from the typing module, etc.
+ignore = ["UP006", "UP007", "UP013", "UP014", "UP019", "UP035", "UP038"]
+
+[tool.ruff.lint.per-file-ignores]
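+# The leading "!" negates the pattern: these rules are ignored in every file
+# *except* src/typing_extensions.py itself.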
+"!src/typing_extensions.py" = [
+ "B018",
+ "B024",
+ "C4",
+ "E302",
+ "E306",
+ "E501",
+ "E701",
+]
+
+[tool.ruff.lint.isort]
+extra-standard-library = ["tomllib"]
+known-first-party = ["typing_extensions", "_typed_dict_test_helper"]
diff --git a/scripts/check_package.py b/scripts/check_package.py
new file mode 100644
index 0000000..f52df41
--- /dev/null
+++ b/scripts/check_package.py
@@ -0,0 +1,60 @@
+import argparse
+import re
+import sys
+import tomllib
+from pathlib import Path
+
+
+class ValidationError(Exception):
+ pass
+
+
+def check(github_ref: str | None) -> None:
+ pyproject = Path(__file__).parent.parent / "pyproject.toml"
+ if not pyproject.exists():
+ raise ValidationError("pyproject.toml not found")
+ with pyproject.open("rb") as f:
+ data = tomllib.load(f)
+ pyproject_version = data["project"]["version"]
+
+ if github_ref is not None and github_ref.startswith("refs/tags/"):
+ version = github_ref.removeprefix("refs/tags/")
+ if version != pyproject_version:
+ raise ValidationError(
+ f"Version mismatch: GitHub ref is {version}, "
+ f"but pyproject.toml is {pyproject_version}"
+ )
+
+ requires_python = data["project"]["requires-python"]
+ assert sys.version_info[0] == 3, "Rewrite this script when Python 4 comes out"
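+    # requires-python is expected to be exactly of the form ">=3.<minor>"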
+ match = re.fullmatch(r">=3\.(\d+)", requires_python)
+ if not match:
+ raise ValidationError(f"Invalid requires-python: {requires_python!r}")
+ lowest_minor = int(match.group(1))
+
+ description = data["project"]["description"]
+ if not description.endswith(f"3.{lowest_minor}+"):
+ raise ValidationError(f"Description should mention Python 3.{lowest_minor}+")
+
+ classifiers = set(data["project"]["classifiers"])
+ for should_be_supported in range(lowest_minor, sys.version_info[1] + 1):
+ if (
+ f"Programming Language :: Python :: 3.{should_be_supported}"
+ not in classifiers
+ ):
+ raise ValidationError(
+ f"Missing classifier for Python 3.{should_be_supported}"
+ )
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Script to check the package metadata"
+    )
+    parser.add_argument(
+        "github_ref", type=str, help="The current GitHub ref", nargs="?"
+    )
+ args = parser.parse_args()
+ try:
+ check(args.github_ref)
+ except ValidationError as e:
+ print(e)
+ sys.exit(1)
diff --git a/src/_typed_dict_test_helper.py b/src/_typed_dict_test_helper.py
new file mode 100644
index 0000000..73cf919
--- /dev/null
+++ b/src/_typed_dict_test_helper.py
@@ -0,0 +1,23 @@
+from __future__ import annotations
+
+from typing import Generic, Optional, T
+
+from typing_extensions import Annotated, Required, TypedDict
+
+
+# this class must not be imported into test_typing_extensions.py at top level, otherwise
+# the test_get_type_hints_cross_module_subclass test will pass for the wrong reason
+class _DoNotImport:
+ pass
+
+
+class Foo(TypedDict):
+ a: _DoNotImport
+
+
+class FooGeneric(TypedDict, Generic[T]):
+ a: Optional[T]
+
+
+class VeryAnnotated(TypedDict, total=False):
+ a: Annotated[Annotated[Annotated[Required[int], "a"], "b"], "c"]
diff --git a/src/test_typing_extensions.py b/src/test_typing_extensions.py
new file mode 100644
index 0000000..2f98765
--- /dev/null
+++ b/src/test_typing_extensions.py
@@ -0,0 +1,7037 @@
+import abc
+import collections
+import collections.abc
+import contextlib
+import copy
+import gc
+import importlib
+import inspect
+import io
+import pickle
+import re
+import subprocess
+import sys
+import tempfile
+import textwrap
+import types
+import typing
+import warnings
+from collections import defaultdict
+from functools import lru_cache
+from pathlib import Path
+from unittest import TestCase, main, skipIf, skipUnless
+from unittest.mock import patch
+
+import typing_extensions
+from _typed_dict_test_helper import Foo, FooGeneric, VeryAnnotated
+from typing_extensions import (
+ Annotated,
+ Any,
+ AnyStr,
+ AsyncContextManager,
+ AsyncIterator,
+ Awaitable,
+ Buffer,
+ Callable,
+ ClassVar,
+ Concatenate,
+ Dict,
+ Doc,
+ Final,
+ Generic,
+ IntVar,
+ Iterable,
+ Iterator,
+ List,
+ Literal,
+ LiteralString,
+ NamedTuple,
+ Never,
+ NewType,
+ NoDefault,
+ NoReturn,
+ NotRequired,
+ Optional,
+ ParamSpec,
+ ParamSpecArgs,
+ ParamSpecKwargs,
+ Protocol,
+ ReadOnly,
+ Required,
+ Self,
+ Set,
+ Tuple,
+ Type,
+ TypeAlias,
+ TypeAliasType,
+ TypedDict,
+ TypeGuard,
+ TypeIs,
+ TypeVar,
+ TypeVarTuple,
+ Union,
+ Unpack,
+ assert_never,
+ assert_type,
+ clear_overloads,
+ dataclass_transform,
+ deprecated,
+ final,
+ get_args,
+ get_origin,
+ get_original_bases,
+ get_overloads,
+ get_protocol_members,
+ get_type_hints,
+ is_protocol,
+ is_typeddict,
+ no_type_check,
+ overload,
+ override,
+ reveal_type,
+ runtime,
+ runtime_checkable,
+)
+
+NoneType = type(None)
+T = TypeVar("T")
+KT = TypeVar("KT")
+VT = TypeVar("VT")
+
+# Flags used to mark tests that only apply after a specific
+# version of the typing module.
+TYPING_3_9_0 = sys.version_info[:3] >= (3, 9, 0)
+TYPING_3_10_0 = sys.version_info[:3] >= (3, 10, 0)
+
+# 3.11 makes runtime type checks (_type_check) more lenient.
+TYPING_3_11_0 = sys.version_info[:3] >= (3, 11, 0)
+
+# 3.12 changes the representation of Unpack[] (PEP 692)
+TYPING_3_12_0 = sys.version_info[:3] >= (3, 12, 0)
+
+# 3.13 drops support for the keyword argument syntax of TypedDict
+TYPING_3_13_0 = sys.version_info[:3] >= (3, 13, 0)
+
+# https://github.com/python/cpython/pull/27017 was backported into some 3.9 and 3.10
+# versions, but not all
+HAS_FORWARD_MODULE = "module" in inspect.signature(typing._type_check).parameters
+
+skip_if_py313_beta_1 = skipIf(
+ sys.version_info[:5] == (3, 13, 0, 'beta', 1),
+ "Bugfixes will be released in 3.13.0b2"
+)
+
+ANN_MODULE_SOURCE = '''\
+import sys
+from typing import List, Optional
+from functools import wraps
+
+try:
+ __annotations__[1] = 2
+except NameError:
+ assert sys.version_info >= (3, 14)
+
+class C:
+
+ x = 5; y: Optional['C'] = None
+
+from typing import Tuple
+x: int = 5; y: str = x; f: Tuple[int, int]
+
+class M(type):
+ try:
+ __annotations__['123'] = 123
+ except NameError:
+ assert sys.version_info >= (3, 14)
+ o: type = object
+
+(pars): bool = True
+
+class D(C):
+ j: str = 'hi'; k: str= 'bye'
+
+from types import new_class
+h_class = new_class('H', (C,))
+j_class = new_class('J')
+
+class F():
+ z: int = 5
+ def __init__(self, x):
+ pass
+
+class Y(F):
+ def __init__(self):
+ super(F, self).__init__(123)
+
+class Meta(type):
+ def __new__(meta, name, bases, namespace):
+ return super().__new__(meta, name, bases, namespace)
+
+class S(metaclass = Meta):
+ x: str = 'something'
+ y: str = 'something else'
+
+def foo(x: int = 10):
+ def bar(y: List[str]):
+ x: str = 'yes'
+ bar()
+
+def dec(func):
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return wrapper
+'''
+
+ANN_MODULE_2_SOURCE = '''\
+from typing import no_type_check, ClassVar
+
+i: int = 1
+j: int
+x: float = i/10
+
+def f():
+ class C: ...
+ return C()
+
+f().new_attr: object = object()
+
+class C:
+ def __init__(self, x: int) -> None:
+ self.x = x
+
+c = C(5)
+c.new_attr: int = 10
+
+__annotations__ = {}
+
+
+@no_type_check
+class NTC:
+ def meth(self, param: complex) -> None:
+ ...
+
+class CV:
+ var: ClassVar['CV']
+
+CV.var = CV()
+'''
+
+ANN_MODULE_3_SOURCE = '''\
+def f_bad_ann():
+ __annotations__[1] = 2
+
+class C_OK:
+ def __init__(self, x: int) -> None:
+ self.x: no_such_name = x # This one is OK as proposed by Guido
+
+class D_bad_ann:
+ def __init__(self, x: int) -> None:
+ sfel.y: int = 0
+
+def g_bad_ann():
+ no_such_name.attr: int = 0
+'''
+
+
+class BaseTestCase(TestCase):
+ def assertIsSubclass(self, cls, class_or_tuple, msg=None):
+ if not issubclass(cls, class_or_tuple):
+ message = f'{cls!r} is not a subclass of {class_or_tuple!r}'
+ if msg is not None:
+ message += f' : {msg}'
+ raise self.failureException(message)
+
+ def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
+ if issubclass(cls, class_or_tuple):
+ message = f'{cls!r} is a subclass of {class_or_tuple!r}'
+ if msg is not None:
+ message += f' : {msg}'
+ raise self.failureException(message)
+
+
+class Employee:
+ pass
+
+
+class BottomTypeTestsMixin:
+ bottom_type: ClassVar[Any]
+
+ def test_equality(self):
+ self.assertEqual(self.bottom_type, self.bottom_type)
+ self.assertIs(self.bottom_type, self.bottom_type)
+ self.assertNotEqual(self.bottom_type, None)
+
+ def test_get_origin(self):
+ self.assertIs(get_origin(self.bottom_type), None)
+
+ def test_instance_type_error(self):
+ with self.assertRaises(TypeError):
+ isinstance(42, self.bottom_type)
+
+ def test_subclass_type_error(self):
+ with self.assertRaises(TypeError):
+ issubclass(Employee, self.bottom_type)
+ with self.assertRaises(TypeError):
+ issubclass(NoReturn, self.bottom_type)
+
+ def test_not_generic(self):
+ with self.assertRaises(TypeError):
+ self.bottom_type[int]
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class A(self.bottom_type):
+ pass
+ with self.assertRaises(TypeError):
+ class B(type(self.bottom_type)):
+ pass
+
+ def test_cannot_instantiate(self):
+ with self.assertRaises(TypeError):
+ self.bottom_type()
+ with self.assertRaises(TypeError):
+ type(self.bottom_type)()
+
+ def test_pickle(self):
+ for proto in range(pickle.HIGHEST_PROTOCOL):
+ pickled = pickle.dumps(self.bottom_type, protocol=proto)
+ self.assertIs(self.bottom_type, pickle.loads(pickled))
+
+
+class NoReturnTests(BottomTypeTestsMixin, BaseTestCase):
+ bottom_type = NoReturn
+
+ def test_repr(self):
+ if hasattr(typing, 'NoReturn'):
+ self.assertEqual(repr(NoReturn), 'typing.NoReturn')
+ else:
+ self.assertEqual(repr(NoReturn), 'typing_extensions.NoReturn')
+
+ def test_get_type_hints(self):
+ def some(arg: NoReturn) -> NoReturn: ...
+ def some_str(arg: 'NoReturn') -> 'typing.NoReturn': ...
+
+ expected = {'arg': NoReturn, 'return': NoReturn}
+ for target in some, some_str:
+ with self.subTest(target=target):
+ self.assertEqual(gth(target), expected)
+
+ def test_not_equality(self):
+ self.assertNotEqual(NoReturn, Never)
+ self.assertNotEqual(Never, NoReturn)
+
+
+class NeverTests(BottomTypeTestsMixin, BaseTestCase):
+ bottom_type = Never
+
+ def test_repr(self):
+ if hasattr(typing, 'Never'):
+ self.assertEqual(repr(Never), 'typing.Never')
+ else:
+ self.assertEqual(repr(Never), 'typing_extensions.Never')
+
+ def test_get_type_hints(self):
+ def some(arg: Never) -> Never: ...
+ def some_str(arg: 'Never') -> 'typing_extensions.Never': ...
+
+ expected = {'arg': Never, 'return': Never}
+ for target in [some, some_str]:
+ with self.subTest(target=target):
+ self.assertEqual(gth(target), expected)
+
+
+class AssertNeverTests(BaseTestCase):
+ def test_exception(self):
+ with self.assertRaises(AssertionError):
+ assert_never(None)
+
+ value = "some value"
+ with self.assertRaisesRegex(AssertionError, value):
+ assert_never(value)
+
+ # Make sure a huge value doesn't get printed in its entirety
+ huge_value = "a" * 10000
+ with self.assertRaises(AssertionError) as cm:
+ assert_never(huge_value)
+ self.assertLess(
+ len(cm.exception.args[0]),
+ typing_extensions._ASSERT_NEVER_REPR_MAX_LENGTH * 2,
+ )
+
+
+class OverrideTests(BaseTestCase):
+ def test_override(self):
+ class Base:
+ def normal_method(self): ...
+ @staticmethod
+ def static_method_good_order(): ...
+ @staticmethod
+ def static_method_bad_order(): ...
+ @staticmethod
+ def decorator_with_slots(): ...
+
+ class Derived(Base):
+ @override
+ def normal_method(self):
+ return 42
+
+ @staticmethod
+ @override
+ def static_method_good_order():
+ return 42
+
+ @override
+ @staticmethod
+ def static_method_bad_order():
+ return 42
+
+ self.assertIsSubclass(Derived, Base)
+ instance = Derived()
+ self.assertEqual(instance.normal_method(), 42)
+ self.assertIs(True, instance.normal_method.__override__)
+ self.assertEqual(Derived.static_method_good_order(), 42)
+ self.assertIs(True, Derived.static_method_good_order.__override__)
+ self.assertEqual(Derived.static_method_bad_order(), 42)
+ self.assertIs(False, hasattr(Derived.static_method_bad_order, "__override__"))
+
+
+class DeprecatedTests(BaseTestCase):
+ def test_dunder_deprecated(self):
+ @deprecated("A will go away soon")
+ class A:
+ pass
+
+ self.assertEqual(A.__deprecated__, "A will go away soon")
+ self.assertIsInstance(A, type)
+
+ @deprecated("b will go away soon")
+ def b():
+ pass
+
+ self.assertEqual(b.__deprecated__, "b will go away soon")
+ self.assertIsInstance(b, types.FunctionType)
+
+ @overload
+ @deprecated("no more ints")
+ def h(x: int) -> int: ...
+ @overload
+ def h(x: str) -> str: ...
+ def h(x):
+ return x
+
+ overloads = get_overloads(h)
+ self.assertEqual(len(overloads), 2)
+ self.assertEqual(overloads[0].__deprecated__, "no more ints")
+
+ def test_class(self):
+ @deprecated("A will go away soon")
+ class A:
+ pass
+
+ with self.assertWarnsRegex(DeprecationWarning, "A will go away soon"):
+ A()
+ with self.assertWarnsRegex(DeprecationWarning, "A will go away soon"):
+ with self.assertRaises(TypeError):
+ A(42)
+
+ def test_class_with_init(self):
+ @deprecated("HasInit will go away soon")
+ class HasInit:
+ def __init__(self, x):
+ self.x = x
+
+ with self.assertWarnsRegex(DeprecationWarning, "HasInit will go away soon"):
+ instance = HasInit(42)
+ self.assertEqual(instance.x, 42)
+
+ def test_class_with_new(self):
+ has_new_called = False
+
+ @deprecated("HasNew will go away soon")
+ class HasNew:
+ def __new__(cls, x):
+ nonlocal has_new_called
+ has_new_called = True
+ return super().__new__(cls)
+
+ def __init__(self, x) -> None:
+ self.x = x
+
+ with self.assertWarnsRegex(DeprecationWarning, "HasNew will go away soon"):
+ instance = HasNew(42)
+ self.assertEqual(instance.x, 42)
+ self.assertTrue(has_new_called)
+
+ def test_class_with_inherited_new(self):
+ new_base_called = False
+
+ class NewBase:
+ def __new__(cls, x):
+ nonlocal new_base_called
+ new_base_called = True
+ return super().__new__(cls)
+
+ def __init__(self, x) -> None:
+ self.x = x
+
+ @deprecated("HasInheritedNew will go away soon")
+ class HasInheritedNew(NewBase):
+ pass
+
+ with self.assertWarnsRegex(DeprecationWarning, "HasInheritedNew will go away soon"):
+ instance = HasInheritedNew(42)
+ self.assertEqual(instance.x, 42)
+ self.assertTrue(new_base_called)
+
+ def test_class_with_new_but_no_init(self):
+ new_called = False
+
+ @deprecated("HasNewNoInit will go away soon")
+ class HasNewNoInit:
+ def __new__(cls, x):
+ nonlocal new_called
+ new_called = True
+ obj = super().__new__(cls)
+ obj.x = x
+ return obj
+
+ with self.assertWarnsRegex(DeprecationWarning, "HasNewNoInit will go away soon"):
+ instance = HasNewNoInit(42)
+ self.assertEqual(instance.x, 42)
+ self.assertTrue(new_called)
+
+ def test_mixin_class(self):
+ @deprecated("Mixin will go away soon")
+ class Mixin:
+ pass
+
+ class Base:
+ def __init__(self, a) -> None:
+ self.a = a
+
+ with self.assertWarnsRegex(DeprecationWarning, "Mixin will go away soon"):
+ class Child(Base, Mixin):
+ pass
+
+ instance = Child(42)
+ self.assertEqual(instance.a, 42)
+
+ def test_existing_init_subclass(self):
+ @deprecated("C will go away soon")
+ class C:
+ def __init_subclass__(cls) -> None:
+ cls.inited = True
+
+ with self.assertWarnsRegex(DeprecationWarning, "C will go away soon"):
+ C()
+
+ with self.assertWarnsRegex(DeprecationWarning, "C will go away soon"):
+ class D(C):
+ pass
+
+ self.assertTrue(D.inited)
+ self.assertIsInstance(D(), D) # no deprecation
+
+ def test_existing_init_subclass_in_base(self):
+ class Base:
+ def __init_subclass__(cls, x) -> None:
+ cls.inited = x
+
+ @deprecated("C will go away soon")
+ class C(Base, x=42):
+ pass
+
+ self.assertEqual(C.inited, 42)
+
+ with self.assertWarnsRegex(DeprecationWarning, "C will go away soon"):
+ C()
+
+ with self.assertWarnsRegex(DeprecationWarning, "C will go away soon"):
+ class D(C, x=3):
+ pass
+
+ self.assertEqual(D.inited, 3)
+
+ def test_init_subclass_has_correct_cls(self):
+ init_subclass_saw = None
+
+ @deprecated("Base will go away soon")
+ class Base:
+ def __init_subclass__(cls) -> None:
+ nonlocal init_subclass_saw
+ init_subclass_saw = cls
+
+ self.assertIsNone(init_subclass_saw)
+
+ with self.assertWarnsRegex(DeprecationWarning, "Base will go away soon"):
+ class C(Base):
+ pass
+
+ self.assertIs(init_subclass_saw, C)
+
+ def test_init_subclass_with_explicit_classmethod(self):
+ init_subclass_saw = None
+
+ @deprecated("Base will go away soon")
+ class Base:
+ @classmethod
+ def __init_subclass__(cls) -> None:
+ nonlocal init_subclass_saw
+ init_subclass_saw = cls
+
+ self.assertIsNone(init_subclass_saw)
+
+ with self.assertWarnsRegex(DeprecationWarning, "Base will go away soon"):
+ class C(Base):
+ pass
+
+ self.assertIs(init_subclass_saw, C)
+
+ def test_function(self):
+ @deprecated("b will go away soon")
+ def b():
+ pass
+
+ with self.assertWarnsRegex(DeprecationWarning, "b will go away soon"):
+ b()
+
+ def test_method(self):
+ class Capybara:
+ @deprecated("x will go away soon")
+ def x(self):
+ pass
+
+ instance = Capybara()
+ with self.assertWarnsRegex(DeprecationWarning, "x will go away soon"):
+ instance.x()
+
+ def test_property(self):
+ class Capybara:
+ @property
+ @deprecated("x will go away soon")
+ def x(self):
+ pass
+
+ @property
+ def no_more_setting(self):
+ return 42
+
+ @no_more_setting.setter
+ @deprecated("no more setting")
+ def no_more_setting(self, value):
+ pass
+
+ instance = Capybara()
+ with self.assertWarnsRegex(DeprecationWarning, "x will go away soon"):
+ instance.x
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ self.assertEqual(instance.no_more_setting, 42)
+
+ with self.assertWarnsRegex(DeprecationWarning, "no more setting"):
+ instance.no_more_setting = 42
+
+ def test_category(self):
+ @deprecated("c will go away soon", category=RuntimeWarning)
+ def c():
+ pass
+
+ with self.assertWarnsRegex(RuntimeWarning, "c will go away soon"):
+ c()
+
+ def test_turn_off_warnings(self):
+ @deprecated("d will go away soon", category=None)
+ def d():
+ pass
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ d()
+
+ def test_only_strings_allowed(self):
+ with self.assertRaisesRegex(
+ TypeError,
+ "Expected an object of type str for 'message', not 'type'"
+ ):
+ @deprecated
+ class Foo: ...
+
+ with self.assertRaisesRegex(
+ TypeError,
+ "Expected an object of type str for 'message', not 'function'"
+ ):
+ @deprecated
+ def foo(): ...
+
+ def test_no_retained_references_to_wrapper_instance(self):
+ @deprecated('depr')
+ def d(): pass
+
+ self.assertFalse(any(
+ isinstance(cell.cell_contents, deprecated) for cell in d.__closure__
+ ))
+
+
+class AnyTests(BaseTestCase):
+ def test_can_subclass(self):
+ class Mock(Any): pass
+ self.assertTrue(issubclass(Mock, Any))
+ self.assertIsInstance(Mock(), Mock)
+
+ class Something: pass
+ self.assertFalse(issubclass(Something, Any))
+ self.assertNotIsInstance(Something(), Mock)
+
+ class MockSomething(Something, Mock): pass
+ self.assertTrue(issubclass(MockSomething, Any))
+ ms = MockSomething()
+ self.assertIsInstance(ms, MockSomething)
+ self.assertIsInstance(ms, Something)
+ self.assertIsInstance(ms, Mock)
+
+ class SubclassesAny(Any):
+ ...
+
+ def test_repr(self):
+ if sys.version_info >= (3, 11):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(Any), f"{mod_name}.Any")
+
+ @skipIf(sys.version_info[:3] == (3, 11, 0), "A bug was fixed in 3.11.1")
+ def test_repr_on_Any_subclass(self):
+ self.assertEqual(
+ repr(self.SubclassesAny),
+ f"<class '{self.SubclassesAny.__module__}.AnyTests.SubclassesAny'>"
+ )
+
+ def test_instantiation(self):
+ with self.assertRaises(TypeError):
+ Any()
+
+ self.SubclassesAny()
+
+ def test_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(object(), Any)
+
+ isinstance(object(), self.SubclassesAny)
+
+
+class ClassVarTests(BaseTestCase):
+
+ def test_basics(self):
+ if not TYPING_3_11_0:
+ with self.assertRaises(TypeError):
+ ClassVar[1]
+ with self.assertRaises(TypeError):
+ ClassVar[int, str]
+ with self.assertRaises(TypeError):
+ ClassVar[int][str]
+
+ def test_repr(self):
+ if hasattr(typing, 'ClassVar'):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(ClassVar), mod_name + '.ClassVar')
+ cv = ClassVar[int]
+ self.assertEqual(repr(cv), mod_name + '.ClassVar[int]')
+ cv = ClassVar[Employee]
+ self.assertEqual(repr(cv), mod_name + f'.ClassVar[{__name__}.Employee]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(ClassVar)):
+ pass
+ with self.assertRaises(TypeError):
+ class D(type(ClassVar[int])):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ ClassVar()
+ with self.assertRaises(TypeError):
+ type(ClassVar)()
+ with self.assertRaises(TypeError):
+ type(ClassVar[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, ClassVar[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, ClassVar)
+
+
+class FinalTests(BaseTestCase):
+
+ def test_basics(self):
+ if not TYPING_3_11_0:
+ with self.assertRaises(TypeError):
+ Final[1]
+ with self.assertRaises(TypeError):
+ Final[int, str]
+ with self.assertRaises(TypeError):
+ Final[int][str]
+
+ def test_repr(self):
+ self.assertEqual(repr(Final), 'typing.Final')
+ cv = Final[int]
+ self.assertEqual(repr(cv), 'typing.Final[int]')
+ cv = Final[Employee]
+ self.assertEqual(repr(cv), f'typing.Final[{__name__}.Employee]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(Final)):
+ pass
+ with self.assertRaises(TypeError):
+ class D(type(Final[int])):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ Final()
+ with self.assertRaises(TypeError):
+ type(Final)()
+ with self.assertRaises(TypeError):
+ type(Final[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, Final[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, Final)
+
+
+class RequiredTests(BaseTestCase):
+
+ def test_basics(self):
+ if not TYPING_3_11_0:
+ with self.assertRaises(TypeError):
+ Required[1]
+ with self.assertRaises(TypeError):
+ Required[int, str]
+ with self.assertRaises(TypeError):
+ Required[int][str]
+
+ def test_repr(self):
+ if hasattr(typing, 'Required'):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(Required), f'{mod_name}.Required')
+ cv = Required[int]
+ self.assertEqual(repr(cv), f'{mod_name}.Required[int]')
+ cv = Required[Employee]
+ self.assertEqual(repr(cv), f'{mod_name}.Required[{__name__}.Employee]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(Required)):
+ pass
+ with self.assertRaises(TypeError):
+ class D(type(Required[int])):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ Required()
+ with self.assertRaises(TypeError):
+ type(Required)()
+ with self.assertRaises(TypeError):
+ type(Required[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, Required[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, Required)
+
+
+class NotRequiredTests(BaseTestCase):
+
+ def test_basics(self):
+ if not TYPING_3_11_0:
+ with self.assertRaises(TypeError):
+ NotRequired[1]
+ with self.assertRaises(TypeError):
+ NotRequired[int, str]
+ with self.assertRaises(TypeError):
+ NotRequired[int][str]
+
+ def test_repr(self):
+ if hasattr(typing, 'NotRequired'):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(NotRequired), f'{mod_name}.NotRequired')
+ cv = NotRequired[int]
+ self.assertEqual(repr(cv), f'{mod_name}.NotRequired[int]')
+ cv = NotRequired[Employee]
+        self.assertEqual(repr(cv), f'{mod_name}.NotRequired[{__name__}.Employee]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(NotRequired)):
+ pass
+ with self.assertRaises(TypeError):
+ class D(type(NotRequired[int])):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ NotRequired()
+ with self.assertRaises(TypeError):
+ type(NotRequired)()
+ with self.assertRaises(TypeError):
+ type(NotRequired[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, NotRequired[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, NotRequired)
+
+
+class IntVarTests(BaseTestCase):
+ def test_valid(self):
+ IntVar("T_ints")
+
+ def test_invalid(self):
+ with self.assertRaises(TypeError):
+ IntVar("T_ints", int)
+ with self.assertRaises(TypeError):
+ IntVar("T_ints", bound=int)
+ with self.assertRaises(TypeError):
+ IntVar("T_ints", covariant=True)
+
+
+class LiteralTests(BaseTestCase):
+ def test_basics(self):
+ Literal[1]
+ Literal[1, 2, 3]
+ Literal["x", "y", "z"]
+ Literal[None]
+
+ def test_enum(self):
+ import enum
+ class My(enum.Enum):
+ A = 'A'
+
+ self.assertEqual(Literal[My.A].__args__, (My.A,))
+
+ def test_illegal_parameters_do_not_raise_runtime_errors(self):
+ # Type checkers should reject these types, but we do not
+ # raise errors at runtime to maintain maximum flexibility
+ Literal[int]
+ Literal[Literal[1, 2], Literal[4, 5]]
+ Literal[3j + 2, ..., ()]
+ Literal[b"foo", "bar"]
+ Literal[{"foo": 3, "bar": 4}]
+ Literal[T]
+
+ def test_literals_inside_other_types(self):
+ List[Literal[1, 2, 3]]
+ List[Literal[("foo", "bar", "baz")]]
+
+ def test_repr(self):
+ # we backport various bugfixes that were added in 3.10.1 and earlier
+ if sys.version_info >= (3, 10, 1):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(Literal[1]), mod_name + ".Literal[1]")
+ self.assertEqual(repr(Literal[1, True, "foo"]), mod_name + ".Literal[1, True, 'foo']")
+ self.assertEqual(repr(Literal[int]), mod_name + ".Literal[int]")
+ self.assertEqual(repr(Literal), mod_name + ".Literal")
+ self.assertEqual(repr(Literal[None]), mod_name + ".Literal[None]")
+ self.assertEqual(repr(Literal[1, 2, 3, 3]), mod_name + ".Literal[1, 2, 3]")
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ Literal()
+ with self.assertRaises(TypeError):
+ Literal[1]()
+ with self.assertRaises(TypeError):
+ type(Literal)()
+ with self.assertRaises(TypeError):
+ type(Literal[1])()
+
+ def test_no_isinstance_or_issubclass(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, Literal[1])
+ with self.assertRaises(TypeError):
+ isinstance(int, Literal[1])
+ with self.assertRaises(TypeError):
+ issubclass(1, Literal[1])
+ with self.assertRaises(TypeError):
+ issubclass(int, Literal[1])
+
+ def test_no_subclassing(self):
+ with self.assertRaises(TypeError):
+ class Foo(Literal[1]): pass
+ with self.assertRaises(TypeError):
+ class Bar(Literal): pass
+
+ def test_no_multiple_subscripts(self):
+ with self.assertRaises(TypeError):
+ Literal[1][1]
+
+ def test_equal(self):
+ self.assertNotEqual(Literal[0], Literal[False])
+ self.assertNotEqual(Literal[True], Literal[1])
+ self.assertNotEqual(Literal[1], Literal[2])
+ self.assertNotEqual(Literal[1, True], Literal[1])
+ self.assertNotEqual(Literal[1, True], Literal[1, 1])
+ self.assertNotEqual(Literal[1, 2], Literal[True, 2])
+ self.assertEqual(Literal[1], Literal[1])
+ self.assertEqual(Literal[1, 2], Literal[2, 1])
+ self.assertEqual(Literal[1, 2, 3], Literal[1, 2, 3, 3])
+
+ def test_hash(self):
+ self.assertEqual(hash(Literal[1]), hash(Literal[1]))
+ self.assertEqual(hash(Literal[1, 2]), hash(Literal[2, 1]))
+ self.assertEqual(hash(Literal[1, 2, 3]), hash(Literal[1, 2, 3, 3]))
+
+ def test_args(self):
+ self.assertEqual(Literal[1, 2, 3].__args__, (1, 2, 3))
+ self.assertEqual(Literal[1, 2, 3, 3].__args__, (1, 2, 3))
+ self.assertEqual(Literal[1, Literal[2], Literal[3, 4]].__args__, (1, 2, 3, 4))
+ # Mutable arguments will not be deduplicated
+ self.assertEqual(Literal[[], []].__args__, ([], []))
+
+ def test_union_of_literals(self):
+ self.assertEqual(Union[Literal[1], Literal[2]].__args__,
+ (Literal[1], Literal[2]))
+ self.assertEqual(Union[Literal[1], Literal[1]],
+ Literal[1])
+
+ self.assertEqual(Union[Literal[False], Literal[0]].__args__,
+ (Literal[False], Literal[0]))
+ self.assertEqual(Union[Literal[True], Literal[1]].__args__,
+ (Literal[True], Literal[1]))
+
+ import enum
+ class Ints(enum.IntEnum):
+ A = 0
+ B = 1
+
+ self.assertEqual(Union[Literal[Ints.A], Literal[Ints.B]].__args__,
+ (Literal[Ints.A], Literal[Ints.B]))
+
+ self.assertEqual(Union[Literal[Ints.A], Literal[Ints.A]],
+ Literal[Ints.A])
+ self.assertEqual(Union[Literal[Ints.B], Literal[Ints.B]],
+ Literal[Ints.B])
+
+ self.assertEqual(Union[Literal[0], Literal[Ints.A], Literal[False]].__args__,
+ (Literal[0], Literal[Ints.A], Literal[False]))
+ self.assertEqual(Union[Literal[1], Literal[Ints.B], Literal[True]].__args__,
+ (Literal[1], Literal[Ints.B], Literal[True]))
+
+ @skipUnless(TYPING_3_10_0, "Python 3.10+ required")
+ def test_or_type_operator_with_Literal(self):
+ self.assertEqual((Literal[1] | Literal[2]).__args__,
+ (Literal[1], Literal[2]))
+
+ self.assertEqual((Literal[0] | Literal[False]).__args__,
+ (Literal[0], Literal[False]))
+ self.assertEqual((Literal[1] | Literal[True]).__args__,
+ (Literal[1], Literal[True]))
+
+ self.assertEqual(Literal[1] | Literal[1], Literal[1])
+ self.assertEqual(Literal['a'] | Literal['a'], Literal['a'])
+
+ import enum
+ class Ints(enum.IntEnum):
+ A = 0
+ B = 1
+
+ self.assertEqual(Literal[Ints.A] | Literal[Ints.A], Literal[Ints.A])
+ self.assertEqual(Literal[Ints.B] | Literal[Ints.B], Literal[Ints.B])
+
+ self.assertEqual((Literal[Ints.B] | Literal[Ints.A]).__args__,
+ (Literal[Ints.B], Literal[Ints.A]))
+
+ self.assertEqual((Literal[0] | Literal[Ints.A]).__args__,
+ (Literal[0], Literal[Ints.A]))
+ self.assertEqual((Literal[1] | Literal[Ints.B]).__args__,
+ (Literal[1], Literal[Ints.B]))
+
+ def test_flatten(self):
+ l1 = Literal[Literal[1], Literal[2], Literal[3]]
+ l2 = Literal[Literal[1, 2], 3]
+ l3 = Literal[Literal[1, 2, 3]]
+ for lit in l1, l2, l3:
+ self.assertEqual(lit, Literal[1, 2, 3])
+ self.assertEqual(lit.__args__, (1, 2, 3))
+
+ def test_does_not_flatten_enum(self):
+ import enum
+ class Ints(enum.IntEnum):
+ A = 1
+ B = 2
+
+ literal = Literal[
+ Literal[Ints.A],
+ Literal[Ints.B],
+ Literal[1],
+ Literal[2],
+ ]
+ self.assertEqual(literal.__args__, (Ints.A, Ints.B, 1, 2))
+
+ def test_caching_of_Literal_respects_type(self):
+ self.assertIs(type(Literal[1].__args__[0]), int)
+ self.assertIs(type(Literal[True].__args__[0]), bool)
+
+
+class MethodHolder:
+ @classmethod
+ def clsmethod(cls): ...
+ @staticmethod
+ def stmethod(): ...
+ def method(self): ...
+
+
+if TYPING_3_11_0:
+ registry_holder = typing
+else:
+ registry_holder = typing_extensions
+
+
+class OverloadTests(BaseTestCase):
+
+ def test_overload_fails(self):
+ with self.assertRaises(RuntimeError):
+
+ @overload
+ def blah():
+ pass
+
+ blah()
+
+ def test_overload_succeeds(self):
+ @overload
+ def blah():
+ pass
+
+ def blah():
+ pass
+
+ blah()
+
+ @skipIf(
+ sys.implementation.name == "pypy",
+ "sum() and print() are not compiled in pypy"
+ )
+ @patch(
+ f"{registry_holder.__name__}._overload_registry",
+ defaultdict(lambda: defaultdict(dict))
+ )
+ def test_overload_on_compiled_functions(self):
+ registry = registry_holder._overload_registry
+ # The registry starts out empty:
+ self.assertEqual(registry, {})
+
+ # This should just not fail:
+ overload(sum)
+ overload(print)
+
+ # No overloads are recorded:
+ self.assertEqual(get_overloads(sum), [])
+ self.assertEqual(get_overloads(print), [])
+
+ def set_up_overloads(self):
+ def blah():
+ pass
+
+ overload1 = blah
+ overload(blah)
+
+ def blah():
+ pass
+
+ overload2 = blah
+ overload(blah)
+
+ def blah():
+ pass
+
+ return blah, [overload1, overload2]
+
+ # Make sure we don't clear the global overload registry
+ @patch(
+ f"{registry_holder.__name__}._overload_registry",
+ defaultdict(lambda: defaultdict(dict))
+ )
+ def test_overload_registry(self):
+ registry = registry_holder._overload_registry
+ # The registry starts out empty
+ self.assertEqual(registry, {})
+
+ impl, overloads = self.set_up_overloads()
+ self.assertNotEqual(registry, {})
+ self.assertEqual(list(get_overloads(impl)), overloads)
+
+ def some_other_func(): pass
+ overload(some_other_func)
+ other_overload = some_other_func
+ def some_other_func(): pass
+ self.assertEqual(list(get_overloads(some_other_func)), [other_overload])
+ # Unrelated function still has no overloads:
+ def not_overloaded(): pass
+ self.assertEqual(list(get_overloads(not_overloaded)), [])
+
+ # Make sure that after we clear all overloads, the registry is
+ # completely empty.
+ clear_overloads()
+ self.assertEqual(registry, {})
+ self.assertEqual(get_overloads(impl), [])
+
+ # Querying a function with no overloads shouldn't change the registry.
+ def the_only_one(): pass
+ self.assertEqual(get_overloads(the_only_one), [])
+ self.assertEqual(registry, {})
+
+ def test_overload_registry_repeated(self):
+ for _ in range(2):
+ impl, overloads = self.set_up_overloads()
+
+ self.assertEqual(list(get_overloads(impl)), overloads)
+
+
+class AssertTypeTests(BaseTestCase):
+
+ def test_basics(self):
+ arg = 42
+ self.assertIs(assert_type(arg, int), arg)
+ self.assertIs(assert_type(arg, Union[str, float]), arg)
+ self.assertIs(assert_type(arg, AnyStr), arg)
+ self.assertIs(assert_type(arg, None), arg)
+
+ def test_errors(self):
+ # Bogus calls are not expected to fail.
+ arg = 42
+ self.assertIs(assert_type(arg, 42), arg)
+ self.assertIs(assert_type(arg, 'hello'), arg)
+
+
+T_a = TypeVar('T_a')
+
+class AwaitableWrapper(Awaitable[T_a]):
+
+ def __init__(self, value):
+ self.value = value
+
+ def __await__(self) -> typing.Iterator[T_a]:
+ yield
+ return self.value
+
+class AsyncIteratorWrapper(AsyncIterator[T_a]):
+
+ def __init__(self, value: Iterable[T_a]):
+ self.value = value
+
+ def __aiter__(self) -> AsyncIterator[T_a]:
+ return self
+
+ async def __anext__(self) -> T_a:
+ data = await self.value
+ if data:
+ return data
+ else:
+ raise StopAsyncIteration
+
+class ACM:
+ async def __aenter__(self) -> int:
+ return 42
+
+ async def __aexit__(self, etype, eval, tb):
+ return None
+
+
+class A:
+ y: float
+class B(A):
+ x: ClassVar[Optional['B']] = None
+ y: int
+ b: int
+class CSub(B):
+ z: ClassVar['CSub'] = B()
+class G(Generic[T]):
+ lst: ClassVar[List[T]] = []
+
+class Loop:
+ attr: Final['Loop']
+
+class NoneAndForward:
+ parent: 'NoneAndForward'
+ meaning: None
+
+class XRepr(NamedTuple):
+ x: int
+ y: int = 1
+
+ def __str__(self):
+ return f'{self.x} -> {self.y}'
+
+ def __add__(self, other):
+ return 0
+
+@runtime_checkable
+class HasCallProtocol(Protocol):
+ __call__: typing.Callable
+
+
+async def g_with(am: AsyncContextManager[int]):
+ x: int
+ async with am as x:
+ return x
+
+try:
+ g_with(ACM()).send(None)
+except StopIteration as e:
+ assert e.args[0] == 42
+
+Label = TypedDict('Label', [('label', str)])
+
+class Point2D(TypedDict):
+ x: int
+ y: int
+
+class Point2Dor3D(Point2D, total=False):
+ z: int
+
+class LabelPoint2D(Point2D, Label): ...
+
+class Options(TypedDict, total=False):
+ log_level: int
+ log_path: str
+
+class BaseAnimal(TypedDict):
+ name: str
+
+class Animal(BaseAnimal, total=False):
+ voice: str
+ tail: bool
+
+class Cat(Animal):
+ fur_color: str
+
+class TotalMovie(TypedDict):
+ title: str
+ year: NotRequired[int]
+
+class NontotalMovie(TypedDict, total=False):
+ title: Required[str]
+ year: int
+
+class ParentNontotalMovie(TypedDict, total=False):
+ title: Required[str]
+
+class ChildTotalMovie(ParentNontotalMovie):
+ year: NotRequired[int]
+
+class ParentDeeplyAnnotatedMovie(TypedDict):
+ title: Annotated[Annotated[Required[str], "foobar"], "another level"]
+
+class ChildDeeplyAnnotatedMovie(ParentDeeplyAnnotatedMovie):
+ year: NotRequired[Annotated[int, 2000]]
+
+class AnnotatedMovie(TypedDict):
+ title: Annotated[Required[str], "foobar"]
+ year: NotRequired[Annotated[int, 2000]]
+
+class WeirdlyQuotedMovie(TypedDict):
+ title: Annotated['Annotated[Required[str], "foobar"]', "another level"]
+ year: NotRequired['Annotated[int, 2000]']
+
+
+gth = get_type_hints
+
+
+class GetTypeHintTests(BaseTestCase):
+ @classmethod
+ def setUpClass(cls):
+ with tempfile.TemporaryDirectory() as tempdir:
+ sys.path.append(tempdir)
+ Path(tempdir, "ann_module.py").write_text(ANN_MODULE_SOURCE)
+ Path(tempdir, "ann_module2.py").write_text(ANN_MODULE_2_SOURCE)
+ Path(tempdir, "ann_module3.py").write_text(ANN_MODULE_3_SOURCE)
+ cls.ann_module = importlib.import_module("ann_module")
+ cls.ann_module2 = importlib.import_module("ann_module2")
+ cls.ann_module3 = importlib.import_module("ann_module3")
+ sys.path.pop()
+
+ @classmethod
+ def tearDownClass(cls):
+ for modname in "ann_module", "ann_module2", "ann_module3":
+ delattr(cls, modname)
+ del sys.modules[modname]
+
+ def test_get_type_hints_modules(self):
+ if sys.version_info >= (3, 14):
+ ann_module_type_hints = {'f': Tuple[int, int], 'x': int, 'y': str}
+ else:
+ ann_module_type_hints = {1: 2, 'f': Tuple[int, int], 'x': int, 'y': str}
+ self.assertEqual(gth(self.ann_module), ann_module_type_hints)
+ self.assertEqual(gth(self.ann_module2), {})
+ self.assertEqual(gth(self.ann_module3), {})
+
+ def test_get_type_hints_classes(self):
+ self.assertEqual(gth(self.ann_module.C, self.ann_module.__dict__),
+ {'y': Optional[self.ann_module.C]})
+ self.assertIsInstance(gth(self.ann_module.j_class), dict)
+ if sys.version_info >= (3, 14):
+ self.assertEqual(gth(self.ann_module.M), {'o': type})
+ else:
+ self.assertEqual(gth(self.ann_module.M), {'123': 123, 'o': type})
+ self.assertEqual(gth(self.ann_module.D),
+ {'j': str, 'k': str, 'y': Optional[self.ann_module.C]})
+ self.assertEqual(gth(self.ann_module.Y), {'z': int})
+ self.assertEqual(gth(self.ann_module.h_class),
+ {'y': Optional[self.ann_module.C]})
+ self.assertEqual(gth(self.ann_module.S), {'x': str, 'y': str})
+ self.assertEqual(gth(self.ann_module.foo), {'x': int})
+ self.assertEqual(gth(NoneAndForward, globals()),
+ {'parent': NoneAndForward, 'meaning': type(None)})
+
+ def test_respect_no_type_check(self):
+ @no_type_check
+ class NoTpCheck:
+ class Inn:
+ def __init__(self, x: 'not a type'): ... # noqa: F722 # (yes, there's a syntax error in this annotation, that's the point)
+ self.assertTrue(NoTpCheck.__no_type_check__)
+ self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
+ self.assertEqual(gth(self.ann_module2.NTC.meth), {})
+ class ABase(Generic[T]):
+ def meth(x: int): ...
+ @no_type_check
+ class Der(ABase): ...
+ self.assertEqual(gth(ABase.meth), {'x': int})
+
+ def test_get_type_hints_ClassVar(self):
+ self.assertEqual(gth(self.ann_module2.CV, self.ann_module2.__dict__),
+ {'var': ClassVar[self.ann_module2.CV]})
+ self.assertEqual(gth(B, globals()),
+ {'y': int, 'x': ClassVar[Optional[B]], 'b': int})
+ self.assertEqual(gth(CSub, globals()),
+ {'z': ClassVar[CSub], 'y': int, 'b': int,
+ 'x': ClassVar[Optional[B]]})
+ self.assertEqual(gth(G), {'lst': ClassVar[List[T]]})
+
+ def test_final_forward_ref(self):
+ self.assertEqual(gth(Loop, globals())['attr'], Final[Loop])
+ self.assertNotEqual(gth(Loop, globals())['attr'], Final[int])
+ self.assertNotEqual(gth(Loop, globals())['attr'], Final)
+
+
+class GetUtilitiesTestCase(TestCase):
+ def test_get_origin(self):
+ T = TypeVar('T')
+ P = ParamSpec('P')
+ Ts = TypeVarTuple('Ts')
+ class C(Generic[T]): pass
+ self.assertIs(get_origin(C[int]), C)
+ self.assertIs(get_origin(C[T]), C)
+ self.assertIs(get_origin(int), None)
+ self.assertIs(get_origin(ClassVar[int]), ClassVar)
+ self.assertIs(get_origin(Union[int, str]), Union)
+ self.assertIs(get_origin(Literal[42, 43]), Literal)
+ self.assertIs(get_origin(Final[List[int]]), Final)
+ self.assertIs(get_origin(Generic), Generic)
+ self.assertIs(get_origin(Generic[T]), Generic)
+ self.assertIs(get_origin(List[Tuple[T, T]][int]), list)
+ self.assertIs(get_origin(Annotated[T, 'thing']), Annotated)
+ self.assertIs(get_origin(List), list)
+ self.assertIs(get_origin(Tuple), tuple)
+ self.assertIs(get_origin(Callable), collections.abc.Callable)
+ if sys.version_info >= (3, 9):
+ self.assertIs(get_origin(list[int]), list)
+ self.assertIs(get_origin(list), None)
+ self.assertIs(get_origin(P.args), P)
+ self.assertIs(get_origin(P.kwargs), P)
+ self.assertIs(get_origin(Required[int]), Required)
+ self.assertIs(get_origin(NotRequired[int]), NotRequired)
+ self.assertIs(get_origin(Unpack[Ts]), Unpack)
+ self.assertIs(get_origin(Unpack), None)
+
+ def test_get_args(self):
+ T = TypeVar('T')
+ Ts = TypeVarTuple('Ts')
+ class C(Generic[T]): pass
+ self.assertEqual(get_args(C[int]), (int,))
+ self.assertEqual(get_args(C[T]), (T,))
+ self.assertEqual(get_args(int), ())
+ self.assertEqual(get_args(ClassVar[int]), (int,))
+ self.assertEqual(get_args(Union[int, str]), (int, str))
+ self.assertEqual(get_args(Literal[42, 43]), (42, 43))
+ self.assertEqual(get_args(Final[List[int]]), (List[int],))
+ self.assertEqual(get_args(Union[int, Tuple[T, int]][str]),
+ (int, Tuple[str, int]))
+ self.assertEqual(get_args(typing.Dict[int, Tuple[T, T]][Optional[int]]),
+ (int, Tuple[Optional[int], Optional[int]]))
+ self.assertEqual(get_args(Callable[[], T][int]), ([], int))
+ self.assertEqual(get_args(Callable[..., int]), (..., int))
+ self.assertEqual(get_args(Union[int, Callable[[Tuple[T, ...]], str]]),
+ (int, Callable[[Tuple[T, ...]], str]))
+ self.assertEqual(get_args(Tuple[int, ...]), (int, ...))
+ if TYPING_3_11_0:
+ self.assertEqual(get_args(Tuple[()]), ())
+ else:
+ self.assertEqual(get_args(Tuple[()]), ((),))
+ self.assertEqual(get_args(Annotated[T, 'one', 2, ['three']]), (T, 'one', 2, ['three']))
+ self.assertEqual(get_args(List), ())
+ self.assertEqual(get_args(Tuple), ())
+ self.assertEqual(get_args(Callable), ())
+ if sys.version_info >= (3, 9):
+ self.assertEqual(get_args(list[int]), (int,))
+ self.assertEqual(get_args(list), ())
+ if sys.version_info >= (3, 9):
+ # Support Python versions with and without the fix for
+ # https://bugs.python.org/issue42195
+ # The first variant is for 3.9.2+, the second for 3.9.0 and 1
+ self.assertIn(get_args(collections.abc.Callable[[int], str]),
+ (([int], str), ([[int]], str)))
+ self.assertIn(get_args(collections.abc.Callable[[], str]),
+ (([], str), ([[]], str)))
+ self.assertEqual(get_args(collections.abc.Callable[..., str]), (..., str))
+ P = ParamSpec('P')
+ # In 3.9 and lower we use typing_extensions's hacky implementation
+ # of ParamSpec, which gets incorrectly wrapped in a list
+ self.assertIn(get_args(Callable[P, int]), [(P, int), ([P], int)])
+ self.assertEqual(get_args(Callable[Concatenate[int, P], int]),
+ (Concatenate[int, P], int))
+ self.assertEqual(get_args(Required[int]), (int,))
+ self.assertEqual(get_args(NotRequired[int]), (int,))
+ self.assertEqual(get_args(Unpack[Ts]), (Ts,))
+ self.assertEqual(get_args(Unpack), ())
+
+
+class CollectionsAbcTests(BaseTestCase):
+
+ def test_isinstance_collections(self):
+ self.assertNotIsInstance(1, collections.abc.Mapping)
+ self.assertNotIsInstance(1, collections.abc.Iterable)
+ self.assertNotIsInstance(1, collections.abc.Container)
+ self.assertNotIsInstance(1, collections.abc.Sized)
+ with self.assertRaises(TypeError):
+ isinstance(collections.deque(), typing_extensions.Deque[int])
+ with self.assertRaises(TypeError):
+ issubclass(collections.Counter, typing_extensions.Counter[str])
+
+ def test_awaitable(self):
+ async def foo() -> typing_extensions.Awaitable[int]:
+ return await AwaitableWrapper(42)
+
+ g = foo()
+ self.assertIsInstance(g, typing_extensions.Awaitable)
+ self.assertNotIsInstance(foo, typing_extensions.Awaitable)
+ g.send(None) # Run foo() till completion, to avoid warning.
+
+ def test_coroutine(self):
+ async def foo():
+ return
+
+ g = foo()
+ self.assertIsInstance(g, typing_extensions.Coroutine)
+ with self.assertRaises(TypeError):
+ isinstance(g, typing_extensions.Coroutine[int])
+ self.assertNotIsInstance(foo, typing_extensions.Coroutine)
+ try:
+ g.send(None)
+ except StopIteration:
+ pass
+
+ def test_async_iterable(self):
+ base_it: Iterator[int] = range(10)
+ it = AsyncIteratorWrapper(base_it)
+ self.assertIsInstance(it, typing_extensions.AsyncIterable)
+ self.assertIsInstance(it, typing_extensions.AsyncIterable)
+ self.assertNotIsInstance(42, typing_extensions.AsyncIterable)
+
+ def test_async_iterator(self):
+ base_it: Iterator[int] = range(10)
+ it = AsyncIteratorWrapper(base_it)
+ self.assertIsInstance(it, typing_extensions.AsyncIterator)
+ self.assertNotIsInstance(42, typing_extensions.AsyncIterator)
+
+ def test_deque(self):
+ self.assertIsSubclass(collections.deque, typing_extensions.Deque)
+ class MyDeque(typing_extensions.Deque[int]): ...
+ self.assertIsInstance(MyDeque(), collections.deque)
+
+ def test_counter(self):
+ self.assertIsSubclass(collections.Counter, typing_extensions.Counter)
+
+ def test_defaultdict_instantiation(self):
+ self.assertIs(
+ type(typing_extensions.DefaultDict()),
+ collections.defaultdict)
+ self.assertIs(
+ type(typing_extensions.DefaultDict[KT, VT]()),
+ collections.defaultdict)
+ self.assertIs(
+ type(typing_extensions.DefaultDict[str, int]()),
+ collections.defaultdict)
+
+ def test_defaultdict_subclass(self):
+
+ class MyDefDict(typing_extensions.DefaultDict[str, int]):
+ pass
+
+ dd = MyDefDict()
+ self.assertIsInstance(dd, MyDefDict)
+
+ self.assertIsSubclass(MyDefDict, collections.defaultdict)
+ self.assertNotIsSubclass(collections.defaultdict, MyDefDict)
+
+ def test_ordereddict_instantiation(self):
+ self.assertIs(
+ type(typing_extensions.OrderedDict()),
+ collections.OrderedDict)
+ self.assertIs(
+ type(typing_extensions.OrderedDict[KT, VT]()),
+ collections.OrderedDict)
+ self.assertIs(
+ type(typing_extensions.OrderedDict[str, int]()),
+ collections.OrderedDict)
+
+ def test_ordereddict_subclass(self):
+
+ class MyOrdDict(typing_extensions.OrderedDict[str, int]):
+ pass
+
+ od = MyOrdDict()
+ self.assertIsInstance(od, MyOrdDict)
+
+ self.assertIsSubclass(MyOrdDict, collections.OrderedDict)
+ self.assertNotIsSubclass(collections.OrderedDict, MyOrdDict)
+
+ def test_chainmap_instantiation(self):
+ self.assertIs(type(typing_extensions.ChainMap()), collections.ChainMap)
+ self.assertIs(type(typing_extensions.ChainMap[KT, VT]()), collections.ChainMap)
+ self.assertIs(type(typing_extensions.ChainMap[str, int]()), collections.ChainMap)
+ class CM(typing_extensions.ChainMap[KT, VT]): ...
+ self.assertIs(type(CM[int, str]()), CM)
+
+ def test_chainmap_subclass(self):
+
+ class MyChainMap(typing_extensions.ChainMap[str, int]):
+ pass
+
+ cm = MyChainMap()
+ self.assertIsInstance(cm, MyChainMap)
+
+ self.assertIsSubclass(MyChainMap, collections.ChainMap)
+ self.assertNotIsSubclass(collections.ChainMap, MyChainMap)
+
+ def test_deque_instantiation(self):
+ self.assertIs(type(typing_extensions.Deque()), collections.deque)
+ self.assertIs(type(typing_extensions.Deque[T]()), collections.deque)
+ self.assertIs(type(typing_extensions.Deque[int]()), collections.deque)
+ class D(typing_extensions.Deque[T]): ...
+ self.assertIs(type(D[int]()), D)
+
+ def test_counter_instantiation(self):
+ self.assertIs(type(typing_extensions.Counter()), collections.Counter)
+ self.assertIs(type(typing_extensions.Counter[T]()), collections.Counter)
+ self.assertIs(type(typing_extensions.Counter[int]()), collections.Counter)
+ class C(typing_extensions.Counter[T]): ...
+ self.assertIs(type(C[int]()), C)
+ self.assertEqual(C.__bases__, (collections.Counter, typing.Generic))
+
+ def test_counter_subclass_instantiation(self):
+
+ class MyCounter(typing_extensions.Counter[int]):
+ pass
+
+ d = MyCounter()
+ self.assertIsInstance(d, MyCounter)
+ self.assertIsInstance(d, collections.Counter)
+ self.assertIsInstance(d, typing_extensions.Counter)
+
+
+# These tests live in a separate TestCase class because,
+# unlike most collections.abc aliases in typing_extensions,
+# these are reimplemented on Python <=3.12 so that we can provide
+# default values for the second and third parameters
+class GeneratorTests(BaseTestCase):
+
+ def test_generator_basics(self):
+ def foo():
+ yield 42
+ g = foo()
+
+ self.assertIsInstance(g, typing_extensions.Generator)
+ self.assertNotIsInstance(foo, typing_extensions.Generator)
+ self.assertIsSubclass(type(g), typing_extensions.Generator)
+ self.assertNotIsSubclass(type(foo), typing_extensions.Generator)
+
+ parameterized = typing_extensions.Generator[int, str, None]
+ with self.assertRaises(TypeError):
+ isinstance(g, parameterized)
+ with self.assertRaises(TypeError):
+ issubclass(type(g), parameterized)
+
+ def test_generator_default(self):
+ g1 = typing_extensions.Generator[int]
+ g2 = typing_extensions.Generator[int, None, None]
+ self.assertEqual(get_args(g1), (int, type(None), type(None)))
+ self.assertEqual(get_args(g1), get_args(g2))
+
+ g3 = typing_extensions.Generator[int, float]
+ g4 = typing_extensions.Generator[int, float, None]
+ self.assertEqual(get_args(g3), (int, float, type(None)))
+ self.assertEqual(get_args(g3), get_args(g4))
+
+ def test_no_generator_instantiation(self):
+ with self.assertRaises(TypeError):
+ typing_extensions.Generator()
+ with self.assertRaises(TypeError):
+ typing_extensions.Generator[T, T, T]()
+ with self.assertRaises(TypeError):
+ typing_extensions.Generator[int, int, int]()
+
+ def test_subclassing_generator(self):
+ class G(typing_extensions.Generator[int, int, None]):
+ def send(self, value):
+ pass
+ def throw(self, typ, val=None, tb=None):
+ pass
+
+ def g(): yield 0
+
+ self.assertIsSubclass(G, typing_extensions.Generator)
+ self.assertIsSubclass(G, typing_extensions.Iterable)
+ self.assertIsSubclass(G, collections.abc.Generator)
+ self.assertIsSubclass(G, collections.abc.Iterable)
+ self.assertNotIsSubclass(type(g), G)
+
+ instance = G()
+ self.assertIsInstance(instance, typing_extensions.Generator)
+ self.assertIsInstance(instance, typing_extensions.Iterable)
+ self.assertIsInstance(instance, collections.abc.Generator)
+ self.assertIsInstance(instance, collections.abc.Iterable)
+ self.assertNotIsInstance(type(g), G)
+ self.assertNotIsInstance(g, G)
+
+ def test_async_generator_basics(self):
+ async def f():
+ yield 42
+ g = f()
+
+ self.assertIsInstance(g, typing_extensions.AsyncGenerator)
+ self.assertIsSubclass(type(g), typing_extensions.AsyncGenerator)
+ self.assertNotIsInstance(f, typing_extensions.AsyncGenerator)
+ self.assertNotIsSubclass(type(f), typing_extensions.AsyncGenerator)
+
+ parameterized = typing_extensions.AsyncGenerator[int, str]
+ with self.assertRaises(TypeError):
+ isinstance(g, parameterized)
+ with self.assertRaises(TypeError):
+ issubclass(type(g), parameterized)
+
+ def test_async_generator_default(self):
+ ag1 = typing_extensions.AsyncGenerator[int]
+ ag2 = typing_extensions.AsyncGenerator[int, None]
+ self.assertEqual(get_args(ag1), (int, type(None)))
+ self.assertEqual(get_args(ag1), get_args(ag2))
+
+ def test_no_async_generator_instantiation(self):
+ with self.assertRaises(TypeError):
+ typing_extensions.AsyncGenerator()
+ with self.assertRaises(TypeError):
+ typing_extensions.AsyncGenerator[T, T]()
+ with self.assertRaises(TypeError):
+ typing_extensions.AsyncGenerator[int, int]()
+
+ def test_subclassing_async_generator(self):
+ class G(typing_extensions.AsyncGenerator[int, int]):
+ def asend(self, value):
+ pass
+ def athrow(self, typ, val=None, tb=None):
+ pass
+
+ async def g(): yield 0
+
+ self.assertIsSubclass(G, typing_extensions.AsyncGenerator)
+ self.assertIsSubclass(G, typing_extensions.AsyncIterable)
+ self.assertIsSubclass(G, collections.abc.AsyncGenerator)
+ self.assertIsSubclass(G, collections.abc.AsyncIterable)
+ self.assertNotIsSubclass(type(g), G)
+
+ instance = G()
+ self.assertIsInstance(instance, typing_extensions.AsyncGenerator)
+ self.assertIsInstance(instance, typing_extensions.AsyncIterable)
+ self.assertIsInstance(instance, collections.abc.AsyncGenerator)
+ self.assertIsInstance(instance, collections.abc.AsyncIterable)
+ self.assertNotIsInstance(type(g), G)
+ self.assertNotIsInstance(g, G)
+
+ def test_subclassing_subclasshook(self):
+
+ class Base(typing_extensions.Generator):
+ @classmethod
+ def __subclasshook__(cls, other):
+ if other.__name__ == 'Foo':
+ return True
+ else:
+ return False
+
+ class C(Base): ...
+ class Foo: ...
+ class Bar: ...
+ self.assertIsSubclass(Foo, Base)
+ self.assertIsSubclass(Foo, C)
+ self.assertNotIsSubclass(Bar, C)
+
+ def test_subclassing_register(self):
+
+ class A(typing_extensions.Generator): ...
+ class B(A): ...
+
+ class C: ...
+ A.register(C)
+ self.assertIsSubclass(C, A)
+ self.assertNotIsSubclass(C, B)
+
+ class D: ...
+ B.register(D)
+ self.assertIsSubclass(D, A)
+ self.assertIsSubclass(D, B)
+
+ class M: ...
+ collections.abc.Generator.register(M)
+ self.assertIsSubclass(M, typing_extensions.Generator)
+
+ def test_collections_as_base(self):
+
+ class M(collections.abc.Generator): ...
+ self.assertIsSubclass(M, typing_extensions.Generator)
+ self.assertIsSubclass(M, typing_extensions.Iterable)
+
+ class S(collections.abc.AsyncGenerator): ...
+ self.assertIsSubclass(S, typing_extensions.AsyncGenerator)
+ self.assertIsSubclass(S, typing_extensions.AsyncIterator)
+
+ class A(collections.abc.Generator, metaclass=abc.ABCMeta): ...
+ class B: ...
+ A.register(B)
+ self.assertIsSubclass(B, typing_extensions.Generator)
+
+    @skipIf(sys.version_info < (3, 10), "PEP 604 unions require Python 3.10+")
+ def test_or_and_ror(self):
+ self.assertEqual(
+ typing_extensions.Generator | typing_extensions.AsyncGenerator,
+ Union[typing_extensions.Generator, typing_extensions.AsyncGenerator]
+ )
+ self.assertEqual(
+ typing_extensions.Generator | typing.Deque,
+ Union[typing_extensions.Generator, typing.Deque]
+ )
+
+
+class OtherABCTests(BaseTestCase):
+
+ def test_contextmanager(self):
+ @contextlib.contextmanager
+ def manager():
+ yield 42
+
+ cm = manager()
+ self.assertIsInstance(cm, typing_extensions.ContextManager)
+ self.assertNotIsInstance(42, typing_extensions.ContextManager)
+
+ def test_contextmanager_type_params(self):
+ cm1 = typing_extensions.ContextManager[int]
+ self.assertEqual(get_args(cm1), (int, typing.Optional[bool]))
+ cm2 = typing_extensions.ContextManager[int, None]
+ self.assertEqual(get_args(cm2), (int, NoneType))
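+        # The second type parameter mirrors the return type of __exit__ and,
+        # in the typing_extensions reimplementation, defaults to
+        # Optional[bool] when it is not supplied explicitly.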
+
+ def test_async_contextmanager(self):
+ class NotACM:
+ pass
+ self.assertIsInstance(ACM(), typing_extensions.AsyncContextManager)
+ self.assertNotIsInstance(NotACM(), typing_extensions.AsyncContextManager)
+ @contextlib.contextmanager
+ def manager():
+ yield 42
+
+ cm = manager()
+ self.assertNotIsInstance(cm, typing_extensions.AsyncContextManager)
+ self.assertEqual(
+ typing_extensions.AsyncContextManager[int].__args__,
+ (int, typing.Optional[bool])
+ )
+ with self.assertRaises(TypeError):
+ isinstance(42, typing_extensions.AsyncContextManager[int])
+ with self.assertRaises(TypeError):
+ typing_extensions.AsyncContextManager[int, str, float]
+
+ def test_asynccontextmanager_type_params(self):
+ cm1 = typing_extensions.AsyncContextManager[int]
+ self.assertEqual(get_args(cm1), (int, typing.Optional[bool]))
+ cm2 = typing_extensions.AsyncContextManager[int, None]
+ self.assertEqual(get_args(cm2), (int, NoneType))
+
+
+class TypeTests(BaseTestCase):
+
+ def test_type_basic(self):
+
+ class User: pass
+ class BasicUser(User): pass
+ class ProUser(User): pass
+
+ def new_user(user_class: Type[User]) -> User:
+ return user_class()
+
+ new_user(BasicUser)
+
+ def test_type_typevar(self):
+
+ class User: pass
+ class BasicUser(User): pass
+ class ProUser(User): pass
+
+ U = TypeVar('U', bound=User)
+
+ def new_user(user_class: Type[U]) -> U:
+ return user_class()
+
+ new_user(BasicUser)
+
+ def test_type_optional(self):
+ A = Optional[Type[BaseException]]
+
+ def foo(a: A) -> Optional[BaseException]:
+ if a is None:
+ return None
+ else:
+ return a()
+
+ assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt)
+ assert foo(None) is None
+
+
+class NewTypeTests(BaseTestCase):
+ @classmethod
+ def setUpClass(cls):
+ global UserId
+ UserId = NewType('UserId', int)
+ cls.UserName = NewType(cls.__qualname__ + '.UserName', str)
+
+ @classmethod
+ def tearDownClass(cls):
+ global UserId
+ del UserId
+ del cls.UserName
+
+ def test_basic(self):
+ self.assertIsInstance(UserId(5), int)
+ self.assertIsInstance(self.UserName('Joe'), str)
+ self.assertEqual(UserId(5) + 1, 6)
+
+ def test_errors(self):
+ with self.assertRaises(TypeError):
+ issubclass(UserId, int)
+ with self.assertRaises(TypeError):
+ class D(UserId):
+ pass
+
+    @skipUnless(TYPING_3_10_0, "PEP 604 unions require Python 3.10+")
+ def test_or(self):
+ for cls in (int, self.UserName):
+ with self.subTest(cls=cls):
+ self.assertEqual(UserId | cls, Union[UserId, cls])
+ self.assertEqual(cls | UserId, Union[cls, UserId])
+
+ self.assertEqual(get_args(UserId | cls), (UserId, cls))
+ self.assertEqual(get_args(cls | UserId), (cls, UserId))
+
+ def test_special_attrs(self):
+ self.assertEqual(UserId.__name__, 'UserId')
+ self.assertEqual(UserId.__qualname__, 'UserId')
+ self.assertEqual(UserId.__module__, __name__)
+ self.assertEqual(UserId.__supertype__, int)
+
+ UserName = self.UserName
+ self.assertEqual(UserName.__name__, 'UserName')
+ self.assertEqual(UserName.__qualname__,
+ self.__class__.__qualname__ + '.UserName')
+ self.assertEqual(UserName.__module__, __name__)
+ self.assertEqual(UserName.__supertype__, str)
+
+ def test_repr(self):
+ self.assertEqual(repr(UserId), f'{__name__}.UserId')
+ self.assertEqual(repr(self.UserName),
+ f'{__name__}.{self.__class__.__qualname__}.UserName')
+
+ def test_pickle(self):
+ UserAge = NewType('UserAge', float)
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(proto=proto):
+ pickled = pickle.dumps(UserId, proto)
+ loaded = pickle.loads(pickled)
+ self.assertIs(loaded, UserId)
+
+ pickled = pickle.dumps(self.UserName, proto)
+ loaded = pickle.loads(pickled)
+ self.assertIs(loaded, self.UserName)
+
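+                # UserAge is defined inside this test method, so pickle cannot
+                # look it up by module-level name and is expected to refuse
+                # to serialize it.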
+ with self.assertRaises(pickle.PicklingError):
+ pickle.dumps(UserAge, proto)
+
+ def test_missing__name__(self):
+ code = ("import typing_extensions\n"
+ "NT = typing_extensions.NewType('NT', int)\n"
+ )
+ exec(code, {})
+
+ def test_error_message_when_subclassing(self):
+ with self.assertRaisesRegex(
+ TypeError,
+ re.escape(
+ "Cannot subclass an instance of NewType. Perhaps you were looking for: "
+ "`ProUserId = NewType('ProUserId', UserId)`"
+ )
+ ):
+ class ProUserId(UserId):
+ ...
+
+
+class Coordinate(Protocol):
+ x: int
+ y: int
+
+@runtime_checkable
+class Point(Coordinate, Protocol):
+ label: str
+
+class MyPoint:
+ x: int
+ y: int
+ label: str
+
+class XAxis(Protocol):
+ x: int
+
+class YAxis(Protocol):
+ y: int
+
+@runtime_checkable
+class Position(XAxis, YAxis, Protocol):
+ pass
+
+@runtime_checkable
+class Proto(Protocol):
+ attr: int
+
+ def meth(self, arg: str) -> int:
+ ...
+
+class Concrete(Proto):
+ pass
+
+class Other:
+ attr: int = 1
+
+ def meth(self, arg: str) -> int:
+ if arg == 'this':
+ return 1
+ return 0
+
+class NT(NamedTuple):
+ x: int
+ y: int
+
+
+skip_if_py312b1 = skipIf(
+ sys.version_info == (3, 12, 0, 'beta', 1),
+ "CPython had bugs in 3.12.0b1"
+)
+
+
+class ProtocolTests(BaseTestCase):
+ def test_runtime_alias(self):
+ self.assertIs(runtime, runtime_checkable)
+
+ def test_basic_protocol(self):
+ @runtime_checkable
+ class P(Protocol):
+ def meth(self):
+ pass
+ class C: pass
+ class D:
+ def meth(self):
+ pass
+ def f():
+ pass
+ self.assertIsSubclass(D, P)
+ self.assertIsInstance(D(), P)
+ self.assertNotIsSubclass(C, P)
+ self.assertNotIsInstance(C(), P)
+ self.assertNotIsSubclass(types.FunctionType, P)
+ self.assertNotIsInstance(f, P)
+
+ def test_everything_implements_empty_protocol(self):
+ @runtime_checkable
+ class Empty(Protocol): pass
+ class C: pass
+ def f():
+ pass
+ for thing in (object, type, tuple, C, types.FunctionType):
+ self.assertIsSubclass(thing, Empty)
+ for thing in (object(), 1, (), typing, f):
+ self.assertIsInstance(thing, Empty)
+
+ def test_function_implements_protocol(self):
+ def f():
+ pass
+ self.assertIsInstance(f, HasCallProtocol)
+
+ def test_no_inheritance_from_nominal(self):
+ class C: pass
+ class BP(Protocol): pass
+ with self.assertRaises(TypeError):
+ class P(C, Protocol):
+ pass
+ with self.assertRaises(TypeError):
+ class Q(Protocol, C):
+ pass
+ with self.assertRaises(TypeError):
+ class R(BP, C, Protocol):
+ pass
+ class D(BP, C): pass
+ class E(C, BP): pass
+ self.assertNotIsInstance(D(), E)
+ self.assertNotIsInstance(E(), D)
+
+ def test_runtimecheckable_on_typing_dot_Protocol(self):
+ @runtime_checkable
+ class Foo(typing.Protocol):
+ x: int
+
+ class Bar:
+ def __init__(self):
+ self.x = 42
+
+ self.assertIsInstance(Bar(), Foo)
+ self.assertNotIsInstance(object(), Foo)
+
+ def test_typing_dot_runtimecheckable_on_Protocol(self):
+ @typing.runtime_checkable
+ class Foo(Protocol):
+ x: int
+
+ class Bar:
+ def __init__(self):
+ self.x = 42
+
+ self.assertIsInstance(Bar(), Foo)
+ self.assertNotIsInstance(object(), Foo)
+
+ def test_typing_Protocol_and_extensions_Protocol_can_mix(self):
+ class TypingProto(typing.Protocol):
+ x: int
+
+ class ExtensionsProto(Protocol):
+ y: int
+
+ class SubProto(TypingProto, ExtensionsProto, typing.Protocol):
+ z: int
+
+ class SubProto2(TypingProto, ExtensionsProto, Protocol):
+ z: int
+
+ class SubProto3(ExtensionsProto, TypingProto, typing.Protocol):
+ z: int
+
+ class SubProto4(ExtensionsProto, TypingProto, Protocol):
+ z: int
+
+ for proto in (
+ ExtensionsProto, SubProto, SubProto2, SubProto3, SubProto4
+ ):
+ with self.subTest(proto=proto.__name__):
+ self.assertTrue(is_protocol(proto))
+ if Protocol is not typing.Protocol:
+ self.assertIsInstance(proto, typing_extensions._ProtocolMeta)
+ self.assertIsInstance(proto.__protocol_attrs__, set)
+ with self.assertRaisesRegex(
+ TypeError, "Protocols cannot be instantiated"
+ ):
+ proto()
+ # check these don't raise
+ runtime_checkable(proto)
+ typing.runtime_checkable(proto)
+
+ class Concrete(SubProto): pass
+ class Concrete2(SubProto2): pass
+ class Concrete3(SubProto3): pass
+ class Concrete4(SubProto4): pass
+
+ for cls in Concrete, Concrete2, Concrete3, Concrete4:
+ with self.subTest(cls=cls.__name__):
+ self.assertFalse(is_protocol(cls))
+ # Check that this doesn't raise:
+ self.assertIsInstance(cls(), cls)
+ with self.assertRaises(TypeError):
+ runtime_checkable(cls)
+ with self.assertRaises(TypeError):
+ typing.runtime_checkable(cls)
+
+ def test_no_instantiation(self):
+ class P(Protocol): pass
+ with self.assertRaises(TypeError):
+ P()
+ class C(P): pass
+ self.assertIsInstance(C(), C)
+ T = TypeVar('T')
+ class PG(Protocol[T]): pass
+ with self.assertRaises(TypeError):
+ PG()
+ with self.assertRaises(TypeError):
+ PG[int]()
+ with self.assertRaises(TypeError):
+ PG[T]()
+ class CG(PG[T]): pass
+ self.assertIsInstance(CG[int](), CG)
+
+ def test_protocol_defining_init_does_not_get_overridden(self):
+ # check that P.__init__ doesn't get clobbered
+ # see https://bugs.python.org/issue44807
+
+ class P(Protocol):
+ x: int
+ def __init__(self, x: int) -> None:
+ self.x = x
+ class C: pass
+
+ c = C()
+ P.__init__(c, 1)
+ self.assertEqual(c.x, 1)
+
+ def test_concrete_class_inheriting_init_from_protocol(self):
+ class P(Protocol):
+ x: int
+ def __init__(self, x: int) -> None:
+ self.x = x
+
+ class C(P): pass
+
+ c = C(1)
+ self.assertIsInstance(c, C)
+ self.assertEqual(c.x, 1)
+
+ def test_cannot_instantiate_abstract(self):
+ @runtime_checkable
+ class P(Protocol):
+ @abc.abstractmethod
+ def ameth(self) -> int:
+ raise NotImplementedError
+ class B(P):
+ pass
+ class C(B):
+ def ameth(self) -> int:
+ return 26
+ with self.assertRaises(TypeError):
+ B()
+ self.assertIsInstance(C(), P)
+
+ def test_subprotocols_extending(self):
+ class P1(Protocol):
+ def meth1(self):
+ pass
+ @runtime_checkable
+ class P2(P1, Protocol):
+ def meth2(self):
+ pass
+ class C:
+ def meth1(self):
+ pass
+ def meth2(self):
+ pass
+ class C1:
+ def meth1(self):
+ pass
+ class C2:
+ def meth2(self):
+ pass
+ self.assertNotIsInstance(C1(), P2)
+ self.assertNotIsInstance(C2(), P2)
+ self.assertNotIsSubclass(C1, P2)
+ self.assertNotIsSubclass(C2, P2)
+ self.assertIsInstance(C(), P2)
+ self.assertIsSubclass(C, P2)
+
+ def test_subprotocols_merging(self):
+ class P1(Protocol):
+ def meth1(self):
+ pass
+ class P2(Protocol):
+ def meth2(self):
+ pass
+ @runtime_checkable
+ class P(P1, P2, Protocol):
+ pass
+ class C:
+ def meth1(self):
+ pass
+ def meth2(self):
+ pass
+ class C1:
+ def meth1(self):
+ pass
+ class C2:
+ def meth2(self):
+ pass
+ self.assertNotIsInstance(C1(), P)
+ self.assertNotIsInstance(C2(), P)
+ self.assertNotIsSubclass(C1, P)
+ self.assertNotIsSubclass(C2, P)
+ self.assertIsInstance(C(), P)
+ self.assertIsSubclass(C, P)
+
+ def test_protocols_issubclass(self):
+ T = TypeVar('T')
+ @runtime_checkable
+ class P(Protocol):
+ def x(self): ...
+ @runtime_checkable
+ class PG(Protocol[T]):
+ def x(self): ...
+ class BadP(Protocol):
+ def x(self): ...
+ class BadPG(Protocol[T]):
+ def x(self): ...
+ class C:
+ def x(self): ...
+ self.assertIsSubclass(C, P)
+ self.assertIsSubclass(C, PG)
+ self.assertIsSubclass(BadP, PG)
+
+ no_subscripted_generics = (
+ "Subscripted generics cannot be used with class and instance checks"
+ )
+
+ with self.assertRaisesRegex(TypeError, no_subscripted_generics):
+ issubclass(C, PG[T])
+ with self.assertRaisesRegex(TypeError, no_subscripted_generics):
+ issubclass(C, PG[C])
+
+ only_runtime_checkable_protocols = (
+ "Instance and class checks can only be used with "
+ "@runtime_checkable protocols"
+ )
+
+ with self.assertRaisesRegex(TypeError, only_runtime_checkable_protocols):
+ issubclass(C, BadP)
+ with self.assertRaisesRegex(TypeError, only_runtime_checkable_protocols):
+ issubclass(C, BadPG)
+
+ with self.assertRaisesRegex(TypeError, no_subscripted_generics):
+ issubclass(P, PG[T])
+ with self.assertRaisesRegex(TypeError, no_subscripted_generics):
+ issubclass(PG, PG[int])
+
+ only_classes_allowed = r"issubclass\(\) arg 1 must be a class"
+
+ with self.assertRaisesRegex(TypeError, only_classes_allowed):
+ issubclass(1, P)
+ with self.assertRaisesRegex(TypeError, only_classes_allowed):
+ issubclass(1, PG)
+ with self.assertRaisesRegex(TypeError, only_classes_allowed):
+ issubclass(1, BadP)
+ with self.assertRaisesRegex(TypeError, only_classes_allowed):
+ issubclass(1, BadPG)
+
+ def test_implicit_issubclass_between_two_protocols(self):
+ @runtime_checkable
+ class CallableMembersProto(Protocol):
+ def meth(self): ...
+
+ # All the below protocols should be considered "subclasses"
+ # of CallableMembersProto at runtime,
+ # even though none of them explicitly subclass CallableMembersProto
+
+ class IdenticalProto(Protocol):
+ def meth(self): ...
+
+ class SupersetProto(Protocol):
+ def meth(self): ...
+ def meth2(self): ...
+
+ class NonCallableMembersProto(Protocol):
+ meth: Callable[[], None]
+
+ class NonCallableMembersSupersetProto(Protocol):
+ meth: Callable[[], None]
+ meth2: Callable[[str, int], bool]
+
+ class MixedMembersProto1(Protocol):
+ meth: Callable[[], None]
+ def meth2(self): ...
+
+ class MixedMembersProto2(Protocol):
+ def meth(self): ...
+ meth2: Callable[[str, int], bool]
+
+ for proto in (
+ IdenticalProto, SupersetProto, NonCallableMembersProto,
+ NonCallableMembersSupersetProto, MixedMembersProto1, MixedMembersProto2
+ ):
+ with self.subTest(proto=proto.__name__):
+ self.assertIsSubclass(proto, CallableMembersProto)
+
+ # These two shouldn't be considered subclasses of CallableMembersProto, however,
+ # since they don't have the `meth` protocol member
+
+ class EmptyProtocol(Protocol): ...
+ class UnrelatedProtocol(Protocol):
+ def wut(self): ...
+
+ self.assertNotIsSubclass(EmptyProtocol, CallableMembersProto)
+ self.assertNotIsSubclass(UnrelatedProtocol, CallableMembersProto)
+
+ # These aren't protocols at all (despite having annotations),
+ # so they should only be considered subclasses of CallableMembersProto
+ # if they *actually have an attribute* matching the `meth` member
+ # (just having an annotation is insufficient)
+
+ class AnnotatedButNotAProtocol:
+ meth: Callable[[], None]
+
+ class NotAProtocolButAnImplicitSubclass:
+ def meth(self): pass
+
+ class NotAProtocolButAnImplicitSubclass2:
+ meth: Callable[[], None]
+ def meth(self): pass
+
+ class NotAProtocolButAnImplicitSubclass3:
+ meth: Callable[[], None]
+ meth2: Callable[[int, str], bool]
+ def meth(self): pass
+ def meth2(self, x, y): return True
+
+ self.assertNotIsSubclass(AnnotatedButNotAProtocol, CallableMembersProto)
+ self.assertIsSubclass(NotAProtocolButAnImplicitSubclass, CallableMembersProto)
+ self.assertIsSubclass(NotAProtocolButAnImplicitSubclass2, CallableMembersProto)
+ self.assertIsSubclass(NotAProtocolButAnImplicitSubclass3, CallableMembersProto)
+
+ @skip_if_py312b1
+ def test_issubclass_and_isinstance_on_Protocol_itself(self):
+ class C:
+ def x(self): pass
+
+ self.assertNotIsSubclass(object, Protocol)
+ self.assertNotIsInstance(object(), Protocol)
+
+ self.assertNotIsSubclass(str, Protocol)
+ self.assertNotIsInstance('foo', Protocol)
+
+ self.assertNotIsSubclass(C, Protocol)
+ self.assertNotIsInstance(C(), Protocol)
+
+ only_classes_allowed = r"issubclass\(\) arg 1 must be a class"
+
+ with self.assertRaisesRegex(TypeError, only_classes_allowed):
+ issubclass(1, Protocol)
+ with self.assertRaisesRegex(TypeError, only_classes_allowed):
+ issubclass('foo', Protocol)
+ with self.assertRaisesRegex(TypeError, only_classes_allowed):
+ issubclass(C(), Protocol)
+
+ T = TypeVar('T')
+
+ @runtime_checkable
+ class EmptyProtocol(Protocol): pass
+
+ @runtime_checkable
+ class SupportsStartsWith(Protocol):
+ def startswith(self, x: str) -> bool: ...
+
+ @runtime_checkable
+ class SupportsX(Protocol[T]):
+ def x(self): ...
+
+ for proto in EmptyProtocol, SupportsStartsWith, SupportsX:
+ with self.subTest(proto=proto.__name__):
+ self.assertIsSubclass(proto, Protocol)
+
+ # gh-105237 / PR #105239:
+ # check that the presence of Protocol subclasses
+ # where `issubclass(X, <subclass>)` evaluates to True
+ # doesn't influence the result of `issubclass(X, Protocol)`
+
+ self.assertIsSubclass(object, EmptyProtocol)
+ self.assertIsInstance(object(), EmptyProtocol)
+ self.assertNotIsSubclass(object, Protocol)
+ self.assertNotIsInstance(object(), Protocol)
+
+ self.assertIsSubclass(str, SupportsStartsWith)
+ self.assertIsInstance('foo', SupportsStartsWith)
+ self.assertNotIsSubclass(str, Protocol)
+ self.assertNotIsInstance('foo', Protocol)
+
+ self.assertIsSubclass(C, SupportsX)
+ self.assertIsInstance(C(), SupportsX)
+ self.assertNotIsSubclass(C, Protocol)
+ self.assertNotIsInstance(C(), Protocol)
+
+ @skip_if_py312b1
+ def test_isinstance_checks_not_at_whim_of_gc(self):
+ self.addCleanup(gc.enable)
+ gc.disable()
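+        # With gc disabled, the partially constructed class left behind by the
+        # failing class statement below presumably stays alive; the final
+        # isinstance() check must not be affected by its presence.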
+
+ with self.assertRaisesRegex(
+ TypeError,
+ "Protocols can only inherit from other protocols"
+ ):
+ class Foo(collections.abc.Mapping, Protocol):
+ pass
+
+ self.assertNotIsInstance([], collections.abc.Mapping)
+
+ def test_protocols_issubclass_non_callable(self):
+ class C:
+ x = 1
+
+ @runtime_checkable
+ class PNonCall(Protocol):
+ x = 1
+
+ non_callable_members_illegal = (
+ "Protocols with non-method members don't support issubclass()"
+ )
+
+ with self.assertRaisesRegex(TypeError, non_callable_members_illegal):
+ issubclass(C, PNonCall)
+
+ self.assertIsInstance(C(), PNonCall)
+ PNonCall.register(C)
+
+ with self.assertRaisesRegex(TypeError, non_callable_members_illegal):
+ issubclass(C, PNonCall)
+
+ self.assertIsInstance(C(), PNonCall)
+
+ # check that non-protocol subclasses are not affected
+ class D(PNonCall): ...
+
+ self.assertNotIsSubclass(C, D)
+ self.assertNotIsInstance(C(), D)
+ D.register(C)
+ self.assertIsSubclass(C, D)
+ self.assertIsInstance(C(), D)
+
+ with self.assertRaisesRegex(TypeError, non_callable_members_illegal):
+ issubclass(D, PNonCall)
+
+ def test_no_weird_caching_with_issubclass_after_isinstance(self):
+ @runtime_checkable
+ class Spam(Protocol):
+ x: int
+
+ class Eggs:
+ def __init__(self) -> None:
+ self.x = 42
+
+ self.assertIsInstance(Eggs(), Spam)
+
+ # gh-104555: If we didn't override ABCMeta.__subclasscheck__ in _ProtocolMeta,
+ # TypeError wouldn't be raised here,
+ # as the cached result of the isinstance() check immediately above
+ # would mean the issubclass() call would short-circuit
+ # before we got to the "raise TypeError" line
+ with self.assertRaisesRegex(
+ TypeError,
+ "Protocols with non-method members don't support issubclass()"
+ ):
+ issubclass(Eggs, Spam)
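+        # A rough sketch (an assumption about typing_extensions internals,
+        # not verbatim source) of why the TypeError is still raised despite
+        # the cached isinstance() result: the protocol metaclass re-validates
+        # the protocol before deferring to ABCMeta, roughly like
+        #
+        #     class _ProtocolMeta(abc.ABCMeta):
+        #         def __subclasscheck__(cls, other):
+        #             if (getattr(cls, "_is_runtime_protocol", False)
+        #                     and cls.__non_callable_proto_members__):
+        #                 # (unless the protocol defines its own __subclasshook__)
+        #                 raise TypeError(
+        #                     "Protocols with non-method members don't "
+        #                     "support issubclass()"
+        #                 )
+        #             return super().__subclasscheck__(other)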
+
+ def test_no_weird_caching_with_issubclass_after_isinstance_2(self):
+ @runtime_checkable
+ class Spam(Protocol):
+ x: int
+
+ class Eggs: ...
+
+ self.assertNotIsInstance(Eggs(), Spam)
+
+ # gh-104555: If we didn't override ABCMeta.__subclasscheck__ in _ProtocolMeta,
+ # TypeError wouldn't be raised here,
+ # as the cached result of the isinstance() check immediately above
+ # would mean the issubclass() call would short-circuit
+ # before we got to the "raise TypeError" line
+ with self.assertRaisesRegex(
+ TypeError,
+ "Protocols with non-method members don't support issubclass()"
+ ):
+ issubclass(Eggs, Spam)
+
+ def test_no_weird_caching_with_issubclass_after_isinstance_3(self):
+ @runtime_checkable
+ class Spam(Protocol):
+ x: int
+
+ class Eggs:
+ def __getattr__(self, attr):
+ if attr == "x":
+ return 42
+ raise AttributeError(attr)
+
+ self.assertNotIsInstance(Eggs(), Spam)
+
+ # gh-104555: If we didn't override ABCMeta.__subclasscheck__ in _ProtocolMeta,
+ # TypeError wouldn't be raised here,
+ # as the cached result of the isinstance() check immediately above
+ # would mean the issubclass() call would short-circuit
+ # before we got to the "raise TypeError" line
+ with self.assertRaisesRegex(
+ TypeError,
+ "Protocols with non-method members don't support issubclass()"
+ ):
+ issubclass(Eggs, Spam)
+
+ def test_protocols_isinstance(self):
+ T = TypeVar('T')
+ @runtime_checkable
+ class P(Protocol):
+ def meth(x): ...
+ @runtime_checkable
+ class PG(Protocol[T]):
+ def meth(x): ...
+ @runtime_checkable
+ class WeirdProto(Protocol):
+ meth = str.maketrans
+ @runtime_checkable
+ class WeirdProto2(Protocol):
+ meth = lambda *args, **kwargs: None # noqa: E731
+ class CustomCallable:
+ def __call__(self, *args, **kwargs):
+ pass
+ @runtime_checkable
+ class WeirderProto(Protocol):
+ meth = CustomCallable()
+ class BadP(Protocol):
+ def meth(x): ...
+ class BadPG(Protocol[T]):
+ def meth(x): ...
+ class C:
+ def meth(x): ...
+ class C2:
+ def __init__(self):
+ self.meth = lambda: None
+ for klass in C, C2:
+ for proto in P, PG, WeirdProto, WeirdProto2, WeirderProto:
+ with self.subTest(klass=klass.__name__, proto=proto.__name__):
+ self.assertIsInstance(klass(), proto)
+
+ no_subscripted_generics = (
+ "Subscripted generics cannot be used with class and instance checks"
+ )
+
+ with self.assertRaisesRegex(TypeError, no_subscripted_generics):
+ isinstance(C(), PG[T])
+ with self.assertRaisesRegex(TypeError, no_subscripted_generics):
+ isinstance(C(), PG[C])
+
+ only_runtime_checkable_msg = (
+ "Instance and class checks can only be used "
+ "with @runtime_checkable protocols"
+ )
+
+ with self.assertRaisesRegex(TypeError, only_runtime_checkable_msg):
+ isinstance(C(), BadP)
+ with self.assertRaisesRegex(TypeError, only_runtime_checkable_msg):
+ isinstance(C(), BadPG)
+
+ def test_protocols_isinstance_properties_and_descriptors(self):
+ class C:
+ @property
+ def attr(self):
+ return 42
+
+ class CustomDescriptor:
+ def __get__(self, obj, objtype=None):
+ return 42
+
+ class D:
+ attr = CustomDescriptor()
+
+ # Check that properties set on superclasses
+ # are still found by the isinstance() logic
+ class E(C): ...
+ class F(D): ...
+
+ class Empty: ...
+
+ T = TypeVar('T')
+
+ @runtime_checkable
+ class P(Protocol):
+ @property
+ def attr(self): ...
+
+ @runtime_checkable
+ class P1(Protocol):
+ attr: int
+
+ @runtime_checkable
+ class PG(Protocol[T]):
+ @property
+ def attr(self): ...
+
+ @runtime_checkable
+ class PG1(Protocol[T]):
+ attr: T
+
+ @runtime_checkable
+ class MethodP(Protocol):
+ def attr(self): ...
+
+ @runtime_checkable
+ class MethodPG(Protocol[T]):
+ def attr(self) -> T: ...
+
+ for protocol_class in P, P1, PG, PG1, MethodP, MethodPG:
+ for klass in C, D, E, F:
+ with self.subTest(
+ klass=klass.__name__,
+ protocol_class=protocol_class.__name__
+ ):
+ self.assertIsInstance(klass(), protocol_class)
+
+ with self.subTest(klass="Empty", protocol_class=protocol_class.__name__):
+ self.assertNotIsInstance(Empty(), protocol_class)
+
+ class BadP(Protocol):
+ @property
+ def attr(self): ...
+
+ class BadP1(Protocol):
+ attr: int
+
+ class BadPG(Protocol[T]):
+ @property
+ def attr(self): ...
+
+ class BadPG1(Protocol[T]):
+ attr: T
+
+ cases = (
+ PG[T], PG[C], PG1[T], PG1[C], MethodPG[T],
+ MethodPG[C], BadP, BadP1, BadPG, BadPG1
+ )
+
+ for obj in cases:
+ for klass in C, D, E, F, Empty:
+ with self.subTest(klass=klass.__name__, obj=obj):
+ with self.assertRaises(TypeError):
+ isinstance(klass(), obj)
+
+ def test_protocols_isinstance_not_fooled_by_custom_dir(self):
+ @runtime_checkable
+ class HasX(Protocol):
+ x: int
+
+ class CustomDirWithX:
+ x = 10
+ def __dir__(self):
+ return []
+
+ class CustomDirWithoutX:
+ def __dir__(self):
+ return ["x"]
+
+ self.assertIsInstance(CustomDirWithX(), HasX)
+ self.assertNotIsInstance(CustomDirWithoutX(), HasX)
+
+ def test_protocols_isinstance_attribute_access_with_side_effects(self):
+ class C:
+ @property
+ def attr(self):
+ raise AttributeError('no')
+
+ class CustomDescriptor:
+ def __get__(self, obj, objtype=None):
+ raise RuntimeError("NO")
+
+ class D:
+ attr = CustomDescriptor()
+
+ # Check that properties set on superclasses
+ # are still found by the isinstance() logic
+ class E(C): ...
+ class F(D): ...
+
+ class WhyWouldYouDoThis:
+ def __getattr__(self, name):
+ raise RuntimeError("wut")
+
+ T = TypeVar('T')
+
+ @runtime_checkable
+ class P(Protocol):
+ @property
+ def attr(self): ...
+
+ @runtime_checkable
+ class P1(Protocol):
+ attr: int
+
+ @runtime_checkable
+ class PG(Protocol[T]):
+ @property
+ def attr(self): ...
+
+ @runtime_checkable
+ class PG1(Protocol[T]):
+ attr: T
+
+ @runtime_checkable
+ class MethodP(Protocol):
+ def attr(self): ...
+
+ @runtime_checkable
+ class MethodPG(Protocol[T]):
+ def attr(self) -> T: ...
+
+ for protocol_class in P, P1, PG, PG1, MethodP, MethodPG:
+ for klass in C, D, E, F:
+ with self.subTest(
+ klass=klass.__name__,
+ protocol_class=protocol_class.__name__
+ ):
+ self.assertIsInstance(klass(), protocol_class)
+
+ with self.subTest(
+ klass="WhyWouldYouDoThis",
+ protocol_class=protocol_class.__name__
+ ):
+ self.assertNotIsInstance(WhyWouldYouDoThis(), protocol_class)
+
+ def test_protocols_isinstance___slots__(self):
+ # As per the consensus in https://github.com/python/typing/issues/1367,
+ # this is desirable behaviour
+ @runtime_checkable
+ class HasX(Protocol):
+ x: int
+
+ class HasNothingButSlots:
+ __slots__ = ("x",)
+
+ self.assertIsInstance(HasNothingButSlots(), HasX)
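+        # Presumably this passes because the runtime check looks attributes up
+        # via inspect.getattr_static, which finds the slot descriptor on the
+        # class even though no value was ever assigned on the instance.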
+
+ def test_protocols_isinstance_py36(self):
+ class APoint:
+ def __init__(self, x, y, label):
+ self.x = x
+ self.y = y
+ self.label = label
+ class BPoint:
+ label = 'B'
+ def __init__(self, x, y):
+ self.x = x
+ self.y = y
+ class C:
+ def __init__(self, attr):
+ self.attr = attr
+ def meth(self, arg):
+ return 0
+ class Bad: pass
+ self.assertIsInstance(APoint(1, 2, 'A'), Point)
+ self.assertIsInstance(BPoint(1, 2), Point)
+ self.assertNotIsInstance(MyPoint(), Point)
+ self.assertIsInstance(BPoint(1, 2), Position)
+ self.assertIsInstance(Other(), Proto)
+ self.assertIsInstance(Concrete(), Proto)
+ self.assertIsInstance(C(42), Proto)
+ self.assertNotIsInstance(Bad(), Proto)
+ self.assertNotIsInstance(Bad(), Point)
+ self.assertNotIsInstance(Bad(), Position)
+ self.assertNotIsInstance(Bad(), Concrete)
+ self.assertNotIsInstance(Other(), Concrete)
+ self.assertIsInstance(NT(1, 2), Position)
+
+ def test_runtime_checkable_with_match_args(self):
+ @runtime_checkable
+ class P_regular(Protocol):
+ x: int
+ y: int
+
+ @runtime_checkable
+ class P_match(Protocol):
+ __match_args__ = ("x", "y")
+ x: int
+ y: int
+
+ class Regular:
+ def __init__(self, x: int, y: int):
+ self.x = x
+ self.y = y
+
+ class WithMatch:
+ __match_args__ = ("x", "y", "z")
+ def __init__(self, x: int, y: int, z: int):
+ self.x = x
+ self.y = y
+ self.z = z
+
+ class Nope: ...
+
+ self.assertIsInstance(Regular(1, 2), P_regular)
+ self.assertIsInstance(Regular(1, 2), P_match)
+ self.assertIsInstance(WithMatch(1, 2, 3), P_regular)
+ self.assertIsInstance(WithMatch(1, 2, 3), P_match)
+ self.assertNotIsInstance(Nope(), P_regular)
+ self.assertNotIsInstance(Nope(), P_match)
+
+ def test_protocols_isinstance_init(self):
+ T = TypeVar('T')
+ @runtime_checkable
+ class P(Protocol):
+ x = 1
+ @runtime_checkable
+ class PG(Protocol[T]):
+ x = 1
+ class C:
+ def __init__(self, x):
+ self.x = x
+ self.assertIsInstance(C(1), P)
+ self.assertIsInstance(C(1), PG)
+
+ def test_protocols_isinstance_monkeypatching(self):
+ @runtime_checkable
+ class HasX(Protocol):
+ x: int
+
+ class Foo: ...
+
+ f = Foo()
+ self.assertNotIsInstance(f, HasX)
+ f.x = 42
+ self.assertIsInstance(f, HasX)
+ del f.x
+ self.assertNotIsInstance(f, HasX)
+
+ @skip_if_py312b1
+ def test_runtime_checkable_generic_non_protocol(self):
+ # Make sure this doesn't raise AttributeError
+ with self.assertRaisesRegex(
+ TypeError,
+ "@runtime_checkable can be only applied to protocol classes",
+ ):
+ @runtime_checkable
+ class Foo(Generic[T]): ...
+
+ def test_runtime_checkable_generic(self):
+ @runtime_checkable
+ class Foo(Protocol[T]):
+ def meth(self) -> T: ...
+
+ class Impl:
+ def meth(self) -> int: ...
+
+ self.assertIsSubclass(Impl, Foo)
+
+ class NotImpl:
+ def method(self) -> int: ...
+
+ self.assertNotIsSubclass(NotImpl, Foo)
+
+ if sys.version_info >= (3, 12):
+ exec(textwrap.dedent(
+ """
+ @skip_if_py312b1
+ def test_pep695_generics_can_be_runtime_checkable(self):
+ @runtime_checkable
+ class HasX(Protocol):
+ x: int
+
+ class Bar[T]:
+ x: T
+ def __init__(self, x):
+ self.x = x
+
+ class Capybara[T]:
+ y: str
+ def __init__(self, y):
+ self.y = y
+
+ self.assertIsInstance(Bar(1), HasX)
+ self.assertNotIsInstance(Capybara('a'), HasX)
+ """
+ ))
+
+ @skip_if_py312b1
+ def test_protocols_isinstance_generic_classes(self):
+ T = TypeVar("T")
+
+ class Foo(Generic[T]):
+ x: T
+
+ def __init__(self, x):
+ self.x = x
+
+ class Bar(Foo[int]):
+ ...
+
+ @runtime_checkable
+ class HasX(Protocol):
+ x: int
+
+ foo = Foo(1)
+ self.assertIsInstance(foo, HasX)
+
+ bar = Bar(2)
+ self.assertIsInstance(bar, HasX)
+
+ def test_protocols_support_register(self):
+ @runtime_checkable
+ class P(Protocol):
+ x = 1
+ class PM(Protocol):
+ def meth(self): pass
+ class D(PM): pass
+ class C: pass
+ D.register(C)
+ P.register(C)
+ self.assertIsInstance(C(), P)
+ self.assertIsInstance(C(), D)
+
+ def test_none_on_non_callable_doesnt_block_implementation(self):
+ @runtime_checkable
+ class P(Protocol):
+ x = 1
+ class A:
+ x = 1
+ class B(A):
+ x = None
+ class C:
+ def __init__(self):
+ self.x = None
+ self.assertIsInstance(B(), P)
+ self.assertIsInstance(C(), P)
+
+ def test_none_on_callable_blocks_implementation(self):
+ @runtime_checkable
+ class P(Protocol):
+ def x(self): ...
+ class A:
+ def x(self): ...
+ class B(A):
+ x = None
+ class C:
+ def __init__(self):
+ self.x = None
+ self.assertNotIsInstance(B(), P)
+ self.assertNotIsInstance(C(), P)
+
+ def test_non_protocol_subclasses(self):
+ class P(Protocol):
+ x = 1
+ @runtime_checkable
+ class PR(Protocol):
+ def meth(self): pass
+ class NonP(P):
+ x = 1
+ class NonPR(PR): pass
+ class C(metaclass=abc.ABCMeta):
+ x = 1
+ class D(metaclass=abc.ABCMeta):
+ def meth(self): pass # noqa: B027
+ self.assertNotIsInstance(C(), NonP)
+ self.assertNotIsInstance(D(), NonPR)
+ self.assertNotIsSubclass(C, NonP)
+ self.assertNotIsSubclass(D, NonPR)
+ self.assertIsInstance(NonPR(), PR)
+ self.assertIsSubclass(NonPR, PR)
+
+ self.assertNotIn("__protocol_attrs__", vars(NonP))
+ self.assertNotIn("__protocol_attrs__", vars(NonPR))
+ self.assertNotIn("__non_callable_proto_members__", vars(NonP))
+ self.assertNotIn("__non_callable_proto_members__", vars(NonPR))
+
+ acceptable_extra_attrs = {
+ '_is_protocol', '_is_runtime_protocol', '__parameters__',
+ '__init__', '__annotations__', '__subclasshook__', '__annotate__'
+ }
+ self.assertLessEqual(vars(NonP).keys(), vars(C).keys() | acceptable_extra_attrs)
+ self.assertLessEqual(
+ vars(NonPR).keys(), vars(D).keys() | acceptable_extra_attrs
+ )
+
+ def test_custom_subclasshook(self):
+ class P(Protocol):
+ x = 1
+ class OKClass: pass
+ class BadClass:
+ x = 1
+ class C(P):
+ @classmethod
+ def __subclasshook__(cls, other):
+ return other.__name__.startswith("OK")
+ self.assertIsInstance(OKClass(), C)
+ self.assertNotIsInstance(BadClass(), C)
+ self.assertIsSubclass(OKClass, C)
+ self.assertNotIsSubclass(BadClass, C)
+
+ @skipIf(
+ sys.version_info[:4] == (3, 12, 0, 'beta') and sys.version_info[4] < 4,
+ "Early betas of Python 3.12 had a bug"
+ )
+ def test_custom_subclasshook_2(self):
+ @runtime_checkable
+ class HasX(Protocol):
+ # The presence of a non-callable member
+ # would mean issubclass() checks would fail with TypeError
+ # if it weren't for the custom `__subclasshook__` method
+ x = 1
+
+ @classmethod
+ def __subclasshook__(cls, other):
+ return hasattr(other, 'x')
+
+ class Empty: pass
+
+ class ImplementsHasX:
+ x = 1
+
+ self.assertIsInstance(ImplementsHasX(), HasX)
+ self.assertNotIsInstance(Empty(), HasX)
+ self.assertIsSubclass(ImplementsHasX, HasX)
+ self.assertNotIsSubclass(Empty, HasX)
+
+ # isinstance() and issubclass() checks against this still raise TypeError,
+ # despite the presence of the custom __subclasshook__ method,
+ # as it's not decorated with @runtime_checkable
+ class NotRuntimeCheckable(Protocol):
+ @classmethod
+ def __subclasshook__(cls, other):
+ return hasattr(other, 'x')
+
+ must_be_runtime_checkable = (
+ "Instance and class checks can only be used "
+ "with @runtime_checkable protocols"
+ )
+
+ with self.assertRaisesRegex(TypeError, must_be_runtime_checkable):
+ issubclass(object, NotRuntimeCheckable)
+ with self.assertRaisesRegex(TypeError, must_be_runtime_checkable):
+ isinstance(object(), NotRuntimeCheckable)
+
+ @skip_if_py312b1
+ def test_issubclass_fails_correctly(self):
+ @runtime_checkable
+ class NonCallableMembers(Protocol):
+ x = 1
+
+ class NotRuntimeCheckable(Protocol):
+ def callable_member(self) -> int: ...
+
+ @runtime_checkable
+ class RuntimeCheckable(Protocol):
+ def callable_member(self) -> int: ...
+
+ class C: pass
+
+ # These three all exercise different code paths,
+ # but should result in the same error message:
+ for protocol in NonCallableMembers, NotRuntimeCheckable, RuntimeCheckable:
+ with self.subTest(proto_name=protocol.__name__):
+ with self.assertRaisesRegex(
+ TypeError, r"issubclass\(\) arg 1 must be a class"
+ ):
+ issubclass(C(), protocol)
+
+ def test_defining_generic_protocols(self):
+ T = TypeVar('T')
+ S = TypeVar('S')
+ @runtime_checkable
+ class PR(Protocol[T, S]):
+ def meth(self): pass
+ class P(PR[int, T], Protocol[T]):
+ y = 1
+ with self.assertRaises(TypeError):
+ issubclass(PR[int, T], PR)
+ with self.assertRaises(TypeError):
+ issubclass(P[str], PR)
+ with self.assertRaises(TypeError):
+ PR[int]
+ with self.assertRaises(TypeError):
+ P[int, str]
+ if not TYPING_3_10_0:
+ with self.assertRaises(TypeError):
+ PR[int, 1]
+ with self.assertRaises(TypeError):
+ PR[int, ClassVar]
+ class C(PR[int, T]): pass
+ self.assertIsInstance(C[str](), C)
+
+ def test_defining_generic_protocols_old_style(self):
+ T = TypeVar('T')
+ S = TypeVar('S')
+ @runtime_checkable
+ class PR(Protocol, Generic[T, S]):
+ def meth(self): pass
+ class P(PR[int, str], Protocol):
+ y = 1
+ with self.assertRaises(TypeError):
+ self.assertIsSubclass(PR[int, str], PR)
+ self.assertIsSubclass(P, PR)
+ with self.assertRaises(TypeError):
+ PR[int]
+ if not TYPING_3_10_0:
+ with self.assertRaises(TypeError):
+ PR[int, 1]
+ class P1(Protocol, Generic[T]):
+ def bar(self, x: T) -> str: ...
+ class P2(Generic[T], Protocol):
+ def bar(self, x: T) -> str: ...
+ @runtime_checkable
+ class PSub(P1[str], Protocol):
+ x = 1
+ class Test:
+ x = 1
+ def bar(self, x: str) -> str:
+ return x
+ self.assertIsInstance(Test(), PSub)
+ if not TYPING_3_10_0:
+ with self.assertRaises(TypeError):
+ PR[int, ClassVar]
+
+ if hasattr(typing, "TypeAliasType"):
+ exec(textwrap.dedent(
+ """
+ def test_pep695_generic_protocol_callable_members(self):
+ @runtime_checkable
+ class Foo[T](Protocol):
+ def meth(self, x: T) -> None: ...
+
+ class Bar[T]:
+ def meth(self, x: T) -> None: ...
+
+ self.assertIsInstance(Bar(), Foo)
+ self.assertIsSubclass(Bar, Foo)
+
+ @runtime_checkable
+ class SupportsTrunc[T](Protocol):
+ def __trunc__(self) -> T: ...
+
+ self.assertIsInstance(0.0, SupportsTrunc)
+ self.assertIsSubclass(float, SupportsTrunc)
+
+ def test_no_weird_caching_with_issubclass_after_isinstance_pep695(self):
+ @runtime_checkable
+ class Spam[T](Protocol):
+ x: T
+
+ class Eggs[T]:
+ def __init__(self, x: T) -> None:
+ self.x = x
+
+ self.assertIsInstance(Eggs(42), Spam)
+
+ # gh-104555: If we didn't override ABCMeta.__subclasscheck__ in _ProtocolMeta,
+ # TypeError wouldn't be raised here,
+ # as the cached result of the isinstance() check immediately above
+ # would mean the issubclass() call would short-circuit
+ # before we got to the "raise TypeError" line
+ with self.assertRaises(TypeError):
+ issubclass(Eggs, Spam)
+ """
+ ))
+
+ def test_init_called(self):
+ T = TypeVar('T')
+ class P(Protocol[T]): pass
+ class C(P[T]):
+ def __init__(self):
+ self.test = 'OK'
+ self.assertEqual(C[int]().test, 'OK')
+
+ def test_protocols_bad_subscripts(self):
+ T = TypeVar('T')
+ S = TypeVar('S')
+ with self.assertRaises(TypeError):
+ class P(Protocol[T, T]): pass
+ with self.assertRaises(TypeError):
+ class P2(Protocol[int]): pass
+ with self.assertRaises(TypeError):
+ class P3(Protocol[T], Protocol[S]): pass
+ with self.assertRaises(TypeError):
+ class P4(typing.Mapping[T, S], Protocol[T]): pass
+
+ def test_generic_protocols_repr(self):
+ T = TypeVar('T')
+ S = TypeVar('S')
+ class P(Protocol[T, S]): pass
+ self.assertTrue(repr(P[T, S]).endswith('P[~T, ~S]'))
+ self.assertTrue(repr(P[int, str]).endswith('P[int, str]'))
+
+ def test_generic_protocols_eq(self):
+ T = TypeVar('T')
+ S = TypeVar('S')
+ class P(Protocol[T, S]): pass
+ self.assertEqual(P, P)
+ self.assertEqual(P[int, T], P[int, T])
+ self.assertEqual(P[T, T][Tuple[T, S]][int, str],
+ P[Tuple[int, str], Tuple[int, str]])
+
+ def test_generic_protocols_special_from_generic(self):
+ T = TypeVar('T')
+ class P(Protocol[T]): pass
+ self.assertEqual(P.__parameters__, (T,))
+ self.assertEqual(P[int].__parameters__, ())
+ self.assertEqual(P[int].__args__, (int,))
+ self.assertIs(P[int].__origin__, P)
+
+ def test_generic_protocols_special_from_protocol(self):
+ @runtime_checkable
+ class PR(Protocol):
+ x = 1
+ class P(Protocol):
+ def meth(self):
+ pass
+ T = TypeVar('T')
+ class PG(Protocol[T]):
+ x = 1
+ def meth(self):
+ pass
+ self.assertTrue(P._is_protocol)
+ self.assertTrue(PR._is_protocol)
+ self.assertTrue(PG._is_protocol)
+ self.assertFalse(P._is_runtime_protocol)
+ self.assertTrue(PR._is_runtime_protocol)
+ self.assertTrue(PG[int]._is_protocol)
+ self.assertEqual(typing_extensions._get_protocol_attrs(P), {'meth'})
+ self.assertEqual(typing_extensions._get_protocol_attrs(PR), {'x'})
+ self.assertEqual(frozenset(typing_extensions._get_protocol_attrs(PG)),
+ frozenset({'x', 'meth'}))
+
+ def test_no_runtime_deco_on_nominal(self):
+ with self.assertRaises(TypeError):
+ @runtime_checkable
+ class C: pass
+ class Proto(Protocol):
+ x = 1
+ with self.assertRaises(TypeError):
+ @runtime_checkable
+ class Concrete(Proto):
+ pass
+
+ def test_none_treated_correctly(self):
+ @runtime_checkable
+ class P(Protocol):
+ x: int = None
+ class B: pass
+ self.assertNotIsInstance(B(), P)
+ class C:
+ x = 1
+ class D:
+ x = None
+ self.assertIsInstance(C(), P)
+ self.assertIsInstance(D(), P)
+ class CI:
+ def __init__(self):
+ self.x = 1
+ class DI:
+ def __init__(self):
+ self.x = None
+ self.assertIsInstance(CI(), P)
+ self.assertIsInstance(DI(), P)
+
+ def test_protocols_in_unions(self):
+ class P(Protocol):
+ x: int = None
+ Alias = typing.Union[typing.Iterable, P]
+ Alias2 = typing.Union[P, typing.Iterable]
+ self.assertEqual(Alias, Alias2)
+
+ def test_protocols_pickleable(self):
+ global P, CP # pickle wants to reference the class by name
+ T = TypeVar('T')
+
+ @runtime_checkable
+ class P(Protocol[T]):
+ x = 1
+ class CP(P[int]):
+ pass
+
+ c = CP()
+ c.foo = 42
+ c.bar = 'abc'
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(c, proto)
+ x = pickle.loads(z)
+ self.assertEqual(x.foo, 42)
+ self.assertEqual(x.bar, 'abc')
+ self.assertEqual(x.x, 1)
+ self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'})
+ s = pickle.dumps(P)
+ D = pickle.loads(s)
+ class E:
+ x = 1
+ self.assertIsInstance(E(), D)
+
+ def test_collections_protocols_allowed(self):
+ @runtime_checkable
+ class Custom(collections.abc.Iterable, Protocol):
+ def close(self): pass
+
+ class A: ...
+ class B:
+ def __iter__(self):
+ return []
+ def close(self):
+ return 0
+
+ self.assertIsSubclass(B, Custom)
+ self.assertNotIsSubclass(A, Custom)
+
+ @skipUnless(
+ hasattr(collections.abc, "Buffer"),
+ "needs collections.abc.Buffer to exist"
+ )
+ @skip_if_py312b1
+ def test_collections_abc_buffer_protocol_allowed(self):
+ @runtime_checkable
+ class ReleasableBuffer(collections.abc.Buffer, Protocol):
+ def __release_buffer__(self, mv: memoryview) -> None: ...
+
+ class C: pass
+ class D:
+ def __buffer__(self, flags: int) -> memoryview:
+ return memoryview(b'')
+ def __release_buffer__(self, mv: memoryview) -> None:
+ pass
+
+ self.assertIsSubclass(D, ReleasableBuffer)
+ self.assertIsInstance(D(), ReleasableBuffer)
+ self.assertNotIsSubclass(C, ReleasableBuffer)
+ self.assertNotIsInstance(C(), ReleasableBuffer)
+
+ def test_builtin_protocol_allowlist(self):
+ with self.assertRaises(TypeError):
+ class CustomProtocol(TestCase, Protocol):
+ pass
+
+ class CustomContextManager(typing.ContextManager, Protocol):
+ pass
+
+ @skip_if_py312b1
+ def test_typing_extensions_protocol_allowlist(self):
+ @runtime_checkable
+ class ReleasableBuffer(Buffer, Protocol):
+ def __release_buffer__(self, mv: memoryview) -> None: ...
+
+ class C: pass
+ class D:
+ def __buffer__(self, flags: int) -> memoryview:
+ return memoryview(b'')
+ def __release_buffer__(self, mv: memoryview) -> None:
+ pass
+
+ self.assertIsSubclass(D, ReleasableBuffer)
+ self.assertIsInstance(D(), ReleasableBuffer)
+ self.assertNotIsSubclass(C, ReleasableBuffer)
+ self.assertNotIsInstance(C(), ReleasableBuffer)
+
+ def test_non_runtime_protocol_isinstance_check(self):
+ class P(Protocol):
+ x: int
+
+ with self.assertRaisesRegex(TypeError, "@runtime_checkable"):
+ isinstance(1, P)
+
+ def test_no_init_same_for_different_protocol_implementations(self):
+ class CustomProtocolWithoutInitA(Protocol):
+ pass
+
+ class CustomProtocolWithoutInitB(Protocol):
+ pass
+
+ self.assertEqual(CustomProtocolWithoutInitA.__init__, CustomProtocolWithoutInitB.__init__)
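+        # Neither protocol declares an __init__ of its own, so both are
+        # presumably given the same shared placeholder __init__ by the
+        # Protocol machinery rather than two distinct synthesized functions.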
+
+ def test_protocol_generic_over_paramspec(self):
+ P = ParamSpec("P")
+ T = TypeVar("T")
+ T2 = TypeVar("T2")
+
+ class MemoizedFunc(Protocol[P, T, T2]):
+ cache: typing.Dict[T2, T]
+ def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: ...
+
+ self.assertEqual(MemoizedFunc.__parameters__, (P, T, T2))
+ self.assertTrue(MemoizedFunc._is_protocol)
+
+ with self.assertRaises(TypeError):
+ MemoizedFunc[[int, str, str]]
+
+ if sys.version_info >= (3, 10):
+            # These unfortunately don't pass on <=3.9, because
+            # typing._type_check on older Python versions rejects the
+            # bracketed [int, str, str] parameter list
+ X = MemoizedFunc[[int, str, str], T, T2]
+ self.assertEqual(X.__parameters__, (T, T2))
+ self.assertEqual(X.__args__, ((int, str, str), T, T2))
+
+ Y = X[bytes, memoryview]
+ self.assertEqual(Y.__parameters__, ())
+ self.assertEqual(Y.__args__, ((int, str, str), bytes, memoryview))
+
+ def test_protocol_generic_over_typevartuple(self):
+ Ts = TypeVarTuple("Ts")
+ T = TypeVar("T")
+ T2 = TypeVar("T2")
+
+ class MemoizedFunc(Protocol[Unpack[Ts], T, T2]):
+ cache: typing.Dict[T2, T]
+ def __call__(self, *args: Unpack[Ts]) -> T: ...
+
+ self.assertEqual(MemoizedFunc.__parameters__, (Ts, T, T2))
+ self.assertTrue(MemoizedFunc._is_protocol)
+
+ things = "arguments" if sys.version_info >= (3, 10) else "parameters"
+
+ # A bug was fixed in 3.11.1
+ # (https://github.com/python/cpython/commit/74920aa27d0c57443dd7f704d6272cca9c507ab3)
+ # That means this assertion doesn't pass on 3.11.0,
+ # but it passes on all other Python versions
+ if sys.version_info[:3] != (3, 11, 0):
+ with self.assertRaisesRegex(TypeError, f"Too few {things}"):
+ MemoizedFunc[int]
+
+ X = MemoizedFunc[int, T, T2]
+ self.assertEqual(X.__parameters__, (T, T2))
+ self.assertEqual(X.__args__, (int, T, T2))
+
+ Y = X[bytes, memoryview]
+ self.assertEqual(Y.__parameters__, ())
+ self.assertEqual(Y.__args__, (int, bytes, memoryview))
+
+ def test_get_protocol_members(self):
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(object)
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(object())
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(Protocol)
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(Generic)
+
+ class P(Protocol):
+ a: int
+ def b(self) -> str: ...
+ @property
+ def c(self) -> int: ...
+
+ self.assertEqual(get_protocol_members(P), {'a', 'b', 'c'})
+ self.assertIsInstance(get_protocol_members(P), frozenset)
+ self.assertIsNot(get_protocol_members(P), P.__protocol_attrs__)
+
+ class Concrete:
+ a: int
+ def b(self) -> str: return "capybara"
+ @property
+ def c(self) -> int: return 5
+
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(Concrete)
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(Concrete())
+
+ class ConcreteInherit(P):
+ a: int = 42
+ def b(self) -> str: return "capybara"
+ @property
+ def c(self) -> int: return 5
+
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(ConcreteInherit)
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(ConcreteInherit())
+
+ def test_get_protocol_members_typing(self):
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(typing.Protocol)
+
+ class P(typing.Protocol):
+ a: int
+ def b(self) -> str: ...
+ @property
+ def c(self) -> int: ...
+
+ self.assertEqual(get_protocol_members(P), {'a', 'b', 'c'})
+ self.assertIsInstance(get_protocol_members(P), frozenset)
+ if hasattr(P, "__protocol_attrs__"):
+ self.assertIsNot(get_protocol_members(P), P.__protocol_attrs__)
+
+ class Concrete:
+ a: int
+ def b(self) -> str: return "capybara"
+ @property
+ def c(self) -> int: return 5
+
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(Concrete)
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(Concrete())
+
+ class ConcreteInherit(P):
+ a: int = 42
+ def b(self) -> str: return "capybara"
+ @property
+ def c(self) -> int: return 5
+
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(ConcreteInherit)
+ with self.assertRaisesRegex(TypeError, "not a Protocol"):
+ get_protocol_members(ConcreteInherit())
+
+ def test_is_protocol(self):
+ self.assertTrue(is_protocol(Proto))
+ self.assertTrue(is_protocol(Point))
+ self.assertFalse(is_protocol(Concrete))
+ self.assertFalse(is_protocol(Concrete()))
+ self.assertFalse(is_protocol(Generic))
+ self.assertFalse(is_protocol(object))
+
+ # Protocol is not itself a protocol
+ self.assertFalse(is_protocol(Protocol))
+
+ def test_is_protocol_with_typing(self):
+ self.assertFalse(is_protocol(typing.Protocol))
+
+ class TypingProto(typing.Protocol):
+ a: int
+
+ self.assertTrue(is_protocol(TypingProto))
+
+ class Concrete(TypingProto):
+ a: int
+
+ self.assertFalse(is_protocol(Concrete))
+
+ @skip_if_py312b1
+ def test_interaction_with_isinstance_checks_on_superclasses_with_ABCMeta(self):
+ # Ensure the cache is empty, or this test won't work correctly
+ collections.abc.Sized._abc_registry_clear()
+
+ class Foo(collections.abc.Sized, Protocol): pass
+
+ # CPython gh-105144: this previously raised TypeError
+ # if a Protocol subclass of Sized had been created
+ # before any isinstance() checks against Sized
+ self.assertNotIsInstance(1, collections.abc.Sized)
+
+ @skip_if_py312b1
+ def test_interaction_with_isinstance_checks_on_superclasses_with_ABCMeta_2(self):
+ # Ensure the cache is empty, or this test won't work correctly
+ collections.abc.Sized._abc_registry_clear()
+
+ class Foo(typing.Sized, Protocol): pass
+
+ # CPython gh-105144: this previously raised TypeError
+ # if a Protocol subclass of Sized had been created
+ # before any isinstance() checks against Sized
+ self.assertNotIsInstance(1, typing.Sized)
+
+ def test_empty_protocol_decorated_with_final(self):
+ @final
+ @runtime_checkable
+ class EmptyProtocol(Protocol): ...
+
+ self.assertIsSubclass(object, EmptyProtocol)
+ self.assertIsInstance(object(), EmptyProtocol)
+
+ def test_protocol_decorated_with_final_callable_members(self):
+ @final
+ @runtime_checkable
+ class ProtocolWithMethod(Protocol):
+ def startswith(self, string: str) -> bool: ...
+
+ self.assertIsSubclass(str, ProtocolWithMethod)
+ self.assertNotIsSubclass(int, ProtocolWithMethod)
+ self.assertIsInstance('foo', ProtocolWithMethod)
+ self.assertNotIsInstance(42, ProtocolWithMethod)
+
+ def test_protocol_decorated_with_final_noncallable_members(self):
+ @final
+ @runtime_checkable
+ class ProtocolWithNonCallableMember(Protocol):
+ x: int
+
+ class Foo:
+ x = 42
+
+ only_callable_members_please = (
+ r"Protocols with non-method members don't support issubclass()"
+ )
+
+ with self.assertRaisesRegex(TypeError, only_callable_members_please):
+ issubclass(Foo, ProtocolWithNonCallableMember)
+
+ with self.assertRaisesRegex(TypeError, only_callable_members_please):
+ issubclass(int, ProtocolWithNonCallableMember)
+
+ self.assertIsInstance(Foo(), ProtocolWithNonCallableMember)
+ self.assertNotIsInstance(42, ProtocolWithNonCallableMember)
+
+ def test_protocol_decorated_with_final_mixed_members(self):
+ @final
+ @runtime_checkable
+ class ProtocolWithMixedMembers(Protocol):
+ x: int
+ def method(self) -> None: ...
+
+ class Foo:
+ x = 42
+ def method(self) -> None: ...
+
+ only_callable_members_please = (
+ r"Protocols with non-method members don't support issubclass()"
+ )
+
+ with self.assertRaisesRegex(TypeError, only_callable_members_please):
+ issubclass(Foo, ProtocolWithMixedMembers)
+
+ with self.assertRaisesRegex(TypeError, only_callable_members_please):
+ issubclass(int, ProtocolWithMixedMembers)
+
+ self.assertIsInstance(Foo(), ProtocolWithMixedMembers)
+ self.assertNotIsInstance(42, ProtocolWithMixedMembers)
+
+ def test_protocol_issubclass_error_message(self):
+ @runtime_checkable
+ class Vec2D(Protocol):
+ x: float
+ y: float
+
+ def square_norm(self) -> float:
+ return self.x ** 2 + self.y ** 2
+
+ self.assertEqual(Vec2D.__protocol_attrs__, {'x', 'y', 'square_norm'})
+ expected_error_message = (
+ "Protocols with non-method members don't support issubclass()."
+ " Non-method members: 'x', 'y'."
+ )
+ with self.assertRaisesRegex(TypeError, re.escape(expected_error_message)):
+ issubclass(int, Vec2D)
+
+ def test_nonruntime_protocol_interaction_with_evil_classproperty(self):
+ class classproperty:
+ def __get__(self, instance, type):
+ raise RuntimeError("NO")
+
+ class Commentable(Protocol):
+ evil = classproperty()
+
+        # `evil` is recognised as a protocol attr, but for non-runtime
+        # protocols the protocol metaclass never actually accesses it
+        # (doing so would raise RuntimeError).
+        # See gh-113320
+ self.assertEqual(get_protocol_members(Commentable), {"evil"})
+
+ def test_runtime_protocol_interaction_with_evil_classproperty(self):
+ class CustomError(Exception): pass
+
+ class classproperty:
+ def __get__(self, instance, type):
+ raise CustomError
+
+ with self.assertRaises(TypeError) as cm:
+ @runtime_checkable
+ class Commentable(Protocol):
+ evil = classproperty()
+
+ exc = cm.exception
+ self.assertEqual(
+ exc.args[0],
+ "Failed to determine whether protocol member 'evil' is a method member"
+ )
+ self.assertIs(type(exc.__cause__), CustomError)
+
+ def test_extensions_runtimecheckable_on_typing_Protocol(self):
+ @runtime_checkable
+ class Functor(typing.Protocol):
+ def foo(self) -> None: ...
+
+ self.assertNotIsSubclass(object, Functor)
+
+ class Bar:
+ def foo(self): pass
+
+ self.assertIsSubclass(Bar, Functor)
+
+
+class Point2DGeneric(Generic[T], TypedDict):
+ a: T
+ b: T
+
+
+class Bar(Foo):
+ b: int
+
+
+class BarGeneric(FooGeneric[T], total=False):
+ b: int
+
+
+class TypedDictTests(BaseTestCase):
+ def test_basics_functional_syntax(self):
+ Emp = TypedDict('Emp', {'name': str, 'id': int})
+ self.assertIsSubclass(Emp, dict)
+ self.assertIsSubclass(Emp, typing.MutableMapping)
+ self.assertNotIsSubclass(Emp, collections.abc.Sequence)
+ jim = Emp(name='Jim', id=1)
+ self.assertIs(type(jim), dict)
+ self.assertEqual(jim['name'], 'Jim')
+ self.assertEqual(jim['id'], 1)
+ self.assertEqual(Emp.__name__, 'Emp')
+ self.assertEqual(Emp.__module__, __name__)
+ self.assertEqual(Emp.__bases__, (dict,))
+ self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
+ self.assertEqual(Emp.__total__, True)
+
+ @skipIf(sys.version_info < (3, 13), "Change in behavior in 3.13")
+ def test_keywords_syntax_raises_on_3_13(self):
+ with self.assertRaises(TypeError), self.assertWarns(DeprecationWarning):
+ TypedDict('Emp', name=str, id=int)
+
+ @skipIf(sys.version_info >= (3, 13), "3.13 removes support for kwargs")
+ def test_basics_keywords_syntax(self):
+ with self.assertWarns(DeprecationWarning):
+ Emp = TypedDict('Emp', name=str, id=int)
+ self.assertIsSubclass(Emp, dict)
+ self.assertIsSubclass(Emp, typing.MutableMapping)
+ self.assertNotIsSubclass(Emp, collections.abc.Sequence)
+ jim = Emp(name='Jim', id=1)
+ self.assertIs(type(jim), dict)
+ self.assertEqual(jim['name'], 'Jim')
+ self.assertEqual(jim['id'], 1)
+ self.assertEqual(Emp.__name__, 'Emp')
+ self.assertEqual(Emp.__module__, __name__)
+ self.assertEqual(Emp.__bases__, (dict,))
+ self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
+ self.assertEqual(Emp.__total__, True)
+
+ @skipIf(sys.version_info >= (3, 13), "3.13 removes support for kwargs")
+ def test_typeddict_special_keyword_names(self):
+ with self.assertWarns(DeprecationWarning):
+ TD = TypedDict("TD", cls=type, self=object, typename=str, _typename=int,
+ fields=list, _fields=dict)
+ self.assertEqual(TD.__name__, 'TD')
+ self.assertEqual(TD.__annotations__, {'cls': type, 'self': object, 'typename': str,
+ '_typename': int, 'fields': list, '_fields': dict})
+ a = TD(cls=str, self=42, typename='foo', _typename=53,
+ fields=[('bar', tuple)], _fields={'baz', set})
+ self.assertEqual(a['cls'], str)
+ self.assertEqual(a['self'], 42)
+ self.assertEqual(a['typename'], 'foo')
+ self.assertEqual(a['_typename'], 53)
+ self.assertEqual(a['fields'], [('bar', tuple)])
+ self.assertEqual(a['_fields'], {'baz', set})
+
+ def test_typeddict_create_errors(self):
+ with self.assertRaises(TypeError):
+ TypedDict.__new__()
+ with self.assertRaises(TypeError):
+ TypedDict()
+ with self.assertRaises(TypeError):
+ TypedDict('Emp', [('name', str)], None)
+
+ def test_typeddict_errors(self):
+ Emp = TypedDict('Emp', {'name': str, 'id': int})
+ self.assertEqual(TypedDict.__module__, 'typing_extensions')
+ jim = Emp(name='Jim', id=1)
+ with self.assertRaises(TypeError):
+ isinstance({}, Emp)
+ with self.assertRaises(TypeError):
+ isinstance(jim, Emp)
+ with self.assertRaises(TypeError):
+ issubclass(dict, Emp)
+
+ if not TYPING_3_11_0:
+ with self.assertRaises(TypeError), self.assertWarns(DeprecationWarning):
+ TypedDict('Hi', x=1)
+ with self.assertRaises(TypeError):
+ TypedDict('Hi', [('x', int), ('y', 1)])
+ with self.assertRaises(TypeError):
+ TypedDict('Hi', [('x', int)], y=int)
+
+ def test_py36_class_syntax_usage(self):
+ self.assertEqual(LabelPoint2D.__name__, 'LabelPoint2D')
+ self.assertEqual(LabelPoint2D.__module__, __name__)
+ self.assertEqual(LabelPoint2D.__annotations__, {'x': int, 'y': int, 'label': str})
+ self.assertEqual(LabelPoint2D.__bases__, (dict,))
+ self.assertEqual(LabelPoint2D.__total__, True)
+ self.assertNotIsSubclass(LabelPoint2D, typing.Sequence)
+ not_origin = Point2D(x=0, y=1)
+ self.assertEqual(not_origin['x'], 0)
+ self.assertEqual(not_origin['y'], 1)
+ other = LabelPoint2D(x=0, y=1, label='hi')
+ self.assertEqual(other['label'], 'hi')
+
+ def test_pickle(self):
+ global EmpD # pickle wants to reference the class by name
+ EmpD = TypedDict('EmpD', {'name': str, 'id': int})
+ jane = EmpD({'name': 'jane', 'id': 37})
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(jane, proto)
+ jane2 = pickle.loads(z)
+ self.assertEqual(jane2, jane)
+ self.assertEqual(jane2, {'name': 'jane', 'id': 37})
+ ZZ = pickle.dumps(EmpD, proto)
+ EmpDnew = pickle.loads(ZZ)
+ self.assertEqual(EmpDnew({'name': 'jane', 'id': 37}), jane)
+
+ def test_pickle_generic(self):
+ point = Point2DGeneric(a=5.0, b=3.0)
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(point, proto)
+ point2 = pickle.loads(z)
+ self.assertEqual(point2, point)
+ self.assertEqual(point2, {'a': 5.0, 'b': 3.0})
+ ZZ = pickle.dumps(Point2DGeneric, proto)
+ Point2DGenericNew = pickle.loads(ZZ)
+ self.assertEqual(Point2DGenericNew({'a': 5.0, 'b': 3.0}), point)
+
+ def test_optional(self):
+ EmpD = TypedDict('EmpD', {'name': str, 'id': int})
+
+ self.assertEqual(typing.Optional[EmpD], typing.Union[None, EmpD])
+ self.assertNotEqual(typing.List[EmpD], typing.Tuple[EmpD])
+
+ def test_total(self):
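+        # total=False makes every key declared here optional, so an empty dict
+        # is a valid value and __required_keys__ is empty.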
+ D = TypedDict('D', {'x': int}, total=False)
+ self.assertEqual(D(), {})
+ self.assertEqual(D(x=1), {'x': 1})
+ self.assertEqual(D.__total__, False)
+ self.assertEqual(D.__required_keys__, frozenset())
+ self.assertEqual(D.__optional_keys__, {'x'})
+
+ self.assertEqual(Options(), {})
+ self.assertEqual(Options(log_level=2), {'log_level': 2})
+ self.assertEqual(Options.__total__, False)
+ self.assertEqual(Options.__required_keys__, frozenset())
+ self.assertEqual(Options.__optional_keys__, {'log_level', 'log_path'})
+
+ def test_optional_keys(self):
+ class Point2Dor3D(Point2D, total=False):
+ z: int
+
+ assert Point2Dor3D.__required_keys__ == frozenset(['x', 'y'])
+ assert Point2Dor3D.__optional_keys__ == frozenset(['z'])
+
+ def test_keys_inheritance(self):
+ class BaseAnimal(TypedDict):
+ name: str
+
+ class Animal(BaseAnimal, total=False):
+ voice: str
+ tail: bool
+
+ class Cat(Animal):
+ fur_color: str
+
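+        # A key keeps the required/optional status of the class that declared
+        # it: 'name' stays required from BaseAnimal, the total=False keys from
+        # Animal stay optional in Cat, and Cat's own 'fur_color' is required.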
+ assert BaseAnimal.__required_keys__ == frozenset(['name'])
+ assert BaseAnimal.__optional_keys__ == frozenset([])
+ assert BaseAnimal.__annotations__ == {'name': str}
+
+ assert Animal.__required_keys__ == frozenset(['name'])
+ assert Animal.__optional_keys__ == frozenset(['tail', 'voice'])
+ assert Animal.__annotations__ == {
+ 'name': str,
+ 'tail': bool,
+ 'voice': str,
+ }
+
+ assert Cat.__required_keys__ == frozenset(['name', 'fur_color'])
+ assert Cat.__optional_keys__ == frozenset(['tail', 'voice'])
+ assert Cat.__annotations__ == {
+ 'fur_color': str,
+ 'name': str,
+ 'tail': bool,
+ 'voice': str,
+ }
+
+ def test_required_notrequired_keys(self):
+ self.assertEqual(NontotalMovie.__required_keys__,
+ frozenset({"title"}))
+ self.assertEqual(NontotalMovie.__optional_keys__,
+ frozenset({"year"}))
+
+ self.assertEqual(TotalMovie.__required_keys__,
+ frozenset({"title"}))
+ self.assertEqual(TotalMovie.__optional_keys__,
+ frozenset({"year"}))
+
+ self.assertEqual(VeryAnnotated.__required_keys__,
+ frozenset())
+ self.assertEqual(VeryAnnotated.__optional_keys__,
+ frozenset({"a"}))
+
+ self.assertEqual(AnnotatedMovie.__required_keys__,
+ frozenset({"title"}))
+ self.assertEqual(AnnotatedMovie.__optional_keys__,
+ frozenset({"year"}))
+
+ self.assertEqual(WeirdlyQuotedMovie.__required_keys__,
+ frozenset({"title"}))
+ self.assertEqual(WeirdlyQuotedMovie.__optional_keys__,
+ frozenset({"year"}))
+
+ self.assertEqual(ChildTotalMovie.__required_keys__,
+ frozenset({"title"}))
+ self.assertEqual(ChildTotalMovie.__optional_keys__,
+ frozenset({"year"}))
+
+ self.assertEqual(ChildDeeplyAnnotatedMovie.__required_keys__,
+ frozenset({"title"}))
+ self.assertEqual(ChildDeeplyAnnotatedMovie.__optional_keys__,
+ frozenset({"year"}))
+
+ def test_multiple_inheritance(self):
+ class One(TypedDict):
+ one: int
+ class Two(TypedDict):
+ two: str
+ class Untotal(TypedDict, total=False):
+ untotal: str
+ Inline = TypedDict('Inline', {'inline': bool})
+ class Regular:
+ pass
+
+ class Child(One, Two):
+ child: bool
+ self.assertEqual(
+ Child.__required_keys__,
+ frozenset(['one', 'two', 'child']),
+ )
+ self.assertEqual(
+ Child.__optional_keys__,
+ frozenset([]),
+ )
+ self.assertEqual(
+ Child.__annotations__,
+ {'one': int, 'two': str, 'child': bool},
+ )
+
+ class ChildWithOptional(One, Untotal):
+ child: bool
+ self.assertEqual(
+ ChildWithOptional.__required_keys__,
+ frozenset(['one', 'child']),
+ )
+ self.assertEqual(
+ ChildWithOptional.__optional_keys__,
+ frozenset(['untotal']),
+ )
+ self.assertEqual(
+ ChildWithOptional.__annotations__,
+ {'one': int, 'untotal': str, 'child': bool},
+ )
+
+ class ChildWithTotalFalse(One, Untotal, total=False):
+ child: bool
+ self.assertEqual(
+ ChildWithTotalFalse.__required_keys__,
+ frozenset(['one']),
+ )
+ self.assertEqual(
+ ChildWithTotalFalse.__optional_keys__,
+ frozenset(['untotal', 'child']),
+ )
+ self.assertEqual(
+ ChildWithTotalFalse.__annotations__,
+ {'one': int, 'untotal': str, 'child': bool},
+ )
+
+ class ChildWithInlineAndOptional(Untotal, Inline):
+ child: bool
+ self.assertEqual(
+ ChildWithInlineAndOptional.__required_keys__,
+ frozenset(['inline', 'child']),
+ )
+ self.assertEqual(
+ ChildWithInlineAndOptional.__optional_keys__,
+ frozenset(['untotal']),
+ )
+ self.assertEqual(
+ ChildWithInlineAndOptional.__annotations__,
+ {'inline': bool, 'untotal': str, 'child': bool},
+ )
+
+ class Closed(TypedDict, closed=True):
+ __extra_items__: None
+
+ class Unclosed(TypedDict, closed=False):
+ ...
+
+ class ChildUnclosed(Closed, Unclosed):
+ ...
+
+ self.assertFalse(ChildUnclosed.__closed__)
+ self.assertEqual(ChildUnclosed.__extra_items__, type(None))
+
+ class ChildClosed(Unclosed, Closed):
+ ...
+
+ self.assertFalse(ChildClosed.__closed__)
+ self.assertEqual(ChildClosed.__extra_items__, type(None))
+
+ wrong_bases = [
+ (One, Regular),
+ (Regular, One),
+ (One, Two, Regular),
+ (Inline, Regular),
+ (Untotal, Regular),
+ ]
+ for bases in wrong_bases:
+ with self.subTest(bases=bases):
+ with self.assertRaisesRegex(
+ TypeError,
+ 'cannot inherit from both a TypedDict type and a non-TypedDict',
+ ):
+ class Wrong(*bases):
+ pass
+
+ def test_is_typeddict(self):
+ self.assertIs(is_typeddict(Point2D), True)
+ self.assertIs(is_typeddict(Point2Dor3D), True)
+ self.assertIs(is_typeddict(Union[str, int]), False)
+ # classes, not instances
+ self.assertIs(is_typeddict(Point2D()), False)
+ call_based = TypedDict('call_based', {'a': int})
+ self.assertIs(is_typeddict(call_based), True)
+ self.assertIs(is_typeddict(call_based()), False)
+
+ T = TypeVar("T")
+ class BarGeneric(TypedDict, Generic[T]):
+ a: T
+ self.assertIs(is_typeddict(BarGeneric), True)
+ self.assertIs(is_typeddict(BarGeneric[int]), False)
+ self.assertIs(is_typeddict(BarGeneric()), False)
+
+ if hasattr(typing, "TypeAliasType"):
+ ns = {"TypedDict": TypedDict}
+ exec("""if True:
+ class NewGeneric[T](TypedDict):
+ a: T
+ """, ns)
+ NewGeneric = ns["NewGeneric"]
+ self.assertIs(is_typeddict(NewGeneric), True)
+ self.assertIs(is_typeddict(NewGeneric[int]), False)
+ self.assertIs(is_typeddict(NewGeneric()), False)
+
+ # The TypedDict constructor is not itself a TypedDict
+ self.assertIs(is_typeddict(TypedDict), False)
+ if hasattr(typing, "TypedDict"):
+ self.assertIs(is_typeddict(typing.TypedDict), False)
+
+ def test_is_typeddict_against_typeddict_from_typing(self):
+ Point = typing.TypedDict('Point', {'x': int, 'y': int})
+
+ class PointDict2D(typing.TypedDict):
+ x: int
+ y: int
+
+ class PointDict3D(PointDict2D, total=False):
+ z: int
+
+ assert is_typeddict(Point) is True
+ assert is_typeddict(PointDict2D) is True
+ assert is_typeddict(PointDict3D) is True
+
+ @skipUnless(HAS_FORWARD_MODULE, "ForwardRef.__forward_module__ was added in 3.9")
+ def test_get_type_hints_cross_module_subclass(self):
+ self.assertNotIn("_DoNotImport", globals())
+ self.assertEqual(
+ {k: v.__name__ for k, v in get_type_hints(Bar).items()},
+ {'a': "_DoNotImport", 'b': "int"}
+ )
+
+ def test_get_type_hints_generic(self):
+ self.assertEqual(
+ get_type_hints(BarGeneric),
+ {'a': typing.Optional[T], 'b': int}
+ )
+
+ class FooBarGeneric(BarGeneric[int]):
+ c: str
+
+ self.assertEqual(
+ get_type_hints(FooBarGeneric),
+ {'a': typing.Optional[T], 'b': int, 'c': str}
+ )
+
+ @skipUnless(TYPING_3_12_0, "PEP 695 required")
+ def test_pep695_generic_typeddict(self):
+ ns = {"TypedDict": TypedDict}
+ exec("""if True:
+ class A[T](TypedDict):
+ a: T
+ """, ns)
+ A = ns["A"]
+ T, = A.__type_params__
+ self.assertIsInstance(T, TypeVar)
+ self.assertEqual(T.__name__, 'T')
+ self.assertEqual(A.__bases__, (Generic, dict))
+ self.assertEqual(A.__orig_bases__, (TypedDict, Generic[T]))
+ self.assertEqual(A.__mro__, (A, Generic, dict, object))
+ self.assertEqual(A.__parameters__, (T,))
+ self.assertEqual(A[str].__parameters__, ())
+ self.assertEqual(A[str].__args__, (str,))
+
+ def test_generic_inheritance(self):
+ class A(TypedDict, Generic[T]):
+ a: T
+
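+        # The declared bases survive only in __orig_bases__; at runtime
+        # __bases__ collapses to (Generic, dict) for a generic TypedDict.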
+ self.assertEqual(A.__bases__, (Generic, dict))
+ self.assertEqual(A.__orig_bases__, (TypedDict, Generic[T]))
+ self.assertEqual(A.__mro__, (A, Generic, dict, object))
+ self.assertEqual(A.__parameters__, (T,))
+ self.assertEqual(A[str].__parameters__, ())
+ self.assertEqual(A[str].__args__, (str,))
+
+ class A2(Generic[T], TypedDict):
+ a: T
+
+ self.assertEqual(A2.__bases__, (Generic, dict))
+ self.assertEqual(A2.__orig_bases__, (Generic[T], TypedDict))
+ self.assertEqual(A2.__mro__, (A2, Generic, dict, object))
+ self.assertEqual(A2.__parameters__, (T,))
+ self.assertEqual(A2[str].__parameters__, ())
+ self.assertEqual(A2[str].__args__, (str,))
+
+ class B(A[KT], total=False):
+ b: KT
+
+ self.assertEqual(B.__bases__, (Generic, dict))
+ self.assertEqual(B.__orig_bases__, (A[KT],))
+ self.assertEqual(B.__mro__, (B, Generic, dict, object))
+ self.assertEqual(B.__parameters__, (KT,))
+ self.assertEqual(B.__total__, False)
+ self.assertEqual(B.__optional_keys__, frozenset(['b']))
+ self.assertEqual(B.__required_keys__, frozenset(['a']))
+
+ self.assertEqual(B[str].__parameters__, ())
+ self.assertEqual(B[str].__args__, (str,))
+ self.assertEqual(B[str].__origin__, B)
+
+ class C(B[int]):
+ c: int
+
+ self.assertEqual(C.__bases__, (Generic, dict))
+ self.assertEqual(C.__orig_bases__, (B[int],))
+ self.assertEqual(C.__mro__, (C, Generic, dict, object))
+ self.assertEqual(C.__parameters__, ())
+ self.assertEqual(C.__total__, True)
+ self.assertEqual(C.__optional_keys__, frozenset(['b']))
+ self.assertEqual(C.__required_keys__, frozenset(['a', 'c']))
+ assert C.__annotations__ == {
+ 'a': T,
+ 'b': KT,
+ 'c': int,
+ }
+ with self.assertRaises(TypeError):
+ C[str]
+
+ class Point3D(Point2DGeneric[T], Generic[T, KT]):
+ c: KT
+
+ self.assertEqual(Point3D.__bases__, (Generic, dict))
+ self.assertEqual(Point3D.__orig_bases__, (Point2DGeneric[T], Generic[T, KT]))
+ self.assertEqual(Point3D.__mro__, (Point3D, Generic, dict, object))
+ self.assertEqual(Point3D.__parameters__, (T, KT))
+ self.assertEqual(Point3D.__total__, True)
+ self.assertEqual(Point3D.__optional_keys__, frozenset())
+ self.assertEqual(Point3D.__required_keys__, frozenset(['a', 'b', 'c']))
+ self.assertEqual(Point3D.__annotations__, {
+ 'a': T,
+ 'b': T,
+ 'c': KT,
+ })
+ self.assertEqual(Point3D[int, str].__origin__, Point3D)
+
+ with self.assertRaises(TypeError):
+ Point3D[int]
+
+ with self.assertRaises(TypeError):
+ class Point3D(Point2DGeneric[T], Generic[KT]):
+ c: KT
+
+ def test_implicit_any_inheritance(self):
+ class A(TypedDict, Generic[T]):
+ a: T
+
+ class B(A[KT], total=False):
+ b: KT
+
+ class WithImplicitAny(B):
+ c: int
+
+ self.assertEqual(WithImplicitAny.__bases__, (Generic, dict,))
+ self.assertEqual(WithImplicitAny.__mro__, (WithImplicitAny, Generic, dict, object))
+ # Consistent with GenericTests.test_implicit_any
+ self.assertEqual(WithImplicitAny.__parameters__, ())
+ self.assertEqual(WithImplicitAny.__total__, True)
+ self.assertEqual(WithImplicitAny.__optional_keys__, frozenset(['b']))
+ self.assertEqual(WithImplicitAny.__required_keys__, frozenset(['a', 'c']))
+ assert WithImplicitAny.__annotations__ == {
+ 'a': T,
+ 'b': KT,
+ 'c': int,
+ }
+ with self.assertRaises(TypeError):
+ WithImplicitAny[str]
+
+ @skipUnless(TYPING_3_9_0, "Was changed in 3.9")
+ def test_non_generic_subscript(self):
+ # For backward compatibility, subscription works
+ # on arbitrary TypedDict types.
+ # (But we don't attempt to backport this misfeature onto 3.8.)
+ class TD(TypedDict):
+ a: T
+ A = TD[int]
+ self.assertEqual(A.__origin__, TD)
+ self.assertEqual(A.__parameters__, ())
+ self.assertEqual(A.__args__, (int,))
+ a = A(a=1)
+ self.assertIs(type(a), dict)
+ self.assertEqual(a, {'a': 1})
+
+ def test_orig_bases(self):
+ T = TypeVar('T')
+
+ class Parent(TypedDict):
+ pass
+
+ class Child(Parent):
+ pass
+
+ class OtherChild(Parent):
+ pass
+
+ class MixedChild(Child, OtherChild, Parent):
+ pass
+
+ class GenericParent(TypedDict, Generic[T]):
+ pass
+
+ class GenericChild(GenericParent[int]):
+ pass
+
+ class OtherGenericChild(GenericParent[str]):
+ pass
+
+ class MixedGenericChild(GenericChild, OtherGenericChild, GenericParent[float]):
+ pass
+
+ class MultipleGenericBases(GenericParent[int], GenericParent[float]):
+ pass
+
+ CallTypedDict = TypedDict('CallTypedDict', {})
+
+ self.assertEqual(Parent.__orig_bases__, (TypedDict,))
+ self.assertEqual(Child.__orig_bases__, (Parent,))
+ self.assertEqual(OtherChild.__orig_bases__, (Parent,))
+ self.assertEqual(MixedChild.__orig_bases__, (Child, OtherChild, Parent,))
+ self.assertEqual(GenericParent.__orig_bases__, (TypedDict, Generic[T]))
+ self.assertEqual(GenericChild.__orig_bases__, (GenericParent[int],))
+ self.assertEqual(OtherGenericChild.__orig_bases__, (GenericParent[str],))
+ self.assertEqual(MixedGenericChild.__orig_bases__, (GenericChild, OtherGenericChild, GenericParent[float]))
+ self.assertEqual(MultipleGenericBases.__orig_bases__, (GenericParent[int], GenericParent[float]))
+ self.assertEqual(CallTypedDict.__orig_bases__, (TypedDict,))
+
+ def test_zero_fields_typeddicts(self):
+ T1 = TypedDict("T1", {})
+ class T2(TypedDict): pass
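+        # The PEP 695 class syntax only parses on newer Pythons, so fall back
+        # to a plain TypedDict class if exec() raises SyntaxError.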
+ try:
+ ns = {"TypedDict": TypedDict}
+ exec("class T3[tvar](TypedDict): pass", ns)
+ T3 = ns["T3"]
+ except SyntaxError:
+ class T3(TypedDict): pass
+ S = TypeVar("S")
+ class T4(TypedDict, Generic[S]): pass
+
+ expected_warning = re.escape(
+ "Failing to pass a value for the 'fields' parameter is deprecated "
+ "and will be disallowed in Python 3.15. "
+ "To create a TypedDict class with 0 fields "
+ "using the functional syntax, "
+ "pass an empty dictionary, e.g. `T5 = TypedDict('T5', {})`."
+ )
+ with self.assertWarnsRegex(DeprecationWarning, fr"^{expected_warning}$"):
+ T5 = TypedDict('T5')
+
+ expected_warning = re.escape(
+ "Passing `None` as the 'fields' parameter is deprecated "
+ "and will be disallowed in Python 3.15. "
+ "To create a TypedDict class with 0 fields "
+ "using the functional syntax, "
+ "pass an empty dictionary, e.g. `T6 = TypedDict('T6', {})`."
+ )
+ with self.assertWarnsRegex(DeprecationWarning, fr"^{expected_warning}$"):
+ T6 = TypedDict('T6', None)
+
+ for klass in T1, T2, T3, T4, T5, T6:
+ with self.subTest(klass=klass.__name__):
+ self.assertEqual(klass.__annotations__, {})
+ self.assertEqual(klass.__required_keys__, set())
+ self.assertEqual(klass.__optional_keys__, set())
+ self.assertIsInstance(klass(), dict)
+
+ def test_readonly_inheritance(self):
+ class Base1(TypedDict):
+ a: ReadOnly[int]
+
+ class Child1(Base1):
+ b: str
+
+ self.assertEqual(Child1.__readonly_keys__, frozenset({'a'}))
+ self.assertEqual(Child1.__mutable_keys__, frozenset({'b'}))
+
+ class Base2(TypedDict):
+ a: ReadOnly[int]
+
+ class Child2(Base2):
+ b: str
+
+        self.assertEqual(Child2.__readonly_keys__, frozenset({'a'}))
+        self.assertEqual(Child2.__mutable_keys__, frozenset({'b'}))
+
+ def test_make_mutable_key_readonly(self):
+ class Base(TypedDict):
+ a: int
+
+ self.assertEqual(Base.__readonly_keys__, frozenset())
+ self.assertEqual(Base.__mutable_keys__, frozenset({'a'}))
+
+ class Child(Base):
+ a: ReadOnly[int] # type checker error, but allowed at runtime
+
+ self.assertEqual(Child.__readonly_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__mutable_keys__, frozenset())
+
+ def test_can_make_readonly_key_mutable(self):
+ class Base(TypedDict):
+ a: ReadOnly[int]
+
+ class Child(Base):
+ a: int
+
+ self.assertEqual(Child.__readonly_keys__, frozenset())
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a'}))
+
+ def test_combine_qualifiers(self):
+ class AllTheThings(TypedDict):
+ a: Annotated[Required[ReadOnly[int]], "why not"]
+ b: Required[Annotated[ReadOnly[int], "why not"]]
+ c: ReadOnly[NotRequired[Annotated[int, "why not"]]]
+ d: NotRequired[Annotated[int, "why not"]]
+
+ self.assertEqual(AllTheThings.__required_keys__, frozenset({'a', 'b'}))
+ self.assertEqual(AllTheThings.__optional_keys__, frozenset({'c', 'd'}))
+ self.assertEqual(AllTheThings.__readonly_keys__, frozenset({'a', 'b', 'c'}))
+ self.assertEqual(AllTheThings.__mutable_keys__, frozenset({'d'}))
+
+ self.assertEqual(
+ get_type_hints(AllTheThings, include_extras=False),
+ {'a': int, 'b': int, 'c': int, 'd': int},
+ )
+ self.assertEqual(
+ get_type_hints(AllTheThings, include_extras=True),
+ {
+ 'a': Annotated[Required[ReadOnly[int]], 'why not'],
+ 'b': Required[Annotated[ReadOnly[int], 'why not']],
+ 'c': ReadOnly[NotRequired[Annotated[int, 'why not']]],
+ 'd': NotRequired[Annotated[int, 'why not']],
+ },
+ )
+
+ def test_extra_keys_non_readonly(self):
+ class Base(TypedDict, closed=True):
+ __extra_items__: str
+
+ class Child(Base):
+ a: NotRequired[int]
+
+ self.assertEqual(Child.__required_keys__, frozenset({}))
+ self.assertEqual(Child.__optional_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__readonly_keys__, frozenset({}))
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a'}))
+
+ def test_extra_keys_readonly(self):
+ class Base(TypedDict, closed=True):
+ __extra_items__: ReadOnly[str]
+
+ class Child(Base):
+ a: NotRequired[str]
+
+ self.assertEqual(Child.__required_keys__, frozenset({}))
+ self.assertEqual(Child.__optional_keys__, frozenset({'a'}))
+ self.assertEqual(Child.__readonly_keys__, frozenset({}))
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a'}))
+
+ def test_extra_key_required(self):
+ with self.assertRaisesRegex(
+ TypeError,
+ "Special key __extra_items__ does not support Required"
+ ):
+ TypedDict("A", {"__extra_items__": Required[int]}, closed=True)
+
+ with self.assertRaisesRegex(
+ TypeError,
+ "Special key __extra_items__ does not support NotRequired"
+ ):
+ TypedDict("A", {"__extra_items__": NotRequired[int]}, closed=True)
+
+ def test_regular_extra_items(self):
+ class ExtraReadOnly(TypedDict):
+ __extra_items__: ReadOnly[str]
+
+ self.assertEqual(ExtraReadOnly.__required_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraReadOnly.__optional_keys__, frozenset({}))
+ self.assertEqual(ExtraReadOnly.__readonly_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraReadOnly.__mutable_keys__, frozenset({}))
+ self.assertEqual(ExtraReadOnly.__extra_items__, None)
+ self.assertFalse(ExtraReadOnly.__closed__)
+
+ class ExtraRequired(TypedDict):
+ __extra_items__: Required[str]
+
+ self.assertEqual(ExtraRequired.__required_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraRequired.__optional_keys__, frozenset({}))
+ self.assertEqual(ExtraRequired.__readonly_keys__, frozenset({}))
+ self.assertEqual(ExtraRequired.__mutable_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraRequired.__extra_items__, None)
+ self.assertFalse(ExtraRequired.__closed__)
+
+ class ExtraNotRequired(TypedDict):
+ __extra_items__: NotRequired[str]
+
+ self.assertEqual(ExtraNotRequired.__required_keys__, frozenset({}))
+ self.assertEqual(ExtraNotRequired.__optional_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraNotRequired.__readonly_keys__, frozenset({}))
+ self.assertEqual(ExtraNotRequired.__mutable_keys__, frozenset({'__extra_items__'}))
+ self.assertEqual(ExtraNotRequired.__extra_items__, None)
+ self.assertFalse(ExtraNotRequired.__closed__)
+
+ def test_closed_inheritance(self):
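+        # Under closed=True, the special __extra_items__ annotation describes
+        # the type of extra items (the draft PEP 728 behavior implemented here)
+        # rather than declaring a regular key, so it does not appear in
+        # __annotations__ or in the key sets.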
+ class Base(TypedDict, closed=True):
+ __extra_items__: ReadOnly[Union[str, None]]
+
+ self.assertEqual(Base.__required_keys__, frozenset({}))
+ self.assertEqual(Base.__optional_keys__, frozenset({}))
+ self.assertEqual(Base.__readonly_keys__, frozenset({}))
+ self.assertEqual(Base.__mutable_keys__, frozenset({}))
+ self.assertEqual(Base.__annotations__, {})
+ self.assertEqual(Base.__extra_items__, ReadOnly[Union[str, None]])
+ self.assertTrue(Base.__closed__)
+
+ class Child(Base):
+ a: int
+ __extra_items__: int
+
+ self.assertEqual(Child.__required_keys__, frozenset({'a', "__extra_items__"}))
+ self.assertEqual(Child.__optional_keys__, frozenset({}))
+ self.assertEqual(Child.__readonly_keys__, frozenset({}))
+ self.assertEqual(Child.__mutable_keys__, frozenset({'a', "__extra_items__"}))
+ self.assertEqual(Child.__annotations__, {"__extra_items__": int, "a": int})
+ self.assertEqual(Child.__extra_items__, ReadOnly[Union[str, None]])
+ self.assertFalse(Child.__closed__)
+
+ class GrandChild(Child, closed=True):
+ __extra_items__: str
+
+ self.assertEqual(GrandChild.__required_keys__, frozenset({'a', "__extra_items__"}))
+ self.assertEqual(GrandChild.__optional_keys__, frozenset({}))
+ self.assertEqual(GrandChild.__readonly_keys__, frozenset({}))
+ self.assertEqual(GrandChild.__mutable_keys__, frozenset({'a', "__extra_items__"}))
+ self.assertEqual(GrandChild.__annotations__, {"__extra_items__": int, "a": int})
+ self.assertEqual(GrandChild.__extra_items__, str)
+ self.assertTrue(GrandChild.__closed__)
+
+ def test_implicit_extra_items(self):
+ class Base(TypedDict):
+ a: int
+
+ self.assertEqual(Base.__extra_items__, None)
+ self.assertFalse(Base.__closed__)
+
+ class ChildA(Base, closed=True):
+ ...
+
+ self.assertEqual(ChildA.__extra_items__, Never)
+ self.assertTrue(ChildA.__closed__)
+
+ class ChildB(Base, closed=True):
+ __extra_items__: None
+
+ self.assertEqual(ChildB.__extra_items__, type(None))
+ self.assertTrue(ChildB.__closed__)
+
+ @skipIf(
+ TYPING_3_13_0,
+ "The keyword argument alternative to define a "
+ "TypedDict type using the functional syntax is no longer supported"
+ )
+ def test_backwards_compatibility(self):
+ with self.assertWarns(DeprecationWarning):
+ TD = TypedDict("TD", closed=int)
+ self.assertFalse(TD.__closed__)
+ self.assertEqual(TD.__annotations__, {"closed": int})
+
+
+class AnnotatedTests(BaseTestCase):
+
+ def test_repr(self):
+ if hasattr(typing, 'Annotated'):
+ mod_name = 'typing'
+ else:
+ mod_name = "typing_extensions"
+ self.assertEqual(
+ repr(Annotated[int, 4, 5]),
+ mod_name + ".Annotated[int, 4, 5]"
+ )
+ self.assertEqual(
+ repr(Annotated[List[int], 4, 5]),
+ mod_name + ".Annotated[typing.List[int], 4, 5]"
+ )
+
+ def test_flatten(self):
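+        # Nesting Annotated flattens into a single Annotated whose metadata is
+        # ordered from the innermost annotation outwards.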
+ A = Annotated[Annotated[int, 4], 5]
+ self.assertEqual(A, Annotated[int, 4, 5])
+ self.assertEqual(A.__metadata__, (4, 5))
+ self.assertEqual(A.__origin__, int)
+
+ def test_specialize(self):
+ L = Annotated[List[T], "my decoration"]
+ LI = Annotated[List[int], "my decoration"]
+ self.assertEqual(L[int], Annotated[List[int], "my decoration"])
+ self.assertEqual(L[int].__metadata__, ("my decoration",))
+ self.assertEqual(L[int].__origin__, List[int])
+ with self.assertRaises(TypeError):
+ LI[int]
+ with self.assertRaises(TypeError):
+ L[int, float]
+
+ def test_hash_eq(self):
+ self.assertEqual(len({Annotated[int, 4, 5], Annotated[int, 4, 5]}), 1)
+ self.assertNotEqual(Annotated[int, 4, 5], Annotated[int, 5, 4])
+ self.assertNotEqual(Annotated[int, 4, 5], Annotated[str, 4, 5])
+ self.assertNotEqual(Annotated[int, 4], Annotated[int, 4, 4])
+ self.assertEqual(
+ {Annotated[int, 4, 5], Annotated[int, 4, 5], Annotated[T, 4, 5]},
+ {Annotated[int, 4, 5], Annotated[T, 4, 5]}
+ )
+
+ def test_instantiate(self):
+ class C:
+ classvar = 4
+
+ def __init__(self, x):
+ self.x = x
+
+ def __eq__(self, other):
+ if not isinstance(other, C):
+ return NotImplemented
+ return other.x == self.x
+
+ A = Annotated[C, "a decoration"]
+ a = A(5)
+ c = C(5)
+ self.assertEqual(a, c)
+ self.assertEqual(a.x, c.x)
+ self.assertEqual(a.classvar, c.classvar)
+
+ def test_instantiate_generic(self):
+ MyCount = Annotated[typing_extensions.Counter[T], "my decoration"]
+ self.assertEqual(MyCount([4, 4, 5]), {4: 2, 5: 1})
+ self.assertEqual(MyCount[int]([4, 4, 5]), {4: 2, 5: 1})
+
+ def test_cannot_instantiate_forward(self):
+ A = Annotated["int", (5, 6)]
+ with self.assertRaises(TypeError):
+ A(5)
+
+ def test_cannot_instantiate_type_var(self):
+ A = Annotated[T, (5, 6)]
+ with self.assertRaises(TypeError):
+ A(5)
+
+ def test_cannot_getattr_typevar(self):
+ with self.assertRaises(AttributeError):
+ Annotated[T, (5, 7)].x
+
+ def test_attr_passthrough(self):
+ class C:
+ classvar = 4
+
+ A = Annotated[C, "a decoration"]
+ self.assertEqual(A.classvar, 4)
+ A.x = 5
+ self.assertEqual(C.x, 5)
+
+ @skipIf(sys.version_info[:2] in ((3, 9), (3, 10)), "Waiting for bpo-46491 bugfix.")
+ def test_special_form_containment(self):
+ class C:
+ classvar: Annotated[ClassVar[int], "a decoration"] = 4
+ const: Annotated[Final[int], "Const"] = 4
+
+ self.assertEqual(get_type_hints(C, globals())["classvar"], ClassVar[int])
+ self.assertEqual(get_type_hints(C, globals())["const"], Final[int])
+
+ def test_cannot_subclass(self):
+ with self.assertRaisesRegex(TypeError, "Cannot subclass .*Annotated"):
+ class C(Annotated):
+ pass
+
+ def test_cannot_check_instance(self):
+ with self.assertRaises(TypeError):
+ isinstance(5, Annotated[int, "positive"])
+
+ def test_cannot_check_subclass(self):
+ with self.assertRaises(TypeError):
+ issubclass(int, Annotated[int, "positive"])
+
+ def test_pickle(self):
+ samples = [typing.Any, typing.Union[int, str],
+ typing.Optional[str], Tuple[int, ...],
+ typing.Callable[[str], bytes],
+ Self, LiteralString, Never]
+
+ for t in samples:
+ x = Annotated[t, "a"]
+
+ for prot in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(protocol=prot, type=t):
+ pickled = pickle.dumps(x, prot)
+ restored = pickle.loads(pickled)
+ self.assertEqual(x, restored)
+
+ global _Annotated_test_G
+
+ class _Annotated_test_G(Generic[T]):
+ x = 1
+
+ G = Annotated[_Annotated_test_G[int], "A decoration"]
+ G.foo = 42
+ G.bar = 'abc'
+
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(G, proto)
+ x = pickle.loads(z)
+ self.assertEqual(x.foo, 42)
+ self.assertEqual(x.bar, 'abc')
+ self.assertEqual(x.x, 1)
+
+ def test_subst(self):
+ dec = "a decoration"
+ dec2 = "another decoration"
+
+ S = Annotated[T, dec2]
+ self.assertEqual(S[int], Annotated[int, dec2])
+
+ self.assertEqual(S[Annotated[int, dec]], Annotated[int, dec, dec2])
+ L = Annotated[List[T], dec]
+
+ self.assertEqual(L[int], Annotated[List[int], dec])
+ with self.assertRaises(TypeError):
+ L[int, int]
+
+ self.assertEqual(S[L[int]], Annotated[List[int], dec, dec2])
+
+ D = Annotated[Dict[KT, VT], dec]
+ self.assertEqual(D[str, int], Annotated[Dict[str, int], dec])
+ with self.assertRaises(TypeError):
+ D[int]
+
+ It = Annotated[int, dec]
+ with self.assertRaises(TypeError):
+ It[None]
+
+ LI = L[int]
+ with self.assertRaises(TypeError):
+ LI[None]
+
+ def test_annotated_in_other_types(self):
+ X = List[Annotated[T, 5]]
+ self.assertEqual(X[int], List[Annotated[int, 5]])
+
+ def test_nested_annotated_with_unhashable_metadata(self):
+ X = Annotated[
+ List[Annotated[str, {"unhashable_metadata"}]],
+ "metadata"
+ ]
+ self.assertEqual(X.__origin__, List[Annotated[str, {"unhashable_metadata"}]])
+ self.assertEqual(X.__metadata__, ("metadata",))
+
+
+class GetTypeHintsTests(BaseTestCase):
+ def test_get_type_hints(self):
+ def foobar(x: List['X']): ...
+ X = Annotated[int, (1, 10)]
+ self.assertEqual(
+ get_type_hints(foobar, globals(), locals()),
+ {'x': List[int]}
+ )
+ self.assertEqual(
+ get_type_hints(foobar, globals(), locals(), include_extras=True),
+ {'x': List[Annotated[int, (1, 10)]]}
+ )
+ BA = Tuple[Annotated[T, (1, 0)], ...]
+ def barfoo(x: BA): ...
+ self.assertEqual(get_type_hints(barfoo, globals(), locals())['x'], Tuple[T, ...])
+ self.assertIs(
+ get_type_hints(barfoo, globals(), locals(), include_extras=True)['x'],
+ BA
+ )
+ def barfoo2(x: typing.Callable[..., Annotated[List[T], "const"]],
+ y: typing.Union[int, Annotated[T, "mutable"]]): ...
+ self.assertEqual(
+ get_type_hints(barfoo2, globals(), locals()),
+ {'x': typing.Callable[..., List[T]], 'y': typing.Union[int, T]}
+ )
+ BA2 = typing.Callable[..., List[T]]
+ def barfoo3(x: BA2): ...
+ self.assertIs(
+ get_type_hints(barfoo3, globals(), locals(), include_extras=True)["x"],
+ BA2
+ )
+
+ def test_get_type_hints_refs(self):
+
+ Const = Annotated[T, "Const"]
+
+ class MySet(Generic[T]):
+
+ def __ior__(self, other: "Const[MySet[T]]") -> "MySet[T]":
+ ...
+
+ def __iand__(self, other: Const["MySet[T]"]) -> "MySet[T]":
+ ...
+
+ self.assertEqual(
+ get_type_hints(MySet.__iand__, globals(), locals()),
+ {'other': MySet[T], 'return': MySet[T]}
+ )
+
+ self.assertEqual(
+ get_type_hints(MySet.__iand__, globals(), locals(), include_extras=True),
+ {'other': Const[MySet[T]], 'return': MySet[T]}
+ )
+
+ self.assertEqual(
+ get_type_hints(MySet.__ior__, globals(), locals()),
+ {'other': MySet[T], 'return': MySet[T]}
+ )
+
+ def test_get_type_hints_typeddict(self):
+ assert get_type_hints(TotalMovie) == {'title': str, 'year': int}
+ assert get_type_hints(TotalMovie, include_extras=True) == {
+ 'title': str,
+ 'year': NotRequired[int],
+ }
+
+ assert get_type_hints(AnnotatedMovie) == {'title': str, 'year': int}
+ assert get_type_hints(AnnotatedMovie, include_extras=True) == {
+ 'title': Annotated[Required[str], "foobar"],
+ 'year': NotRequired[Annotated[int, 2000]],
+ }
+
+
+class TypeAliasTests(BaseTestCase):
+ def test_canonical_usage_with_variable_annotation(self):
+ ns = {}
+ exec('Alias: TypeAlias = Employee', globals(), ns)
+
+ def test_canonical_usage_with_type_comment(self):
+ Alias: TypeAlias = Employee # noqa: F841
+
+ def test_cannot_instantiate(self):
+ with self.assertRaises(TypeError):
+ TypeAlias()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(42, TypeAlias)
+
+ def test_no_issubclass(self):
+ with self.assertRaises(TypeError):
+ issubclass(Employee, TypeAlias)
+
+ with self.assertRaises(TypeError):
+ issubclass(TypeAlias, Employee)
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(TypeAlias):
+ pass
+
+ with self.assertRaises(TypeError):
+ class D(type(TypeAlias)):
+ pass
+
+ def test_repr(self):
+ if hasattr(typing, 'TypeAlias'):
+ self.assertEqual(repr(TypeAlias), 'typing.TypeAlias')
+ else:
+ self.assertEqual(repr(TypeAlias), 'typing_extensions.TypeAlias')
+
+ def test_cannot_subscript(self):
+ with self.assertRaises(TypeError):
+ TypeAlias[int]
+
+
+class ParamSpecTests(BaseTestCase):
+
+ def test_basic_plain(self):
+ P = ParamSpec('P')
+ self.assertEqual(P, P)
+ self.assertIsInstance(P, ParamSpec)
+ self.assertEqual(P.__name__, 'P')
+ # Should be hashable
+ hash(P)
+
+ def test_repr(self):
+ P = ParamSpec('P')
+ P_co = ParamSpec('P_co', covariant=True)
+ P_contra = ParamSpec('P_contra', contravariant=True)
+ P_infer = ParamSpec('P_infer', infer_variance=True)
+ P_2 = ParamSpec('P_2')
+ self.assertEqual(repr(P), '~P')
+ self.assertEqual(repr(P_2), '~P_2')
+
+ # Note: PEP 612 doesn't require these to be repr-ed correctly, but
+        # we just follow CPython here.
+ self.assertEqual(repr(P_co), '+P_co')
+ self.assertEqual(repr(P_contra), '-P_contra')
+ # On other versions we use typing.ParamSpec, but it is not aware of
+ # infer_variance=. Not worth creating our own version of ParamSpec
+ # for this.
+ if hasattr(typing, 'TypeAliasType') or not hasattr(typing, 'ParamSpec'):
+ self.assertEqual(repr(P_infer), 'P_infer')
+ else:
+ self.assertEqual(repr(P_infer), '~P_infer')
+
+ def test_variance(self):
+ P_co = ParamSpec('P_co', covariant=True)
+ P_contra = ParamSpec('P_contra', contravariant=True)
+ P_infer = ParamSpec('P_infer', infer_variance=True)
+
+ self.assertIs(P_co.__covariant__, True)
+ self.assertIs(P_co.__contravariant__, False)
+ self.assertIs(P_co.__infer_variance__, False)
+
+ self.assertIs(P_contra.__covariant__, False)
+ self.assertIs(P_contra.__contravariant__, True)
+ self.assertIs(P_contra.__infer_variance__, False)
+
+ self.assertIs(P_infer.__covariant__, False)
+ self.assertIs(P_infer.__contravariant__, False)
+ self.assertIs(P_infer.__infer_variance__, True)
+
+ def test_valid_uses(self):
+ P = ParamSpec('P')
+ T = TypeVar('T')
+ C1 = typing.Callable[P, int]
+ self.assertEqual(C1.__args__, (P, int))
+ self.assertEqual(C1.__parameters__, (P,))
+ C2 = typing.Callable[P, T]
+ self.assertEqual(C2.__args__, (P, T))
+ self.assertEqual(C2.__parameters__, (P, T))
+
+ # Test collections.abc.Callable too.
+ if sys.version_info[:2] >= (3, 9):
+ # Note: no tests for Callable.__parameters__ here
+ # because types.GenericAlias Callable is hardcoded to search
+ # for tp_name "TypeVar" in C. This was changed in 3.10.
+ C3 = collections.abc.Callable[P, int]
+ self.assertEqual(C3.__args__, (P, int))
+ C4 = collections.abc.Callable[P, T]
+ self.assertEqual(C4.__args__, (P, T))
+
+ # ParamSpec instances should also have args and kwargs attributes.
+ # Note: not in dir(P) because of __class__ hacks
+ self.assertTrue(hasattr(P, 'args'))
+ self.assertTrue(hasattr(P, 'kwargs'))
+
+ @skipIf((3, 10, 0) <= sys.version_info[:3] <= (3, 10, 2), "Needs bpo-46676.")
+ def test_args_kwargs(self):
+ P = ParamSpec('P')
+ P_2 = ParamSpec('P_2')
+ # Note: not in dir(P) because of __class__ hacks
+ self.assertTrue(hasattr(P, 'args'))
+ self.assertTrue(hasattr(P, 'kwargs'))
+ self.assertIsInstance(P.args, ParamSpecArgs)
+ self.assertIsInstance(P.kwargs, ParamSpecKwargs)
+ self.assertIs(P.args.__origin__, P)
+ self.assertIs(P.kwargs.__origin__, P)
+ self.assertEqual(P.args, P.args)
+ self.assertEqual(P.kwargs, P.kwargs)
+ self.assertNotEqual(P.args, P_2.args)
+ self.assertNotEqual(P.kwargs, P_2.kwargs)
+ self.assertNotEqual(P.args, P.kwargs)
+ self.assertNotEqual(P.kwargs, P.args)
+ self.assertNotEqual(P.args, P_2.kwargs)
+ self.assertEqual(repr(P.args), "P.args")
+ self.assertEqual(repr(P.kwargs), "P.kwargs")
+
+ def test_user_generics(self):
+ T = TypeVar("T")
+ P = ParamSpec("P")
+ P_2 = ParamSpec("P_2")
+
+ class X(Generic[T, P]):
+ pass
+
+ class Y(Protocol[T, P]):
+ pass
+
+ for klass in X, Y:
+ with self.subTest(klass=klass.__name__):
+ G1 = klass[int, P_2]
+ self.assertEqual(G1.__args__, (int, P_2))
+ self.assertEqual(G1.__parameters__, (P_2,))
+
+ G2 = klass[int, Concatenate[int, P_2]]
+ self.assertEqual(G2.__args__, (int, Concatenate[int, P_2]))
+ self.assertEqual(G2.__parameters__, (P_2,))
+
+        # The following are some valid use cases in PEP 612 that don't work:
+ # These do not work in 3.9, _type_check blocks the list and ellipsis.
+ # G3 = X[int, [int, bool]]
+ # G4 = X[int, ...]
+ # G5 = Z[[int, str, bool]]
+ # Not working because this is special-cased in 3.10.
+ # G6 = Z[int, str, bool]
+
+ class Z(Generic[P]):
+ pass
+
+ class ProtoZ(Protocol[P]):
+ pass
+
+ def test_pickle(self):
+ global P, P_co, P_contra, P_default
+ P = ParamSpec('P')
+ P_co = ParamSpec('P_co', covariant=True)
+ P_contra = ParamSpec('P_contra', contravariant=True)
+ P_default = ParamSpec('P_default', default=[int])
+ for proto in range(pickle.HIGHEST_PROTOCOL):
+ with self.subTest(f'Pickle protocol {proto}'):
+ for paramspec in (P, P_co, P_contra, P_default):
+ z = pickle.loads(pickle.dumps(paramspec, proto))
+ self.assertEqual(z.__name__, paramspec.__name__)
+ self.assertEqual(z.__covariant__, paramspec.__covariant__)
+ self.assertEqual(z.__contravariant__, paramspec.__contravariant__)
+ self.assertEqual(z.__bound__, paramspec.__bound__)
+ self.assertEqual(z.__default__, paramspec.__default__)
+
+ def test_eq(self):
+ P = ParamSpec('P')
+ self.assertEqual(P, P)
+ self.assertEqual(hash(P), hash(P))
+ # ParamSpec should compare by id similar to TypeVar in CPython
+ self.assertNotEqual(ParamSpec('P'), P)
+ self.assertIsNot(ParamSpec('P'), P)
+ # Note: normally you don't test this as it breaks when there's
+ # a hash collision. However, ParamSpec *must* guarantee that
+ # as long as two objects don't have the same ID, their hashes
+ # won't be the same.
+ self.assertNotEqual(hash(ParamSpec('P')), hash(P))
+
+ def test_isinstance_results_unaffected_by_presence_of_tracing_function(self):
+ # See https://github.com/python/typing_extensions/issues/318
+
+ code = textwrap.dedent(
+ """\
+ import sys, typing
+
+ def trace_call(*args):
+ return trace_call
+
+ def run():
+ sys.modules.pop("typing_extensions", None)
+ from typing_extensions import ParamSpec
+ return isinstance(ParamSpec("P"), typing.TypeVar)
+
+ isinstance_result_1 = run()
+ sys.setprofile(trace_call)
+ isinstance_result_2 = run()
+ sys.stdout.write(f"{isinstance_result_1} {isinstance_result_2}")
+ """
+ )
+
+ # Run this in an isolated process or it pollutes the environment
+ # and makes other tests fail:
+ try:
+ proc = subprocess.run(
+ [sys.executable, "-c", code], check=True, capture_output=True, text=True,
+ )
+ except subprocess.CalledProcessError as exc:
+ print("stdout", exc.stdout, sep="\n")
+ print("stderr", exc.stderr, sep="\n")
+ raise
+
+ # Sanity checks that assert the test is working as expected
+ self.assertIsInstance(proc.stdout, str)
+ result1, result2 = proc.stdout.split(" ")
+ self.assertIn(result1, {"True", "False"})
+ self.assertIn(result2, {"True", "False"})
+
+ # The actual test:
+ self.assertEqual(result1, result2)
+
+
+class ConcatenateTests(BaseTestCase):
+ def test_basics(self):
+ P = ParamSpec('P')
+
+ class MyClass: ...
+
+ c = Concatenate[MyClass, P]
+ self.assertNotEqual(c, Concatenate)
+
+ def test_valid_uses(self):
+ P = ParamSpec('P')
+ T = TypeVar('T')
+
+ C1 = Callable[Concatenate[int, P], int]
+ C2 = Callable[Concatenate[int, T, P], T]
+ self.assertEqual(C1.__origin__, C2.__origin__)
+ self.assertNotEqual(C1, C2)
+
+ # Test collections.abc.Callable too.
+ if sys.version_info[:2] >= (3, 9):
+ C3 = collections.abc.Callable[Concatenate[int, P], int]
+ C4 = collections.abc.Callable[Concatenate[int, T, P], T]
+ self.assertEqual(C3.__origin__, C4.__origin__)
+ self.assertNotEqual(C3, C4)
+
+ def test_invalid_uses(self):
+ P = ParamSpec('P')
+ T = TypeVar('T')
+
+ with self.assertRaisesRegex(
+ TypeError,
+ 'Cannot take a Concatenate of no types',
+ ):
+ Concatenate[()]
+
+ with self.assertRaisesRegex(
+ TypeError,
+ 'The last parameter to Concatenate should be a ParamSpec variable',
+ ):
+ Concatenate[P, T]
+
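+        # Before 3.11, each Concatenate argument is additionally validated as a
+        # type, which is where the extra error message below comes from.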
+ if not TYPING_3_11_0:
+ with self.assertRaisesRegex(
+ TypeError,
+ 'each arg must be a type',
+ ):
+ Concatenate[1, P]
+
+ def test_basic_introspection(self):
+ P = ParamSpec('P')
+ C1 = Concatenate[int, P]
+ C2 = Concatenate[int, T, P]
+ self.assertEqual(C1.__origin__, Concatenate)
+ self.assertEqual(C1.__args__, (int, P))
+ self.assertEqual(C2.__origin__, Concatenate)
+ self.assertEqual(C2.__args__, (int, T, P))
+
+ def test_eq(self):
+ P = ParamSpec('P')
+ C1 = Concatenate[int, P]
+ C2 = Concatenate[int, P]
+ C3 = Concatenate[int, T, P]
+ self.assertEqual(C1, C2)
+ self.assertEqual(hash(C1), hash(C2))
+ self.assertNotEqual(C1, C3)
+
+
+class TypeGuardTests(BaseTestCase):
+ def test_basics(self):
+ TypeGuard[int] # OK
+ self.assertEqual(TypeGuard[int], TypeGuard[int])
+
+ def foo(arg) -> TypeGuard[int]: ...
+ self.assertEqual(gth(foo), {'return': TypeGuard[int]})
+
+ def test_repr(self):
+ if hasattr(typing, 'TypeGuard'):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(TypeGuard), f'{mod_name}.TypeGuard')
+ cv = TypeGuard[int]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeGuard[int]')
+ cv = TypeGuard[Employee]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeGuard[{__name__}.Employee]')
+ cv = TypeGuard[Tuple[int]]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeGuard[typing.Tuple[int]]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(TypeGuard)):
+ pass
+ with self.assertRaises(TypeError):
+ class D(type(TypeGuard[int])):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ TypeGuard()
+ with self.assertRaises(TypeError):
+ type(TypeGuard)()
+ with self.assertRaises(TypeError):
+ type(TypeGuard[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, TypeGuard[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, TypeGuard)
+
+
+class TypeIsTests(BaseTestCase):
+ def test_basics(self):
+ TypeIs[int] # OK
+ self.assertEqual(TypeIs[int], TypeIs[int])
+
+ def foo(arg) -> TypeIs[int]: ...
+ self.assertEqual(gth(foo), {'return': TypeIs[int]})
+
+ def test_repr(self):
+ if hasattr(typing, 'TypeIs'):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(TypeIs), f'{mod_name}.TypeIs')
+ cv = TypeIs[int]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeIs[int]')
+ cv = TypeIs[Employee]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeIs[{__name__}.Employee]')
+ cv = TypeIs[Tuple[int]]
+ self.assertEqual(repr(cv), f'{mod_name}.TypeIs[typing.Tuple[int]]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(TypeIs)):
+ pass
+ with self.assertRaises(TypeError):
+ class D(type(TypeIs[int])):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ TypeIs()
+ with self.assertRaises(TypeError):
+ type(TypeIs)()
+ with self.assertRaises(TypeError):
+ type(TypeIs[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, TypeIs[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, TypeIs)
+
+
+class LiteralStringTests(BaseTestCase):
+ def test_basics(self):
+ class Foo:
+ def bar(self) -> LiteralString: ...
+ def baz(self) -> "LiteralString": ...
+
+ self.assertEqual(gth(Foo.bar), {'return': LiteralString})
+ self.assertEqual(gth(Foo.baz), {'return': LiteralString})
+
+ def test_get_origin(self):
+ self.assertIsNone(get_origin(LiteralString))
+
+ def test_repr(self):
+ if hasattr(typing, 'LiteralString'):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(LiteralString), f'{mod_name}.LiteralString')
+
+ def test_cannot_subscript(self):
+ with self.assertRaises(TypeError):
+ LiteralString[int]
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(LiteralString)):
+ pass
+ with self.assertRaises(TypeError):
+ class D(LiteralString):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ LiteralString()
+ with self.assertRaises(TypeError):
+ type(LiteralString)()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, LiteralString)
+ with self.assertRaises(TypeError):
+ issubclass(int, LiteralString)
+
+ def test_alias(self):
+ StringTuple = Tuple[LiteralString, LiteralString]
+ class Alias:
+ def return_tuple(self) -> StringTuple:
+ return ("foo", "pep" + "675")
+
+ def test_typevar(self):
+ StrT = TypeVar("StrT", bound=LiteralString)
+ self.assertIs(StrT.__bound__, LiteralString)
+
+ def test_pickle(self):
+ for proto in range(pickle.HIGHEST_PROTOCOL):
+ pickled = pickle.dumps(LiteralString, protocol=proto)
+ self.assertIs(LiteralString, pickle.loads(pickled))
+
+
+class SelfTests(BaseTestCase):
+ def test_basics(self):
+ class Foo:
+ def bar(self) -> Self: ...
+
+ self.assertEqual(gth(Foo.bar), {'return': Self})
+
+ def test_repr(self):
+ if hasattr(typing, 'Self'):
+ mod_name = 'typing'
+ else:
+ mod_name = 'typing_extensions'
+ self.assertEqual(repr(Self), f'{mod_name}.Self')
+
+ def test_cannot_subscript(self):
+ with self.assertRaises(TypeError):
+ Self[int]
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(Self)):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ Self()
+ with self.assertRaises(TypeError):
+ type(Self)()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, Self)
+ with self.assertRaises(TypeError):
+ issubclass(int, Self)
+
+ def test_alias(self):
+ TupleSelf = Tuple[Self, Self]
+ class Alias:
+ def return_tuple(self) -> TupleSelf:
+ return (self, self)
+
+ def test_pickle(self):
+ for proto in range(pickle.HIGHEST_PROTOCOL):
+ pickled = pickle.dumps(Self, protocol=proto)
+ self.assertIs(Self, pickle.loads(pickled))
+
+
+class UnpackTests(BaseTestCase):
+ def test_basic_plain(self):
+ Ts = TypeVarTuple('Ts')
+ self.assertEqual(Unpack[Ts], Unpack[Ts])
+ with self.assertRaises(TypeError):
+ Unpack()
+
+ def test_repr(self):
+ Ts = TypeVarTuple('Ts')
+ self.assertEqual(repr(Unpack[Ts]), f'{Unpack.__module__}.Unpack[Ts]')
+
+ def test_cannot_subclass_vars(self):
+ with self.assertRaises(TypeError):
+ class V(Unpack[TypeVarTuple('Ts')]):
+ pass
+
+ def test_tuple(self):
+ Ts = TypeVarTuple('Ts')
+ Tuple[Unpack[Ts]]
+
+ def test_union(self):
+ Xs = TypeVarTuple('Xs')
+ Ys = TypeVarTuple('Ys')
+ self.assertEqual(
+ Union[Unpack[Xs]],
+ Unpack[Xs]
+ )
+ self.assertNotEqual(
+ Union[Unpack[Xs]],
+ Union[Unpack[Xs], Unpack[Ys]]
+ )
+ self.assertEqual(
+ Union[Unpack[Xs], Unpack[Xs]],
+ Unpack[Xs]
+ )
+ self.assertNotEqual(
+ Union[Unpack[Xs], int],
+ Union[Unpack[Xs]]
+ )
+ self.assertNotEqual(
+ Union[Unpack[Xs], int],
+ Union[int]
+ )
+ self.assertEqual(
+ Union[Unpack[Xs], int].__args__,
+ (Unpack[Xs], int)
+ )
+ self.assertEqual(
+ Union[Unpack[Xs], int].__parameters__,
+ (Xs,)
+ )
+ self.assertIs(
+ Union[Unpack[Xs], int].__origin__,
+ Union
+ )
+
+ def test_concatenation(self):
+ Xs = TypeVarTuple('Xs')
+ self.assertEqual(Tuple[int, Unpack[Xs]].__args__, (int, Unpack[Xs]))
+ self.assertEqual(Tuple[Unpack[Xs], int].__args__, (Unpack[Xs], int))
+ self.assertEqual(Tuple[int, Unpack[Xs], str].__args__,
+ (int, Unpack[Xs], str))
+ class C(Generic[Unpack[Xs]]): pass
+ class D(Protocol[Unpack[Xs]]): pass
+ for klass in C, D:
+ with self.subTest(klass=klass.__name__):
+ self.assertEqual(klass[int, Unpack[Xs]].__args__, (int, Unpack[Xs]))
+ self.assertEqual(klass[Unpack[Xs], int].__args__, (Unpack[Xs], int))
+ self.assertEqual(klass[int, Unpack[Xs], str].__args__,
+ (int, Unpack[Xs], str))
+
+ def test_class(self):
+ Ts = TypeVarTuple('Ts')
+
+ class C(Generic[Unpack[Ts]]): pass
+ class D(Protocol[Unpack[Ts]]): pass
+
+ for klass in C, D:
+ with self.subTest(klass=klass.__name__):
+ self.assertEqual(klass[int].__args__, (int,))
+ self.assertEqual(klass[int, str].__args__, (int, str))
+
+ with self.assertRaises(TypeError):
+ class C(Generic[Unpack[Ts], int]): pass
+
+ with self.assertRaises(TypeError):
+ class D(Protocol[Unpack[Ts], int]): pass
+
+ T1 = TypeVar('T')
+ T2 = TypeVar('T')
+ class C(Generic[T1, T2, Unpack[Ts]]): pass
+ class D(Protocol[T1, T2, Unpack[Ts]]): pass
+ for klass in C, D:
+ with self.subTest(klass=klass.__name__):
+ self.assertEqual(klass[int, str].__args__, (int, str))
+ self.assertEqual(klass[int, str, float].__args__, (int, str, float))
+ self.assertEqual(
+ klass[int, str, float, bool].__args__, (int, str, float, bool)
+ )
+ # A bug was fixed in 3.11.1
+ # (https://github.com/python/cpython/commit/74920aa27d0c57443dd7f704d6272cca9c507ab3)
+ # That means this assertion doesn't pass on 3.11.0,
+ # but it passes on all other Python versions
+ if sys.version_info[:3] != (3, 11, 0):
+ with self.assertRaises(TypeError):
+ klass[int]
+
+
+class TypeVarTupleTests(BaseTestCase):
+
+ def test_basic_plain(self):
+ Ts = TypeVarTuple('Ts')
+ self.assertEqual(Ts, Ts)
+ self.assertIsInstance(Ts, TypeVarTuple)
+ Xs = TypeVarTuple('Xs')
+ Ys = TypeVarTuple('Ys')
+ self.assertNotEqual(Xs, Ys)
+
+ def test_repr(self):
+ Ts = TypeVarTuple('Ts')
+ self.assertEqual(repr(Ts), 'Ts')
+
+ def test_no_redefinition(self):
+ self.assertNotEqual(TypeVarTuple('Ts'), TypeVarTuple('Ts'))
+
+ def test_cannot_subclass_vars(self):
+ with self.assertRaises(TypeError):
+ class V(TypeVarTuple('Ts')):
+ pass
+
+ def test_cannot_subclass_var_itself(self):
+ with self.assertRaises(TypeError):
+ class V(TypeVarTuple):
+ pass
+
+ def test_cannot_instantiate_vars(self):
+ Ts = TypeVarTuple('Ts')
+ with self.assertRaises(TypeError):
+ Ts()
+
+ def test_tuple(self):
+ Ts = TypeVarTuple('Ts')
+ # Not legal at type checking time but we can't really check against it.
+ Tuple[Ts]
+
+ def test_args_and_parameters(self):
+ Ts = TypeVarTuple('Ts')
+
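+        # Iterating a TypeVarTuple yields a single Unpack[Ts], so
+        # Tuple[tuple(Ts)] ends up equivalent to Tuple[Unpack[Ts]].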
+ t = Tuple[tuple(Ts)]
+ if sys.version_info >= (3, 11):
+ self.assertEqual(t.__args__, (typing.Unpack[Ts],))
+ else:
+ self.assertEqual(t.__args__, (Unpack[Ts],))
+ self.assertEqual(t.__parameters__, (Ts,))
+
+ def test_pickle(self):
+ global Ts, Ts_default # pickle wants to reference the class by name
+ Ts = TypeVarTuple('Ts')
+ Ts_default = TypeVarTuple('Ts_default', default=Unpack[Tuple[int, str]])
+
+ for proto in range(pickle.HIGHEST_PROTOCOL):
+ for typevartuple in (Ts, Ts_default):
+ z = pickle.loads(pickle.dumps(typevartuple, proto))
+ self.assertEqual(z.__name__, typevartuple.__name__)
+ self.assertEqual(z.__default__, typevartuple.__default__)
+
+
+class FinalDecoratorTests(BaseTestCase):
+ def test_final_unmodified(self):
+ def func(x): ...
+ self.assertIs(func, final(func))
+
+ def test_dunder_final(self):
+ @final
+ def func(): ...
+ @final
+ class Cls: ...
+ self.assertIs(True, func.__final__)
+ self.assertIs(True, Cls.__final__)
+
+ class Wrapper:
+ __slots__ = ("func",)
+ def __init__(self, func):
+ self.func = func
+ def __call__(self, *args, **kwargs):
+ return self.func(*args, **kwargs)
+
+ # Check that no error is thrown if the attribute
+ # is not writable.
+ @final
+ @Wrapper
+ def wrapped(): ...
+ self.assertIsInstance(wrapped, Wrapper)
+ self.assertIs(False, hasattr(wrapped, "__final__"))
+
+ class Meta(type):
+ @property
+ def __final__(self): return "can't set me"
+ @final
+ class WithMeta(metaclass=Meta): ...
+ self.assertEqual(WithMeta.__final__, "can't set me")
+
+ # Builtin classes throw TypeError if you try to set an
+ # attribute.
+ final(int)
+ self.assertIs(False, hasattr(int, "__final__"))
+
+ # Make sure it works with common builtin decorators
+ class Methods:
+ @final
+ @classmethod
+ def clsmethod(cls): ...
+
+ @final
+ @staticmethod
+ def stmethod(): ...
+
+ # The other order doesn't work because property objects
+ # don't allow attribute assignment.
+ @property
+ @final
+ def prop(self): ...
+
+ @final
+ @lru_cache # noqa: B019
+ def cached(self): ...
+
+ # Use getattr_static because the descriptor returns the
+ # underlying function, which doesn't have __final__.
+ self.assertIs(
+ True,
+ inspect.getattr_static(Methods, "clsmethod").__final__
+ )
+ self.assertIs(
+ True,
+ inspect.getattr_static(Methods, "stmethod").__final__
+ )
+ self.assertIs(True, Methods.prop.fget.__final__)
+ self.assertIs(True, Methods.cached.__final__)
+
+
+class RevealTypeTests(BaseTestCase):
+ def test_reveal_type(self):
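+        # reveal_type() must return its argument unchanged and report the
+        # runtime type on stderr.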
+ obj = object()
+
+ with contextlib.redirect_stderr(io.StringIO()) as stderr:
+ self.assertIs(obj, reveal_type(obj))
+ self.assertEqual("Runtime type is 'object'", stderr.getvalue().strip())
+
+
+class DataclassTransformTests(BaseTestCase):
+ def test_decorator(self):
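+        # dataclass_transform() should return the decorated object unchanged
+        # and record the supplied defaults in __dataclass_transform__.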
+ def create_model(*, frozen: bool = False, kw_only: bool = True):
+ return lambda cls: cls
+
+ decorated = dataclass_transform(kw_only_default=True, order_default=False)(create_model)
+
+ class CustomerModel:
+ id: int
+
+ self.assertIs(decorated, create_model)
+ self.assertEqual(
+ decorated.__dataclass_transform__,
+ {
+ "eq_default": True,
+ "order_default": False,
+ "kw_only_default": True,
+ "frozen_default": False,
+ "field_specifiers": (),
+ "kwargs": {},
+ }
+ )
+ self.assertIs(
+ decorated(frozen=True, kw_only=False)(CustomerModel),
+ CustomerModel
+ )
+
+ def test_base_class(self):
+ class ModelBase:
+ def __init_subclass__(cls, *, frozen: bool = False): ...
+
+ Decorated = dataclass_transform(
+ eq_default=True,
+ order_default=True,
+ # Arbitrary unrecognized kwargs are accepted at runtime.
+ make_everything_awesome=True,
+ )(ModelBase)
+
+ class CustomerModel(Decorated, frozen=True):
+ id: int
+
+ self.assertIs(Decorated, ModelBase)
+ self.assertEqual(
+ Decorated.__dataclass_transform__,
+ {
+ "eq_default": True,
+ "order_default": True,
+ "kw_only_default": False,
+ "frozen_default": False,
+ "field_specifiers": (),
+ "kwargs": {"make_everything_awesome": True},
+ }
+ )
+ self.assertIsSubclass(CustomerModel, Decorated)
+
+ def test_metaclass(self):
+ class Field: ...
+
+ class ModelMeta(type):
+ def __new__(
+ cls, name, bases, namespace, *, init: bool = True,
+ ):
+ return super().__new__(cls, name, bases, namespace)
+
+ Decorated = dataclass_transform(
+ order_default=True, field_specifiers=(Field,)
+ )(ModelMeta)
+
+ class ModelBase(metaclass=Decorated): ...
+
+ class CustomerModel(ModelBase, init=False):
+ id: int
+
+ self.assertIs(Decorated, ModelMeta)
+ self.assertEqual(
+ Decorated.__dataclass_transform__,
+ {
+ "eq_default": True,
+ "order_default": True,
+ "kw_only_default": False,
+ "frozen_default": False,
+ "field_specifiers": (Field,),
+ "kwargs": {},
+ }
+ )
+ self.assertIsInstance(CustomerModel, Decorated)
+
+
+class AllTests(BaseTestCase):
+
+ def test_drop_in_for_typing(self):
+ # Check that the typing_extensions.__all__ is a superset of
+ # typing.__all__.
+ t_all = set(typing.__all__)
+ te_all = set(typing_extensions.__all__)
+ exceptions = {"ByteString"}
+ self.assertGreaterEqual(te_all, t_all - exceptions)
+ # Deprecated, to be removed in 3.14
+ self.assertFalse(hasattr(typing_extensions, "ByteString"))
+ # These were never included in `typing.__all__`,
+ # and have been removed in Python 3.13
+ self.assertNotIn('re', te_all)
+ self.assertNotIn('io', te_all)
+
+ def test_typing_extensions_includes_standard(self):
+ a = typing_extensions.__all__
+ self.assertIn('ClassVar', a)
+ self.assertIn('Type', a)
+ self.assertIn('ChainMap', a)
+ self.assertIn('ContextManager', a)
+ self.assertIn('Counter', a)
+ self.assertIn('DefaultDict', a)
+ self.assertIn('Deque', a)
+ self.assertIn('NewType', a)
+ self.assertIn('overload', a)
+ self.assertIn('Text', a)
+ self.assertIn('TYPE_CHECKING', a)
+ self.assertIn('TypeAlias', a)
+ self.assertIn('ParamSpec', a)
+ self.assertIn("Concatenate", a)
+
+ self.assertIn('Annotated', a)
+ self.assertIn('get_type_hints', a)
+
+ self.assertIn('Awaitable', a)
+ self.assertIn('AsyncIterator', a)
+ self.assertIn('AsyncIterable', a)
+ self.assertIn('Coroutine', a)
+ self.assertIn('AsyncContextManager', a)
+
+ self.assertIn('AsyncGenerator', a)
+
+ self.assertIn('Protocol', a)
+ self.assertIn('runtime', a)
+
+ # Check that all objects in `__all__` are present in the module
+ for name in a:
+ self.assertTrue(hasattr(typing_extensions, name))
+
+ def test_all_names_in___all__(self):
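+ # These names exist at module level purely for backward compatibility
+ # (GenericMeta, PEP_560) or as internal type variables (T, KT, VT, ...),
+ # so they are deliberately left out of __all__.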
+ exclude = {
+ 'GenericMeta',
+ 'KT',
+ 'PEP_560',
+ 'T',
+ 'T_co',
+ 'T_contra',
+ 'VT',
+ }
+ actual_names = {
+ name for name in dir(typing_extensions)
+ if not name.startswith("_")
+ and not isinstance(getattr(typing_extensions, name), types.ModuleType)
+ }
+ # Make sure all public names are in __all__
+ self.assertEqual({*exclude, *typing_extensions.__all__},
+ actual_names)
+ # Make sure all excluded names actually exist
+ self.assertLessEqual(exclude, actual_names)
+
+ def test_typing_extensions_defers_when_possible(self):
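+ # Names in `exclude` are those for which typing_extensions still ships
+ # its own implementation on this Python version; everything else is
+ # expected to be re-exported from typing unchanged.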
+ exclude = set()
+ if sys.version_info < (3, 10):
+ exclude |= {'get_args', 'get_origin'}
+ if sys.version_info < (3, 10, 1):
+ exclude |= {"Literal"}
+ if sys.version_info < (3, 11):
+ exclude |= {'final', 'Any', 'NewType', 'overload'}
+ if sys.version_info < (3, 12):
+ exclude |= {
+ 'SupportsAbs', 'SupportsBytes',
+ 'SupportsComplex', 'SupportsFloat', 'SupportsIndex', 'SupportsInt',
+ 'SupportsRound', 'Unpack', 'dataclass_transform',
+ }
+ if sys.version_info < (3, 13):
+ exclude |= {
+ 'NamedTuple', 'Protocol', 'runtime_checkable', 'Generator',
+ 'AsyncGenerator', 'ContextManager', 'AsyncContextManager',
+ 'ParamSpec', 'TypeVar', 'TypeVarTuple', 'get_type_hints',
+ }
+ if not typing_extensions._PEP_728_IMPLEMENTED:
+ exclude |= {'TypedDict', 'is_typeddict'}
+ for item in typing_extensions.__all__:
+ if item not in exclude and hasattr(typing, item):
+ self.assertIs(
+ getattr(typing_extensions, item),
+ getattr(typing, item))
+
+ def test_typing_extensions_compiles_with_opt(self):
+ file_path = typing_extensions.__file__
+ try:
+ subprocess.check_output(f'{sys.executable} -OO {file_path}',
+ stderr=subprocess.STDOUT,
+ shell=True)
+ except subprocess.CalledProcessError:
+ self.fail('Module does not compile with optimize=2 (-OO flag).')
+
+
+class CoolEmployee(NamedTuple):
+ name: str
+ cool: int
+
+
+class CoolEmployeeWithDefault(NamedTuple):
+ name: str
+ cool: int = 0
+
+
+class XMeth(NamedTuple):
+ x: int
+
+ def double(self):
+ return 2 * self.x
+
+
+class NamedTupleTests(BaseTestCase):
+ class NestedEmployee(NamedTuple):
+ name: str
+ cool: int
+
+ def test_basics(self):
+ Emp = NamedTuple('Emp', [('name', str), ('id', int)])
+ self.assertIsSubclass(Emp, tuple)
+ joe = Emp('Joe', 42)
+ jim = Emp(name='Jim', id=1)
+ self.assertIsInstance(joe, Emp)
+ self.assertIsInstance(joe, tuple)
+ self.assertEqual(joe.name, 'Joe')
+ self.assertEqual(joe.id, 42)
+ self.assertEqual(jim.name, 'Jim')
+ self.assertEqual(jim.id, 1)
+ self.assertEqual(Emp.__name__, 'Emp')
+ self.assertEqual(Emp._fields, ('name', 'id'))
+ self.assertEqual(Emp.__annotations__,
+ collections.OrderedDict([('name', str), ('id', int)]))
+
+ def test_annotation_usage(self):
+ tim = CoolEmployee('Tim', 9000)
+ self.assertIsInstance(tim, CoolEmployee)
+ self.assertIsInstance(tim, tuple)
+ self.assertEqual(tim.name, 'Tim')
+ self.assertEqual(tim.cool, 9000)
+ self.assertEqual(CoolEmployee.__name__, 'CoolEmployee')
+ self.assertEqual(CoolEmployee._fields, ('name', 'cool'))
+ self.assertEqual(CoolEmployee.__annotations__,
+ collections.OrderedDict(name=str, cool=int))
+
+ def test_annotation_usage_with_default(self):
+ jelle = CoolEmployeeWithDefault('Jelle')
+ self.assertIsInstance(jelle, CoolEmployeeWithDefault)
+ self.assertIsInstance(jelle, tuple)
+ self.assertEqual(jelle.name, 'Jelle')
+ self.assertEqual(jelle.cool, 0)
+ cooler_employee = CoolEmployeeWithDefault('Sjoerd', 1)
+ self.assertEqual(cooler_employee.cool, 1)
+
+ self.assertEqual(CoolEmployeeWithDefault.__name__, 'CoolEmployeeWithDefault')
+ self.assertEqual(CoolEmployeeWithDefault._fields, ('name', 'cool'))
+ self.assertEqual(CoolEmployeeWithDefault.__annotations__,
+ dict(name=str, cool=int))
+
+ with self.assertRaisesRegex(
+ TypeError,
+ 'Non-default namedtuple field y cannot follow default field x'
+ ):
+ class NonDefaultAfterDefault(NamedTuple):
+ x: int = 3
+ y: int
+
+ def test_field_defaults(self):
+ self.assertEqual(CoolEmployeeWithDefault._field_defaults, dict(cool=0))
+
+ def test_annotation_usage_with_methods(self):
+ self.assertEqual(XMeth(1).double(), 2)
+ self.assertEqual(XMeth(42).x, XMeth(42)[0])
+ self.assertEqual(str(XRepr(42)), '42 -> 1')
+ self.assertEqual(XRepr(1, 2) + XRepr(3), 0)
+
+ bad_overwrite_error_message = 'Cannot overwrite NamedTuple attribute'
+
+ with self.assertRaisesRegex(AttributeError, bad_overwrite_error_message):
+ class XMethBad(NamedTuple):
+ x: int
+ def _fields(self):
+ return 'no chance for this'
+
+ with self.assertRaisesRegex(AttributeError, bad_overwrite_error_message):
+ class XMethBad2(NamedTuple):
+ x: int
+ def _source(self):
+ return 'no chance for this as well'
+
+ def test_multiple_inheritance(self):
+ class A:
+ pass
+ with self.assertRaisesRegex(
+ TypeError,
+ 'can only inherit from a NamedTuple type and Generic'
+ ):
+ class X(NamedTuple, A):
+ x: int
+
+ with self.assertRaisesRegex(
+ TypeError,
+ 'can only inherit from a NamedTuple type and Generic'
+ ):
+ class Y(NamedTuple, tuple):
+ x: int
+
+ with self.assertRaisesRegex(TypeError, 'duplicate base class'):
+ class Z(NamedTuple, NamedTuple):
+ x: int
+
+ class A(NamedTuple):
+ x: int
+ with self.assertRaisesRegex(
+ TypeError,
+ 'can only inherit from a NamedTuple type and Generic'
+ ):
+ class XX(NamedTuple, A):
+ y: str
+
+ def test_generic(self):
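+ # NamedTuple is not a real runtime base class: it is replaced by
+ # `tuple` in __bases__, while the original bases (including Generic[T])
+ # are preserved in __orig_bases__.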
+ class X(NamedTuple, Generic[T]):
+ x: T
+ self.assertEqual(X.__bases__, (tuple, Generic))
+ self.assertEqual(X.__orig_bases__, (NamedTuple, Generic[T]))
+ self.assertEqual(X.__mro__, (X, tuple, Generic, object))
+
+ class Y(Generic[T], NamedTuple):
+ x: T
+ self.assertEqual(Y.__bases__, (Generic, tuple))
+ self.assertEqual(Y.__orig_bases__, (Generic[T], NamedTuple))
+ self.assertEqual(Y.__mro__, (Y, Generic, tuple, object))
+
+ for G in X, Y:
+ with self.subTest(type=G):
+ self.assertEqual(G.__parameters__, (T,))
+ A = G[int]
+ self.assertIs(A.__origin__, G)
+ self.assertEqual(A.__args__, (int,))
+ self.assertEqual(A.__parameters__, ())
+
+ a = A(3)
+ self.assertIs(type(a), G)
+ self.assertIsInstance(a, G)
+ self.assertEqual(a.x, 3)
+
+ things = "arguments" if sys.version_info >= (3, 10) else "parameters"
+ with self.assertRaisesRegex(TypeError, f'Too many {things}'):
+ G[int, str]
+
+ @skipUnless(TYPING_3_9_0, "tuple.__class_getitem__ was added in 3.9")
+ def test_non_generic_subscript_py39_plus(self):
+ # For backward compatibility, subscription works
+ # on arbitrary NamedTuple types.
+ class Group(NamedTuple):
+ key: T
+ group: list[T]
+ A = Group[int]
+ self.assertEqual(A.__origin__, Group)
+ self.assertEqual(A.__parameters__, ())
+ self.assertEqual(A.__args__, (int,))
+ a = A(1, [2])
+ self.assertIs(type(a), Group)
+ self.assertEqual(a, (1, [2]))
+
+ @skipIf(TYPING_3_9_0, "Test isn't relevant to 3.9+")
+ def test_non_generic_subscript_error_message_py38(self):
+ class Group(NamedTuple):
+ key: T
+ group: List[T]
+
+ with self.assertRaisesRegex(TypeError, 'not subscriptable'):
+ Group[int]
+
+ for attr in ('__args__', '__origin__', '__parameters__'):
+ with self.subTest(attr=attr):
+ self.assertFalse(hasattr(Group, attr))
+
+ def test_namedtuple_keyword_usage(self):
+ with self.assertWarnsRegex(
+ DeprecationWarning,
+ "Creating NamedTuple classes using keyword arguments is deprecated"
+ ):
+ LocalEmployee = NamedTuple("LocalEmployee", name=str, age=int)
+
+ nick = LocalEmployee('Nick', 25)
+ self.assertIsInstance(nick, tuple)
+ self.assertEqual(nick.name, 'Nick')
+ self.assertEqual(LocalEmployee.__name__, 'LocalEmployee')
+ self.assertEqual(LocalEmployee._fields, ('name', 'age'))
+ self.assertEqual(LocalEmployee.__annotations__, dict(name=str, age=int))
+
+ with self.assertRaisesRegex(
+ TypeError,
+ "Either list of fields or keywords can be provided to NamedTuple, not both"
+ ):
+ NamedTuple('Name', [('x', int)], y=str)
+
+ with self.assertRaisesRegex(
+ TypeError,
+ "Either list of fields or keywords can be provided to NamedTuple, not both"
+ ):
+ NamedTuple('Name', [], y=str)
+
+ with self.assertRaisesRegex(
+ TypeError,
+ (
+ r"Cannot pass `None` as the 'fields' parameter "
+ r"and also specify fields using keyword arguments"
+ )
+ ):
+ NamedTuple('Name', None, x=int)
+
+ def test_namedtuple_special_keyword_names(self):
+ with self.assertWarnsRegex(
+ DeprecationWarning,
+ "Creating NamedTuple classes using keyword arguments is deprecated"
+ ):
+ NT = NamedTuple("NT", cls=type, self=object, typename=str, fields=list)
+
+ self.assertEqual(NT.__name__, 'NT')
+ self.assertEqual(NT._fields, ('cls', 'self', 'typename', 'fields'))
+ a = NT(cls=str, self=42, typename='foo', fields=[('bar', tuple)])
+ self.assertEqual(a.cls, str)
+ self.assertEqual(a.self, 42)
+ self.assertEqual(a.typename, 'foo')
+ self.assertEqual(a.fields, [('bar', tuple)])
+
+ def test_empty_namedtuple(self):
+ expected_warning = re.escape(
+ "Failing to pass a value for the 'fields' parameter is deprecated "
+ "and will be disallowed in Python 3.15. "
+ "To create a NamedTuple class with 0 fields "
+ "using the functional syntax, "
+ "pass an empty list, e.g. `NT1 = NamedTuple('NT1', [])`."
+ )
+ with self.assertWarnsRegex(DeprecationWarning, fr"^{expected_warning}$"):
+ NT1 = NamedTuple('NT1')
+
+ expected_warning = re.escape(
+ "Passing `None` as the 'fields' parameter is deprecated "
+ "and will be disallowed in Python 3.15. "
+ "To create a NamedTuple class with 0 fields "
+ "using the functional syntax, "
+ "pass an empty list, e.g. `NT2 = NamedTuple('NT2', [])`."
+ )
+ with self.assertWarnsRegex(DeprecationWarning, fr"^{expected_warning}$"):
+ NT2 = NamedTuple('NT2', None)
+
+ NT3 = NamedTuple('NT3', [])
+
+ class CNT(NamedTuple):
+ pass # empty body
+
+ for struct in NT1, NT2, NT3, CNT:
+ with self.subTest(struct=struct):
+ self.assertEqual(struct._fields, ())
+ self.assertEqual(struct.__annotations__, {})
+ self.assertIsInstance(struct(), struct)
+ self.assertEqual(struct._field_defaults, {})
+
+ def test_namedtuple_errors(self):
+ with self.assertRaises(TypeError):
+ NamedTuple.__new__()
+ with self.assertRaises(TypeError):
+ NamedTuple()
+ with self.assertRaises(TypeError):
+ NamedTuple('Emp', [('name', str)], None)
+ with self.assertRaisesRegex(ValueError, 'cannot start with an underscore'):
+ NamedTuple('Emp', [('_name', str)])
+ with self.assertRaises(TypeError):
+ NamedTuple(typename='Emp', name=str, id=int)
+
+ def test_copy_and_pickle(self):
+ global Emp # pickle wants to reference the class by name
+ Emp = NamedTuple('Emp', [('name', str), ('cool', int)])
+ for cls in Emp, CoolEmployee, self.NestedEmployee:
+ with self.subTest(cls=cls):
+ jane = cls('jane', 37)
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(jane, proto)
+ jane2 = pickle.loads(z)
+ self.assertEqual(jane2, jane)
+ self.assertIsInstance(jane2, cls)
+
+ jane2 = copy.copy(jane)
+ self.assertEqual(jane2, jane)
+ self.assertIsInstance(jane2, cls)
+
+ jane2 = copy.deepcopy(jane)
+ self.assertEqual(jane2, jane)
+ self.assertIsInstance(jane2, cls)
+
+ def test_docstring(self):
+ self.assertIsInstance(NamedTuple.__doc__, str)
+
+ @skipUnless(TYPING_3_9_0, "NamedTuple was a class on 3.8 and lower")
+ def test_same_as_typing_NamedTuple_39_plus(self):
+ self.assertEqual(
+ set(dir(NamedTuple)) - {"__text_signature__"},
+ set(dir(typing.NamedTuple))
+ )
+ self.assertIs(type(NamedTuple), type(typing.NamedTuple))
+
+ @skipIf(TYPING_3_9_0, "tests are only relevant to <=3.8")
+ def test_same_as_typing_NamedTuple_38_minus(self):
+ self.assertEqual(
+ self.NestedEmployee.__annotations__,
+ self.NestedEmployee._field_types
+ )
+
+ def test_orig_bases(self):
+ T = TypeVar('T')
+
+ class SimpleNamedTuple(NamedTuple):
+ pass
+
+ class GenericNamedTuple(NamedTuple, Generic[T]):
+ pass
+
+ self.assertEqual(SimpleNamedTuple.__orig_bases__, (NamedTuple,))
+ self.assertEqual(GenericNamedTuple.__orig_bases__, (NamedTuple, Generic[T]))
+
+ CallNamedTuple = NamedTuple('CallNamedTuple', [])
+
+ self.assertEqual(CallNamedTuple.__orig_bases__, (NamedTuple,))
+
+ def test_setname_called_on_values_in_class_dictionary(self):
+ class Vanilla:
+ def __set_name__(self, owner, name):
+ self.name = name
+
+ class Foo(NamedTuple):
+ attr = Vanilla()
+
+ foo = Foo()
+ self.assertEqual(len(foo), 0)
+ self.assertNotIn('attr', Foo._fields)
+ self.assertIsInstance(foo.attr, Vanilla)
+ self.assertEqual(foo.attr.name, "attr")
+
+ class Bar(NamedTuple):
+ attr: Vanilla = Vanilla()
+
+ bar = Bar()
+ self.assertEqual(len(bar), 1)
+ self.assertIn('attr', Bar._fields)
+ self.assertIsInstance(bar.attr, Vanilla)
+ self.assertEqual(bar.attr.name, "attr")
+
+ @skipIf(
+ TYPING_3_12_0,
+ "__set_name__ behaviour changed on py312+ to use BaseException.add_note()"
+ )
+ def test_setname_raises_the_same_as_on_other_classes_py311_minus(self):
+ class CustomException(BaseException): pass
+
+ class Annoying:
+ def __set_name__(self, owner, name):
+ raise CustomException
+
+ annoying = Annoying()
+
+ with self.assertRaises(RuntimeError) as cm:
+ class NormalClass:
+ attr = annoying
+ normal_exception = cm.exception
+
+ with self.assertRaises(RuntimeError) as cm:
+ class NamedTupleClass(NamedTuple):
+ attr = annoying
+ namedtuple_exception = cm.exception
+
+ self.assertIs(type(namedtuple_exception), RuntimeError)
+ self.assertIs(type(namedtuple_exception), type(normal_exception))
+ self.assertEqual(len(namedtuple_exception.args), len(normal_exception.args))
+ self.assertEqual(
+ namedtuple_exception.args[0],
+ normal_exception.args[0].replace("NormalClass", "NamedTupleClass")
+ )
+
+ self.assertIs(type(namedtuple_exception.__cause__), CustomException)
+ self.assertIs(
+ type(namedtuple_exception.__cause__), type(normal_exception.__cause__)
+ )
+ self.assertEqual(
+ namedtuple_exception.__cause__.args, normal_exception.__cause__.args
+ )
+
+ @skipUnless(
+ TYPING_3_12_0,
+ "__set_name__ behaviour changed on py312+ to use BaseException.add_note()"
+ )
+ def test_setname_raises_the_same_as_on_other_classes_py312_plus(self):
+ class CustomException(BaseException): pass
+
+ class Annoying:
+ def __set_name__(self, owner, name):
+ raise CustomException
+
+ annoying = Annoying()
+
+ with self.assertRaises(CustomException) as cm:
+ class NormalClass:
+ attr = annoying
+ normal_exception = cm.exception
+
+ with self.assertRaises(CustomException) as cm:
+ class NamedTupleClass(NamedTuple):
+ attr = annoying
+ namedtuple_exception = cm.exception
+
+ expected_note = (
+ "Error calling __set_name__ on 'Annoying' instance "
+ "'attr' in 'NamedTupleClass'"
+ )
+
+ self.assertIs(type(namedtuple_exception), CustomException)
+ self.assertIs(type(namedtuple_exception), type(normal_exception))
+ self.assertEqual(namedtuple_exception.args, normal_exception.args)
+
+ self.assertEqual(len(namedtuple_exception.__notes__), 1)
+ self.assertEqual(
+ len(namedtuple_exception.__notes__), len(normal_exception.__notes__)
+ )
+
+ self.assertEqual(namedtuple_exception.__notes__[0], expected_note)
+ self.assertEqual(
+ namedtuple_exception.__notes__[0],
+ normal_exception.__notes__[0].replace("NormalClass", "NamedTupleClass")
+ )
+
+ def test_strange_errors_when_accessing_set_name_itself(self):
+ class CustomException(Exception): pass
+
+ class Meta(type):
+ def __getattribute__(self, attr):
+ if attr == "__set_name__":
+ raise CustomException
+ return object.__getattribute__(self, attr)
+
+ class VeryAnnoying(metaclass=Meta): pass
+
+ very_annoying = VeryAnnoying()
+
+ with self.assertRaises(CustomException):
+ class Foo(NamedTuple):
+ attr = very_annoying
+
+
+class TypeVarTests(BaseTestCase):
+ def test_basic_plain(self):
+ T = TypeVar('T')
+ # T equals itself.
+ self.assertEqual(T, T)
+ # T is an instance of TypeVar
+ self.assertIsInstance(T, TypeVar)
+ self.assertEqual(T.__name__, 'T')
+ self.assertEqual(T.__constraints__, ())
+ self.assertIs(T.__bound__, None)
+ self.assertIs(T.__covariant__, False)
+ self.assertIs(T.__contravariant__, False)
+ self.assertIs(T.__infer_variance__, False)
+
+ def test_attributes(self):
+ T_bound = TypeVar('T_bound', bound=int)
+ self.assertEqual(T_bound.__name__, 'T_bound')
+ self.assertEqual(T_bound.__constraints__, ())
+ self.assertIs(T_bound.__bound__, int)
+
+ T_constraints = TypeVar('T_constraints', int, str)
+ self.assertEqual(T_constraints.__name__, 'T_constraints')
+ self.assertEqual(T_constraints.__constraints__, (int, str))
+ self.assertIs(T_constraints.__bound__, None)
+
+ T_co = TypeVar('T_co', covariant=True)
+ self.assertEqual(T_co.__name__, 'T_co')
+ self.assertIs(T_co.__covariant__, True)
+ self.assertIs(T_co.__contravariant__, False)
+ self.assertIs(T_co.__infer_variance__, False)
+
+ T_contra = TypeVar('T_contra', contravariant=True)
+ self.assertEqual(T_contra.__name__, 'T_contra')
+ self.assertIs(T_contra.__covariant__, False)
+ self.assertIs(T_contra.__contravariant__, True)
+ self.assertIs(T_contra.__infer_variance__, False)
+
+ T_infer = TypeVar('T_infer', infer_variance=True)
+ self.assertEqual(T_infer.__name__, 'T_infer')
+ self.assertIs(T_infer.__covariant__, False)
+ self.assertIs(T_infer.__contravariant__, False)
+ self.assertIs(T_infer.__infer_variance__, True)
+
+ def test_typevar_instance_type_error(self):
+ T = TypeVar('T')
+ with self.assertRaises(TypeError):
+ isinstance(42, T)
+
+ def test_typevar_subclass_type_error(self):
+ T = TypeVar('T')
+ with self.assertRaises(TypeError):
+ issubclass(int, T)
+ with self.assertRaises(TypeError):
+ issubclass(T, int)
+
+ def test_constrained_error(self):
+ with self.assertRaises(TypeError):
+ X = TypeVar('X', int)
+ X
+
+ def test_union_unique(self):
+ X = TypeVar('X')
+ Y = TypeVar('Y')
+ self.assertNotEqual(X, Y)
+ self.assertEqual(Union[X], X)
+ self.assertNotEqual(Union[X], Union[X, Y])
+ self.assertEqual(Union[X, X], X)
+ self.assertNotEqual(Union[X, int], Union[X])
+ self.assertNotEqual(Union[X, int], Union[int])
+ self.assertEqual(Union[X, int].__args__, (X, int))
+ self.assertEqual(Union[X, int].__parameters__, (X,))
+ self.assertIs(Union[X, int].__origin__, Union)
+
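+ # types.UnionType only exists on Python 3.10+, where the `X | Y` union
+ # syntax is supported, so this test is only defined there.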
+ if hasattr(types, "UnionType"):
+ def test_or(self):
+ X = TypeVar('X')
+ # use a string because str doesn't implement
+ # __or__/__ror__ itself
+ self.assertEqual(X | "x", Union[X, "x"])
+ self.assertEqual("x" | X, Union["x", X])
+ # make sure the order is correct
+ self.assertEqual(get_args(X | "x"), (X, typing.ForwardRef("x")))
+ self.assertEqual(get_args("x" | X), (typing.ForwardRef("x"), X))
+
+ def test_union_constrained(self):
+ A = TypeVar('A', str, bytes)
+ self.assertNotEqual(Union[A, str], Union[A])
+
+ def test_repr(self):
+ self.assertEqual(repr(T), '~T')
+ self.assertEqual(repr(KT), '~KT')
+ self.assertEqual(repr(VT), '~VT')
+ self.assertEqual(repr(AnyStr), '~AnyStr')
+ T_co = TypeVar('T_co', covariant=True)
+ self.assertEqual(repr(T_co), '+T_co')
+ T_contra = TypeVar('T_contra', contravariant=True)
+ self.assertEqual(repr(T_contra), '-T_contra')
+
+ def test_no_redefinition(self):
+ self.assertNotEqual(TypeVar('T'), TypeVar('T'))
+ self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str))
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class V(TypeVar): pass
+ T = TypeVar("T")
+ with self.assertRaises(TypeError):
+ class W(T): pass
+
+ def test_cannot_instantiate_vars(self):
+ with self.assertRaises(TypeError):
+ TypeVar('A')()
+
+ def test_bound_errors(self):
+ with self.assertRaises(TypeError):
+ TypeVar('X', bound=Union)
+ with self.assertRaises(TypeError):
+ TypeVar('X', str, float, bound=Employee)
+ with self.assertRaisesRegex(TypeError,
+ r"Bound must be a type\. Got \(1, 2\)\."):
+ TypeVar('X', bound=(1, 2))
+
+ # Technically we could run this on later bugfix releases of 3.8,
+ # but that's not worth the effort.
+ @skipUnless(TYPING_3_9_0, "Fix was not backported")
+ def test_missing__name__(self):
+ # See bpo-39942
+ code = ("import typing\n"
+ "T = typing.TypeVar('T')\n"
+ )
+ exec(code, {})
+
+ def test_no_bivariant(self):
+ with self.assertRaises(ValueError):
+ TypeVar('T', covariant=True, contravariant=True)
+
+ def test_cannot_combine_explicit_and_infer(self):
+ with self.assertRaises(ValueError):
+ TypeVar('T', covariant=True, infer_variance=True)
+ with self.assertRaises(ValueError):
+ TypeVar('T', contravariant=True, infer_variance=True)
+
+
+class TypeVarLikeDefaultsTests(BaseTestCase):
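+ # PEP 696: TypeVar, ParamSpec and TypeVarTuple accept a `default=`
+ # argument, exposed as __default__. has_default() distinguishes an
+ # explicit `default=None` from no default at all, which is reported
+ # via the NoDefault sentinel.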
+ def test_typevar(self):
+ T = typing_extensions.TypeVar('T', default=int)
+ typing_T = typing.TypeVar('T')
+ self.assertEqual(T.__default__, int)
+ self.assertIsInstance(T, typing_extensions.TypeVar)
+ self.assertIsInstance(T, typing.TypeVar)
+ self.assertIsInstance(typing_T, typing.TypeVar)
+ self.assertIsInstance(typing_T, typing_extensions.TypeVar)
+
+ class A(Generic[T]): ...
+ self.assertEqual(Optional[T].__args__, (T, type(None)))
+
+ def test_typevar_none(self):
+ U = typing_extensions.TypeVar('U')
+ U_None = typing_extensions.TypeVar('U_None', default=None)
+ self.assertIs(U.__default__, NoDefault)
+ self.assertFalse(U.has_default())
+ self.assertEqual(U_None.__default__, None)
+ self.assertTrue(U_None.has_default())
+
+ def test_paramspec(self):
+ P = ParamSpec('P', default=[str, int])
+ self.assertEqual(P.__default__, [str, int])
+ self.assertTrue(P.has_default())
+ self.assertIsInstance(P, ParamSpec)
+ if hasattr(typing, "ParamSpec"):
+ self.assertIsInstance(P, typing.ParamSpec)
+ typing_P = typing.ParamSpec('P')
+ self.assertIsInstance(typing_P, typing.ParamSpec)
+ self.assertIsInstance(typing_P, ParamSpec)
+
+ class A(Generic[P]): ...
+ self.assertEqual(typing.Callable[P, None].__args__, (P, type(None)))
+
+ P_default = ParamSpec('P_default', default=...)
+ self.assertIs(P_default.__default__, ...)
+ self.assertTrue(P_default.has_default())
+
+ def test_paramspec_none(self):
+ U = ParamSpec('U')
+ U_None = ParamSpec('U_None', default=None)
+ self.assertIs(U.__default__, NoDefault)
+ self.assertFalse(U.has_default())
+ self.assertIs(U_None.__default__, None)
+ self.assertTrue(U_None.has_default())
+
+ def test_typevartuple(self):
+ Ts = TypeVarTuple('Ts', default=Unpack[Tuple[str, int]])
+ self.assertEqual(Ts.__default__, Unpack[Tuple[str, int]])
+ self.assertIsInstance(Ts, TypeVarTuple)
+ self.assertTrue(Ts.has_default())
+ if hasattr(typing, "TypeVarTuple"):
+ self.assertIsInstance(Ts, typing.TypeVarTuple)
+ typing_Ts = typing.TypeVarTuple('Ts')
+ self.assertIsInstance(typing_Ts, typing.TypeVarTuple)
+ self.assertIsInstance(typing_Ts, TypeVarTuple)
+
+ class A(Generic[Unpack[Ts]]): ...
+ self.assertEqual(Optional[Unpack[Ts]].__args__, (Unpack[Ts], type(None)))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_typevartuple_specialization(self):
+ T = TypeVar("T")
+ Ts = TypeVarTuple('Ts', default=Unpack[Tuple[str, int]])
+ self.assertEqual(Ts.__default__, Unpack[Tuple[str, int]])
+ class A(Generic[T, Unpack[Ts]]): ...
+ self.assertEqual(A[float].__args__, (float, str, int))
+ self.assertEqual(A[float, range].__args__, (float, range))
+ self.assertEqual(A[float, Unpack[tuple[int, ...]]].__args__, (float, Unpack[tuple[int, ...]]))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_typevar_and_typevartuple_specialization(self):
+ T = TypeVar("T")
+ U = TypeVar("U", default=float)
+ Ts = TypeVarTuple('Ts', default=Unpack[Tuple[str, int]])
+ self.assertEqual(Ts.__default__, Unpack[Tuple[str, int]])
+ class A(Generic[T, U, Unpack[Ts]]): ...
+ self.assertEqual(A[int].__args__, (int, float, str, int))
+ self.assertEqual(A[int, str].__args__, (int, str, str, int))
+ self.assertEqual(A[int, str, range].__args__, (int, str, range))
+ self.assertEqual(A[int, str, Unpack[tuple[int, ...]]].__args__, (int, str, Unpack[tuple[int, ...]]))
+
+ def test_no_default_after_typevar_tuple(self):
+ T = TypeVar("T", default=int)
+ Ts = TypeVarTuple("Ts")
+ Ts_default = TypeVarTuple("Ts_default", default=Unpack[Tuple[str, int]])
+
+ with self.assertRaises(TypeError):
+ class X(Generic[Unpack[Ts], T]): ...
+
+ with self.assertRaises(TypeError):
+ class Y(Generic[Unpack[Ts_default], T]): ...
+
+ def test_typevartuple_none(self):
+ U = TypeVarTuple('U')
+ U_None = TypeVarTuple('U_None', default=None)
+ self.assertIs(U.__default__, NoDefault)
+ self.assertFalse(U.has_default())
+ self.assertIs(U_None.__default__, None)
+ self.assertTrue(U_None.has_default())
+
+ def test_no_default_after_non_default(self):
+ DefaultStrT = typing_extensions.TypeVar('DefaultStrT', default=str)
+ T = TypeVar('T')
+
+ with self.assertRaises(TypeError):
+ Generic[DefaultStrT, T]
+
+ def test_need_more_params(self):
+ DefaultStrT = typing_extensions.TypeVar('DefaultStrT', default=str)
+ T = typing_extensions.TypeVar('T')
+ U = typing_extensions.TypeVar('U')
+
+ class A(Generic[T, U, DefaultStrT]): ...
+ A[int, bool]
+ A[int, bool, str]
+
+ with self.assertRaises(
+ TypeError, msg="Too few arguments for .+; actual 1, expected at least 2"
+ ):
+ A[int]
+
+ def test_pickle(self):
+ global U, U_co, U_contra, U_default # pickle wants to reference the class by name
+ U = typing_extensions.TypeVar('U')
+ U_co = typing_extensions.TypeVar('U_co', covariant=True)
+ U_contra = typing_extensions.TypeVar('U_contra', contravariant=True)
+ U_default = typing_extensions.TypeVar('U_default', default=int)
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ for typevar in (U, U_co, U_contra, U_default):
+ z = pickle.loads(pickle.dumps(typevar, proto))
+ self.assertEqual(z.__name__, typevar.__name__)
+ self.assertEqual(z.__covariant__, typevar.__covariant__)
+ self.assertEqual(z.__contravariant__, typevar.__contravariant__)
+ self.assertEqual(z.__bound__, typevar.__bound__)
+ self.assertEqual(z.__default__, typevar.__default__)
+
+ def test_strange_defaults_are_allowed(self):
+ # Leave it to type checkers to check whether strange default values
+ # should be allowed or disallowed
+ def not_a_type(): ...
+
+ for typevarlike_cls in TypeVar, ParamSpec, TypeVarTuple:
+ for default in not_a_type, 42, bytearray(), (int, not_a_type, 42):
+ with self.subTest(typevarlike_cls=typevarlike_cls, default=default):
+ T = typevarlike_cls("T", default=default)
+ self.assertEqual(T.__default__, default)
+
+ @skip_if_py313_beta_1
+ def test_allow_default_after_non_default_in_alias(self):
+ T_default = TypeVar('T_default', default=int)
+ T = TypeVar('T')
+ Ts = TypeVarTuple('Ts')
+
+ a1 = Callable[[T_default], T]
+ self.assertEqual(a1.__args__, (T_default, T))
+
+ if sys.version_info >= (3, 9):
+ a2 = dict[T_default, T]
+ self.assertEqual(a2.__args__, (T_default, T))
+
+ a3 = typing.Dict[T_default, T]
+ self.assertEqual(a3.__args__, (T_default, T))
+
+ a4 = Callable[[Unpack[Ts]], T]
+ self.assertEqual(a4.__args__, (Unpack[Ts], T))
+
+ @skipIf(
+ typing_extensions.Protocol is typing.Protocol,
+ "Test currently fails with the CPython version of Protocol and that's not our fault"
+ )
+ def test_generic_with_broken_eq(self):
+ # See https://github.com/python/typing_extensions/pull/422 for context
+ class BrokenEq(type):
+ def __eq__(self, other):
+ if other is typing_extensions.Protocol:
+ raise TypeError("I'm broken")
+ return False
+
+ class G(Generic[T], metaclass=BrokenEq):
+ pass
+
+ alias = G[int]
+ self.assertIs(get_origin(alias), G)
+ self.assertEqual(get_args(alias), (int,))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_paramspec_specialization(self):
+ T = TypeVar("T")
+ P = ParamSpec('P', default=[str, int])
+ self.assertEqual(P.__default__, [str, int])
+ class A(Generic[T, P]): ...
+ self.assertEqual(A[float].__args__, (float, (str, int)))
+ self.assertEqual(A[float, [range]].__args__, (float, (range,)))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_typevar_and_paramspec_specialization(self):
+ T = TypeVar("T")
+ U = TypeVar("U", default=float)
+ P = ParamSpec('P', default=[str, int])
+ self.assertEqual(P.__default__, [str, int])
+ class A(Generic[T, U, P]): ...
+ self.assertEqual(A[float].__args__, (float, float, (str, int)))
+ self.assertEqual(A[float, int].__args__, (float, int, (str, int)))
+ self.assertEqual(A[float, int, [range]].__args__, (float, int, (range,)))
+
+ @skipIf(
+ sys.version_info < (3, 11, 1),
+ "Not yet backported for older versions of Python"
+ )
+ def test_paramspec_and_typevar_specialization(self):
+ T = TypeVar("T")
+ P = ParamSpec('P', default=[str, int])
+ U = TypeVar("U", default=float)
+ self.assertEqual(P.__default__, [str, int])
+ class A(Generic[T, P, U]): ...
+ self.assertEqual(A[float].__args__, (float, (str, int), float))
+ self.assertEqual(A[float, [range]].__args__, (float, (range,), float))
+ self.assertEqual(A[float, [range], int].__args__, (float, (range,), int))
+
+
+class NoDefaultTests(BaseTestCase):
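+ # NoDefault is a singleton sentinel: constructing its type again returns
+ # the same object, pickling round-trips by identity, and the sentinel
+ # itself cannot be called.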
+ @skip_if_py313_beta_1
+ def test_pickling(self):
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ s = pickle.dumps(NoDefault, proto)
+ loaded = pickle.loads(s)
+ self.assertIs(NoDefault, loaded)
+
+ @skip_if_py313_beta_1
+ def test_doc(self):
+ self.assertIsInstance(NoDefault.__doc__, str)
+
+ def test_constructor(self):
+ self.assertIs(NoDefault, type(NoDefault)())
+ with self.assertRaises(TypeError):
+ type(NoDefault)(1)
+
+ def test_repr(self):
+ self.assertRegex(repr(NoDefault), r'typing(_extensions)?\.NoDefault')
+
+ def test_no_call(self):
+ with self.assertRaises(TypeError):
+ NoDefault()
+
+ @skip_if_py313_beta_1
+ def test_immutable(self):
+ with self.assertRaises(AttributeError):
+ NoDefault.foo = 'bar'
+ with self.assertRaises(AttributeError):
+ NoDefault.foo
+
+ # TypeError is consistent with the behavior of NoneType
+ with self.assertRaises(TypeError):
+ type(NoDefault).foo = 3
+ with self.assertRaises(AttributeError):
+ type(NoDefault).foo
+
+
+class TypeVarInferVarianceTests(BaseTestCase):
+ def test_typevar(self):
+ T = typing_extensions.TypeVar('T')
+ self.assertFalse(T.__infer_variance__)
+ T_infer = typing_extensions.TypeVar('T_infer', infer_variance=True)
+ self.assertTrue(T_infer.__infer_variance__)
+ T_noinfer = typing_extensions.TypeVar('T_noinfer', infer_variance=False)
+ self.assertFalse(T_noinfer.__infer_variance__)
+
+ def test_pickle(self):
+ global U, U_infer # pickle wants to reference the class by name
+ U = typing_extensions.TypeVar('U')
+ U_infer = typing_extensions.TypeVar('U_infer', infer_variance=True)
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ for typevar in (U, U_infer):
+ z = pickle.loads(pickle.dumps(typevar, proto))
+ self.assertEqual(z.__name__, typevar.__name__)
+ self.assertEqual(z.__covariant__, typevar.__covariant__)
+ self.assertEqual(z.__contravariant__, typevar.__contravariant__)
+ self.assertEqual(z.__bound__, typevar.__bound__)
+ self.assertEqual(z.__infer_variance__, typevar.__infer_variance__)
+
+
+class BufferTests(BaseTestCase):
+ def test(self):
+ self.assertIsInstance(memoryview(b''), Buffer)
+ self.assertIsInstance(bytearray(), Buffer)
+ self.assertIsInstance(b"x", Buffer)
+ self.assertNotIsInstance(1, Buffer)
+
+ self.assertIsSubclass(bytearray, Buffer)
+ self.assertIsSubclass(memoryview, Buffer)
+ self.assertIsSubclass(bytes, Buffer)
+ self.assertNotIsSubclass(int, Buffer)
+
+ class MyRegisteredBuffer:
+ def __buffer__(self, flags: int) -> memoryview:
+ return memoryview(b'')
+
+ # On 3.12, collections.abc.Buffer does a structural compatibility check
+ if TYPING_3_12_0:
+ self.assertIsInstance(MyRegisteredBuffer(), Buffer)
+ self.assertIsSubclass(MyRegisteredBuffer, Buffer)
+ else:
+ self.assertNotIsInstance(MyRegisteredBuffer(), Buffer)
+ self.assertNotIsSubclass(MyRegisteredBuffer, Buffer)
+ Buffer.register(MyRegisteredBuffer)
+ self.assertIsInstance(MyRegisteredBuffer(), Buffer)
+ self.assertIsSubclass(MyRegisteredBuffer, Buffer)
+
+ class MySubclassedBuffer(Buffer):
+ def __buffer__(self, flags: int) -> memoryview:
+ return memoryview(b'')
+
+ self.assertIsInstance(MySubclassedBuffer(), Buffer)
+ self.assertIsSubclass(MySubclassedBuffer, Buffer)
+
+
+class GetOriginalBasesTests(BaseTestCase):
+ def test_basics(self):
+ T = TypeVar('T')
+ class A: pass
+ class B(Generic[T]): pass
+ class C(B[int]): pass
+ class D(B[str], float): pass
+ self.assertEqual(get_original_bases(A), (object,))
+ self.assertEqual(get_original_bases(B), (Generic[T],))
+ self.assertEqual(get_original_bases(C), (B[int],))
+ self.assertEqual(get_original_bases(int), (object,))
+ self.assertEqual(get_original_bases(D), (B[str], float))
+
+ with self.assertRaisesRegex(TypeError, "Expected an instance of type"):
+ get_original_bases(object())
+
+ @skipUnless(TYPING_3_9_0, "PEP 585 builtin generics require Python 3.9+")
+ def test_builtin_generics(self):
+ class E(list[T]): pass
+ class F(list[int]): pass
+
+ self.assertEqual(get_original_bases(E), (list[T],))
+ self.assertEqual(get_original_bases(F), (list[int],))
+
+ @skipIf(
+ sys.version_info[:3] == (3, 12, 0) and sys.version_info[3] in {"alpha", "beta"},
+ "Early versions of py312 had a bug"
+ )
+ def test_concrete_subclasses_of_generic_classes(self):
+ T = TypeVar("T")
+
+ class FirstBase(Generic[T]): pass
+ class SecondBase(Generic[T]): pass
+ class First(FirstBase[int]): pass
+ class Second(SecondBase[int]): pass
+ class G(First, Second): pass
+ self.assertEqual(get_original_bases(G), (First, Second))
+
+ class First_(Generic[T]): pass
+ class Second_(Generic[T]): pass
+ class H(First_, Second_): pass
+ self.assertEqual(get_original_bases(H), (First_, Second_))
+
+ def test_namedtuples(self):
+ # On 3.12, this should work well with typing.NamedTuple and typing_extensions.NamedTuple
+ # On lower versions, it will only work fully with typing_extensions.NamedTuple
+ if sys.version_info >= (3, 12):
+ namedtuple_classes = (typing.NamedTuple, typing_extensions.NamedTuple)
+ else:
+ namedtuple_classes = (typing_extensions.NamedTuple,)
+
+ for NamedTuple in namedtuple_classes: # noqa: F402
+ with self.subTest(cls=NamedTuple):
+ class ClassBasedNamedTuple(NamedTuple):
+ x: int
+
+ class GenericNamedTuple(NamedTuple, Generic[T]):
+ x: T
+
+ CallBasedNamedTuple = NamedTuple("CallBasedNamedTuple", [("x", int)])
+
+ self.assertIs(
+ get_original_bases(ClassBasedNamedTuple)[0], NamedTuple
+ )
+ self.assertEqual(
+ get_original_bases(GenericNamedTuple),
+ (NamedTuple, Generic[T])
+ )
+ self.assertIs(
+ get_original_bases(CallBasedNamedTuple)[0], NamedTuple
+ )
+
+ def test_typeddicts(self):
+ # On 3.12, this should work well with typing.TypedDict and typing_extensions.TypedDict
+ # On lower versions, it will only work fully with typing_extensions.TypedDict
+ if sys.version_info >= (3, 12):
+ typeddict_classes = (typing.TypedDict, typing_extensions.TypedDict)
+ else:
+ typeddict_classes = (typing_extensions.TypedDict,)
+
+ for TypedDict in typeddict_classes: # noqa: F402
+ with self.subTest(cls=TypedDict):
+ class ClassBasedTypedDict(TypedDict):
+ x: int
+
+ class GenericTypedDict(TypedDict, Generic[T]):
+ x: T
+
+ CallBasedTypedDict = TypedDict("CallBasedTypedDict", {"x": int})
+
+ self.assertIs(
+ get_original_bases(ClassBasedTypedDict)[0],
+ TypedDict
+ )
+ self.assertEqual(
+ get_original_bases(GenericTypedDict),
+ (TypedDict, Generic[T])
+ )
+ self.assertIs(
+ get_original_bases(CallBasedTypedDict)[0],
+ TypedDict
+ )
+
+
+class TypeAliasTypeTests(BaseTestCase):
+ def test_attributes(self):
+ Simple = TypeAliasType("Simple", int)
+ self.assertEqual(Simple.__name__, "Simple")
+ self.assertIs(Simple.__value__, int)
+ self.assertEqual(Simple.__type_params__, ())
+ self.assertEqual(Simple.__parameters__, ())
+
+ T = TypeVar("T")
+ ListOrSetT = TypeAliasType("ListOrSetT", Union[List[T], Set[T]], type_params=(T,))
+ self.assertEqual(ListOrSetT.__name__, "ListOrSetT")
+ self.assertEqual(ListOrSetT.__value__, Union[List[T], Set[T]])
+ self.assertEqual(ListOrSetT.__type_params__, (T,))
+ self.assertEqual(ListOrSetT.__parameters__, (T,))
+
+ Ts = TypeVarTuple("Ts")
+ Variadic = TypeAliasType("Variadic", Tuple[int, Unpack[Ts]], type_params=(Ts,))
+ self.assertEqual(Variadic.__name__, "Variadic")
+ self.assertEqual(Variadic.__value__, Tuple[int, Unpack[Ts]])
+ self.assertEqual(Variadic.__type_params__, (Ts,))
+ self.assertEqual(Variadic.__parameters__, tuple(iter(Ts)))
+
+ def test_cannot_set_attributes(self):
+ Simple = TypeAliasType("Simple", int)
+ with self.assertRaisesRegex(AttributeError, "readonly attribute"):
+ Simple.__name__ = "NewName"
+ with self.assertRaisesRegex(
+ AttributeError,
+ "attribute '__value__' of 'typing.TypeAliasType' objects is not writable",
+ ):
+ Simple.__value__ = str
+ with self.assertRaisesRegex(
+ AttributeError,
+ "attribute '__type_params__' of 'typing.TypeAliasType' objects is not writable",
+ ):
+ Simple.__type_params__ = (T,)
+ with self.assertRaisesRegex(
+ AttributeError,
+ "attribute '__parameters__' of 'typing.TypeAliasType' objects is not writable",
+ ):
+ Simple.__parameters__ = (T,)
+ with self.assertRaisesRegex(
+ AttributeError,
+ "attribute '__module__' of 'typing.TypeAliasType' objects is not writable",
+ ):
+ Simple.__module__ = 42
+ with self.assertRaisesRegex(
+ AttributeError,
+ "'typing.TypeAliasType' object has no attribute 'some_attribute'",
+ ):
+ Simple.some_attribute = "not allowed"
+
+ def test_cannot_delete_attributes(self):
+ Simple = TypeAliasType("Simple", int)
+ with self.assertRaisesRegex(AttributeError, "readonly attribute"):
+ del Simple.__name__
+ with self.assertRaisesRegex(
+ AttributeError,
+ "attribute '__value__' of 'typing.TypeAliasType' objects is not writable",
+ ):
+ del Simple.__value__
+ with self.assertRaisesRegex(
+ AttributeError,
+ "'typing.TypeAliasType' object has no attribute 'some_attribute'",
+ ):
+ del Simple.some_attribute
+
+ def test_or(self):
+ Alias = TypeAliasType("Alias", int)
+ if sys.version_info >= (3, 10):
+ self.assertEqual(Alias | int, Union[Alias, int])
+ self.assertEqual(Alias | None, Union[Alias, None])
+ self.assertEqual(Alias | (int | str), Union[Alias, int | str])
+ self.assertEqual(Alias | list[float], Union[Alias, list[float]])
+ else:
+ with self.assertRaises(TypeError):
+ Alias | int
+ # Rejected on all versions
+ with self.assertRaises(TypeError):
+ Alias | "Ref"
+
+ def test_getitem(self):
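+ # Subscripting a TypeAliasType does not expand __value__; it returns a
+ # generic alias whose __origin__ is the alias itself, and any type
+ # variables left in the arguments can be substituted by subscripting
+ # again.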
+ ListOrSetT = TypeAliasType("ListOrSetT", Union[List[T], Set[T]], type_params=(T,))
+ subscripted = ListOrSetT[int]
+ self.assertEqual(get_args(subscripted), (int,))
+ self.assertIs(get_origin(subscripted), ListOrSetT)
+ with self.assertRaises(TypeError):
+ subscripted[str]
+
+ still_generic = ListOrSetT[Iterable[T]]
+ self.assertEqual(get_args(still_generic), (Iterable[T],))
+ self.assertIs(get_origin(still_generic), ListOrSetT)
+ fully_subscripted = still_generic[float]
+ self.assertEqual(get_args(fully_subscripted), (Iterable[float],))
+ self.assertIs(get_origin(fully_subscripted), ListOrSetT)
+
+ def test_pickle(self):
+ global Alias
+ Alias = TypeAliasType("Alias", int)
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(proto=proto):
+ pickled = pickle.dumps(Alias, proto)
+ unpickled = pickle.loads(pickled)
+ self.assertIs(unpickled, Alias)
+
+ def test_no_instance_subclassing(self):
+ with self.assertRaises(TypeError):
+ class MyAlias(TypeAliasType):
+ pass
+
+
+class DocTests(BaseTestCase):
+ def test_annotation(self):
+
+ def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: pass
+
+ hints = get_type_hints(hi, include_extras=True)
+ doc_info = hints["to"].__metadata__[0]
+ self.assertEqual(doc_info.documentation, "Who to say hi to")
+ self.assertIsInstance(doc_info, Doc)
+
+ def test_repr(self):
+ doc_info = Doc("Who to say hi to")
+ self.assertEqual(repr(doc_info), "Doc('Who to say hi to')")
+
+ def test_hashability(self):
+ doc_info = Doc("Who to say hi to")
+ self.assertIsInstance(hash(doc_info), int)
+ self.assertNotEqual(hash(doc_info), hash(Doc("Who not to say hi to")))
+
+ def test_equality(self):
+ doc_info = Doc("Who to say hi to")
+ # Equal to itself
+ self.assertEqual(doc_info, doc_info)
+ # Equal to another instance with the same string
+ self.assertEqual(doc_info, Doc("Who to say hi to"))
+ # Not equal to another instance with a different string
+ self.assertNotEqual(doc_info, Doc("Who not to say hi to"))
+
+ def test_pickle(self):
+ doc_info = Doc("Who to say hi to")
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ pickled = pickle.dumps(doc_info, protocol=proto)
+ self.assertEqual(doc_info, pickle.loads(pickled))
+
+
+@skipUnless(
+ hasattr(typing_extensions, "CapsuleType"),
+ "CapsuleType is not available on all Python implementations"
+)
+class CapsuleTypeTests(BaseTestCase):
+ def test_capsule_type(self):
+ import _datetime
+ self.assertIsInstance(_datetime.datetime_CAPI, typing_extensions.CapsuleType)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/src/typing_extensions.py b/src/typing_extensions.py
new file mode 100644
index 0000000..dec429c
--- /dev/null
+++ b/src/typing_extensions.py
@@ -0,0 +1,3641 @@
+import abc
+import collections
+import collections.abc
+import contextlib
+import functools
+import inspect
+import operator
+import sys
+import types as _types
+import typing
+import warnings
+
+__all__ = [
+ # Super-special typing primitives.
+ 'Any',
+ 'ClassVar',
+ 'Concatenate',
+ 'Final',
+ 'LiteralString',
+ 'ParamSpec',
+ 'ParamSpecArgs',
+ 'ParamSpecKwargs',
+ 'Self',
+ 'Type',
+ 'TypeVar',
+ 'TypeVarTuple',
+ 'Unpack',
+
+ # ABCs (from collections.abc).
+ 'Awaitable',
+ 'AsyncIterator',
+ 'AsyncIterable',
+ 'Coroutine',
+ 'AsyncGenerator',
+ 'AsyncContextManager',
+ 'Buffer',
+ 'ChainMap',
+
+ # Concrete collection types.
+ 'ContextManager',
+ 'Counter',
+ 'Deque',
+ 'DefaultDict',
+ 'NamedTuple',
+ 'OrderedDict',
+ 'TypedDict',
+
+ # Structural checks, a.k.a. protocols.
+ 'SupportsAbs',
+ 'SupportsBytes',
+ 'SupportsComplex',
+ 'SupportsFloat',
+ 'SupportsIndex',
+ 'SupportsInt',
+ 'SupportsRound',
+
+ # One-off things.
+ 'Annotated',
+ 'assert_never',
+ 'assert_type',
+ 'clear_overloads',
+ 'dataclass_transform',
+ 'deprecated',
+ 'Doc',
+ 'get_overloads',
+ 'final',
+ 'get_args',
+ 'get_origin',
+ 'get_original_bases',
+ 'get_protocol_members',
+ 'get_type_hints',
+ 'IntVar',
+ 'is_protocol',
+ 'is_typeddict',
+ 'Literal',
+ 'NewType',
+ 'overload',
+ 'override',
+ 'Protocol',
+ 'reveal_type',
+ 'runtime',
+ 'runtime_checkable',
+ 'Text',
+ 'TypeAlias',
+ 'TypeAliasType',
+ 'TypeGuard',
+ 'TypeIs',
+ 'TYPE_CHECKING',
+ 'Never',
+ 'NoReturn',
+ 'ReadOnly',
+ 'Required',
+ 'NotRequired',
+
+ # Pure aliases, have always been in typing
+ 'AbstractSet',
+ 'AnyStr',
+ 'BinaryIO',
+ 'Callable',
+ 'Collection',
+ 'Container',
+ 'Dict',
+ 'ForwardRef',
+ 'FrozenSet',
+ 'Generator',
+ 'Generic',
+ 'Hashable',
+ 'IO',
+ 'ItemsView',
+ 'Iterable',
+ 'Iterator',
+ 'KeysView',
+ 'List',
+ 'Mapping',
+ 'MappingView',
+ 'Match',
+ 'MutableMapping',
+ 'MutableSequence',
+ 'MutableSet',
+ 'NoDefault',
+ 'Optional',
+ 'Pattern',
+ 'Reversible',
+ 'Sequence',
+ 'Set',
+ 'Sized',
+ 'TextIO',
+ 'Tuple',
+ 'Union',
+ 'ValuesView',
+ 'cast',
+ 'no_type_check',
+ 'no_type_check_decorator',
+]
+
+# for backward compatibility
+PEP_560 = True
+GenericMeta = type
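+# PEP 696 (defaults for type parameters) is implemented in the stdlib
+# starting with the CPython 3.13 betas.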
+_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta")
+
+# The functions below are modified copies of typing internal helpers.
+# They are needed by _ProtocolMeta and they provide support for PEP 646.
+
+
+class _Sentinel:
+ def __repr__(self):
+ return "<sentinel>"
+
+
+_marker = _Sentinel()
+
+
+if sys.version_info >= (3, 10):
+ def _should_collect_from_parameters(t):
+ return isinstance(
+ t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
+ )
+elif sys.version_info >= (3, 9):
+ def _should_collect_from_parameters(t):
+ return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
+else:
+ def _should_collect_from_parameters(t):
+ return isinstance(t, typing._GenericAlias) and not t._special
+
+
+NoReturn = typing.NoReturn
+
+# Some unconstrained type variables. These are used by the container types.
+# (These are not for export.)
+T = typing.TypeVar('T') # Any type.
+KT = typing.TypeVar('KT') # Key type.
+VT = typing.TypeVar('VT') # Value type.
+T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers.
+T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant.
+
+
+if sys.version_info >= (3, 11):
+ from typing import Any
+else:
+
+ class _AnyMeta(type):
+ def __instancecheck__(self, obj):
+ if self is Any:
+ raise TypeError("typing_extensions.Any cannot be used with isinstance()")
+ return super().__instancecheck__(obj)
+
+ def __repr__(self):
+ if self is Any:
+ return "typing_extensions.Any"
+ return super().__repr__()
+
+ class Any(metaclass=_AnyMeta):
+ """Special type indicating an unconstrained type.
+ - Any is compatible with every type.
+ - Any assumed to have all methods.
+ - All values assumed to be instances of Any.
+ Note that all the above statements are true from the point of view of
+ static type checkers. At runtime, Any should not be used with instance
+ checks.
+ """
+ def __new__(cls, *args, **kwargs):
+ if cls is Any:
+ raise TypeError("Any cannot be instantiated")
+ return super().__new__(cls, *args, **kwargs)
+
+
+ClassVar = typing.ClassVar
+
+
+class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
+ def __repr__(self):
+ return 'typing_extensions.' + self._name
+
+
+Final = typing.Final
+
+if sys.version_info >= (3, 11):
+ final = typing.final
+else:
+ # @final exists in 3.8+, but we backport it for all versions
+ # before 3.11 to keep support for the __final__ attribute.
+ # See https://bugs.python.org/issue46342
+ def final(f):
+ """This decorator can be used to indicate to type checkers that
+ the decorated method cannot be overridden, and decorated class
+ cannot be subclassed. For example:
+
+ class Base:
+ @final
+ def done(self) -> None:
+ ...
+ class Sub(Base):
+ def done(self) -> None: # Error reported by type checker
+ ...
+ @final
+ class Leaf:
+ ...
+ class Other(Leaf): # Error reported by type checker
+ ...
+
+ There is no runtime checking of these properties. The decorator
+ sets the ``__final__`` attribute to ``True`` on the decorated object
+ to allow runtime introspection.
+ """
+ try:
+ f.__final__ = True
+ except (AttributeError, TypeError):
+ # Skip the attribute silently if it is not writable.
+ # AttributeError happens if the object has __slots__ or a
+ # read-only property, TypeError if it's a builtin class.
+ pass
+ return f
+
+
+def IntVar(name):
+ return typing.TypeVar(name)
+
+
+# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8
+if sys.version_info >= (3, 10, 1):
+ Literal = typing.Literal
+else:
+ def _flatten_literal_params(parameters):
+ """An internal helper for Literal creation: flatten Literals among parameters"""
+ params = []
+ for p in parameters:
+ if isinstance(p, _LiteralGenericAlias):
+ params.extend(p.__args__)
+ else:
+ params.append(p)
+ return tuple(params)
+
+ def _value_and_type_iter(params):
+ for p in params:
+ yield p, type(p)
+
+ class _LiteralGenericAlias(typing._GenericAlias, _root=True):
+ def __eq__(self, other):
+ if not isinstance(other, _LiteralGenericAlias):
+ return NotImplemented
+ these_args_deduped = set(_value_and_type_iter(self.__args__))
+ other_args_deduped = set(_value_and_type_iter(other.__args__))
+ return these_args_deduped == other_args_deduped
+
+ def __hash__(self):
+ return hash(frozenset(_value_and_type_iter(self.__args__)))
+
+ class _LiteralForm(_ExtensionsSpecialForm, _root=True):
+ def __init__(self, doc: str):
+ self._name = 'Literal'
+ self._doc = self.__doc__ = doc
+
+ def __getitem__(self, parameters):
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+
+ parameters = _flatten_literal_params(parameters)
+
+ val_type_pairs = list(_value_and_type_iter(parameters))
+ try:
+ deduped_pairs = set(val_type_pairs)
+ except TypeError:
+ # unhashable parameters
+ pass
+ else:
+ # similar logic to typing._deduplicate on Python 3.9+
+ if len(deduped_pairs) < len(val_type_pairs):
+ new_parameters = []
+ for pair in val_type_pairs:
+ if pair in deduped_pairs:
+ new_parameters.append(pair[0])
+ deduped_pairs.remove(pair)
+ assert not deduped_pairs, deduped_pairs
+ parameters = tuple(new_parameters)
+
+ return _LiteralGenericAlias(self, parameters)
+
+ Literal = _LiteralForm(doc="""\
+ A type that can be used to indicate to type checkers
+ that the corresponding value has a value literally equivalent
+ to the provided parameter. For example:
+
+ var: Literal[4] = 4
+
+ The type checker understands that 'var' is literally equal to
+ the value 4 and no other value.
+
+ Literal[...] cannot be subclassed. There is no runtime
+ checking verifying that the parameter is actually a value
+ instead of a type.""")
+
+
+_overload_dummy = typing._overload_dummy
+
+
+if hasattr(typing, "get_overloads"): # 3.11+
+ overload = typing.overload
+ get_overloads = typing.get_overloads
+ clear_overloads = typing.clear_overloads
+else:
+ # {module: {qualname: {firstlineno: func}}}
+ _overload_registry = collections.defaultdict(
+ functools.partial(collections.defaultdict, dict)
+ )
+
+ def overload(func):
+ """Decorator for overloaded functions/methods.
+
+ In a stub file, place two or more stub definitions for the same
+ function in a row, each decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+
+ In a non-stub file (i.e. a regular .py file), do the same but
+ follow it with an implementation. The implementation should *not*
+ be decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+ def utf8(value):
+ # implementation goes here
+
+ The overloads for a function can be retrieved at runtime using the
+ get_overloads() function.
+ """
+ # classmethod and staticmethod
+ f = getattr(func, "__func__", func)
+ try:
+ _overload_registry[f.__module__][f.__qualname__][
+ f.__code__.co_firstlineno
+ ] = func
+ except AttributeError:
+ # Not a normal function; ignore.
+ pass
+ return _overload_dummy
+
+ def get_overloads(func):
+ """Return all defined overloads for *func* as a sequence."""
+ # classmethod and staticmethod
+ f = getattr(func, "__func__", func)
+ if f.__module__ not in _overload_registry:
+ return []
+ mod_dict = _overload_registry[f.__module__]
+ if f.__qualname__ not in mod_dict:
+ return []
+ return list(mod_dict[f.__qualname__].values())
+
+ def clear_overloads():
+ """Clear all overloads in the registry."""
+ _overload_registry.clear()
+
+
+# This is not a real generic class. Don't use outside annotations.
+Type = typing.Type
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+Awaitable = typing.Awaitable
+Coroutine = typing.Coroutine
+AsyncIterable = typing.AsyncIterable
+AsyncIterator = typing.AsyncIterator
+Deque = typing.Deque
+DefaultDict = typing.DefaultDict
+OrderedDict = typing.OrderedDict
+Counter = typing.Counter
+ChainMap = typing.ChainMap
+Text = typing.Text
+TYPE_CHECKING = typing.TYPE_CHECKING
+
+
+if sys.version_info >= (3, 13, 0, "beta"):
+ from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator
+else:
+ def _is_dunder(attr):
+ return attr.startswith('__') and attr.endswith('__')
+
+ # Python <3.9 doesn't have typing._SpecialGenericAlias
+ _special_generic_alias_base = getattr(
+ typing, "_SpecialGenericAlias", typing._GenericAlias
+ )
+
+ class _SpecialGenericAlias(_special_generic_alias_base, _root=True):
+ def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()):
+ if _special_generic_alias_base is typing._GenericAlias:
+ # Python <3.9
+ self.__origin__ = origin
+ self._nparams = nparams
+ super().__init__(origin, nparams, special=True, inst=inst, name=name)
+ else:
+ # Python >= 3.9
+ super().__init__(origin, nparams, inst=inst, name=name)
+ self._defaults = defaults
+
+ def __setattr__(self, attr, val):
+ allowed_attrs = {'_name', '_inst', '_nparams', '_defaults'}
+ if _special_generic_alias_base is typing._GenericAlias:
+ # Python <3.9
+ allowed_attrs.add("__origin__")
+ if _is_dunder(attr) or attr in allowed_attrs:
+ object.__setattr__(self, attr, val)
+ else:
+ setattr(self.__origin__, attr, val)
+
+ @typing._tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ msg = "Parameters to generic types must be types."
+ params = tuple(typing._type_check(p, msg) for p in params)
+ if (
+ self._defaults
+ and len(params) < self._nparams
+ and len(params) + len(self._defaults) >= self._nparams
+ ):
+ params = (*params, *self._defaults[len(params) - self._nparams:])
+ actual_len = len(params)
+
+ if actual_len != self._nparams:
+ if self._defaults:
+ expected = f"at least {self._nparams - len(self._defaults)}"
+ else:
+ expected = str(self._nparams)
+ if not self._nparams:
+ raise TypeError(f"{self} is not a generic class")
+ raise TypeError(
+ f"Too {'many' if actual_len > self._nparams else 'few'}"
+ f" arguments for {self};"
+ f" actual {actual_len}, expected {expected}"
+ )
+ return self.copy_with(params)
+
+ _NoneType = type(None)
+ Generator = _SpecialGenericAlias(
+ collections.abc.Generator, 3, defaults=(_NoneType, _NoneType)
+ )
+ AsyncGenerator = _SpecialGenericAlias(
+ collections.abc.AsyncGenerator, 2, defaults=(_NoneType,)
+ )
+ ContextManager = _SpecialGenericAlias(
+ contextlib.AbstractContextManager,
+ 2,
+ name="ContextManager",
+ defaults=(typing.Optional[bool],)
+ )
+ AsyncContextManager = _SpecialGenericAlias(
+ contextlib.AbstractAsyncContextManager,
+ 2,
+ name="AsyncContextManager",
+ defaults=(typing.Optional[bool],)
+ )
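+ # With the defaults above, e.g. Generator[int] is equivalent to
+ # Generator[int, None, None] and ContextManager[int] to
+ # ContextManager[int, Optional[bool]].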
+
+
+_PROTO_ALLOWLIST = {
+ 'collections.abc': [
+ 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+ 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer',
+ ],
+ 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+ 'typing_extensions': ['Buffer'],
+}
+
+
+_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | {
+ "__match_args__", "__protocol_attrs__", "__non_callable_proto_members__",
+ "__final__",
+}
+
+
+def _get_protocol_attrs(cls):
+ attrs = set()
+ for base in cls.__mro__[:-1]: # without object
+ if base.__name__ in {'Protocol', 'Generic'}:
+ continue
+ annotations = getattr(base, '__annotations__', {})
+ for attr in (*base.__dict__, *annotations):
+ if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS):
+ attrs.add(attr)
+ return attrs
+
+
+def _caller(depth=2):
+ try:
+ return sys._getframe(depth).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError): # For platforms without _getframe()
+ return None
+
+
+# `__match_args__` attribute was removed from protocol members in 3.13,
+# we want to backport this change to older Python versions.
+if sys.version_info >= (3, 13):
+ Protocol = typing.Protocol
+else:
+ def _allow_reckless_class_checks(depth=3):
+ """Allow instance and class checks for special stdlib modules.
+ The abc and functools modules indiscriminately call isinstance() and
+ issubclass() on the whole MRO of a user class, which may contain protocols.
+ """
+ return _caller(depth) in {'abc', 'functools', None}
+
+ def _no_init(self, *args, **kwargs):
+ if type(self)._is_protocol:
+ raise TypeError('Protocols cannot be instantiated')
+
+ def _type_check_issubclass_arg_1(arg):
+ """Raise TypeError if `arg` is not an instance of `type`
+ in `issubclass(arg, <protocol>)`.
+
+ In most cases, this is verified by type.__subclasscheck__.
+ Checking it again unnecessarily would slow down issubclass() checks,
+ so we don't perform this check unless we absolutely have to.
+
+ For various error paths, however,
+ we want to ensure that *this* error message is shown to the user
+ where relevant, rather than a typing.py-specific error message.
+ """
+ if not isinstance(arg, type):
+ # Same error message as for issubclass(1, int).
+ raise TypeError('issubclass() arg 1 must be a class')
+
+ # Inheriting from typing._ProtocolMeta isn't actually desirable,
+ # but is necessary to allow typing.Protocol and typing_extensions.Protocol
+ # to mix without getting TypeErrors about "metaclass conflict"
+ class _ProtocolMeta(type(typing.Protocol)):
+ # This metaclass is somewhat unfortunate,
+ # but is necessary for several reasons...
+ #
+ # NOTE: DO NOT call super() in any methods in this class
+ # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11
+ # and those are slow
+ def __new__(mcls, name, bases, namespace, **kwargs):
+ if name == "Protocol" and len(bases) < 2:
+ pass
+ elif {Protocol, typing.Protocol} & set(bases):
+ for base in bases:
+ if not (
+ base in {object, typing.Generic, Protocol, typing.Protocol}
+ or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, [])
+ or is_protocol(base)
+ ):
+ raise TypeError(
+ f"Protocols can only inherit from other protocols, "
+ f"got {base!r}"
+ )
+ return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
+
+ def __init__(cls, *args, **kwargs):
+ abc.ABCMeta.__init__(cls, *args, **kwargs)
+ if getattr(cls, "_is_protocol", False):
+ cls.__protocol_attrs__ = _get_protocol_attrs(cls)
+
+ def __subclasscheck__(cls, other):
+ if cls is Protocol:
+ return type.__subclasscheck__(cls, other)
+ if (
+ getattr(cls, '_is_protocol', False)
+ and not _allow_reckless_class_checks()
+ ):
+ if not getattr(cls, '_is_runtime_protocol', False):
+ _type_check_issubclass_arg_1(other)
+ raise TypeError(
+ "Instance and class checks can only be used with "
+ "@runtime_checkable protocols"
+ )
+ if (
+ # this attribute is set by @runtime_checkable:
+ cls.__non_callable_proto_members__
+ and cls.__dict__.get("__subclasshook__") is _proto_hook
+ ):
+ _type_check_issubclass_arg_1(other)
+ non_method_attrs = sorted(cls.__non_callable_proto_members__)
+ raise TypeError(
+ "Protocols with non-method members don't support issubclass()."
+ f" Non-method members: {str(non_method_attrs)[1:-1]}."
+ )
+ return abc.ABCMeta.__subclasscheck__(cls, other)
+
+ def __instancecheck__(cls, instance):
+ # We need this method for situations where attributes are
+ # assigned in __init__.
+ if cls is Protocol:
+ return type.__instancecheck__(cls, instance)
+ if not getattr(cls, "_is_protocol", False):
+ # i.e., it's a concrete subclass of a protocol
+ return abc.ABCMeta.__instancecheck__(cls, instance)
+
+ if (
+ not getattr(cls, '_is_runtime_protocol', False) and
+ not _allow_reckless_class_checks()
+ ):
+ raise TypeError("Instance and class checks can only be used with"
+ " @runtime_checkable protocols")
+
+ if abc.ABCMeta.__instancecheck__(cls, instance):
+ return True
+
+ for attr in cls.__protocol_attrs__:
+ try:
+ val = inspect.getattr_static(instance, attr)
+ except AttributeError:
+ break
+ # this attribute is set by @runtime_checkable:
+ if val is None and attr not in cls.__non_callable_proto_members__:
+ break
+ else:
+ return True
+
+ return False
+
+ def __eq__(cls, other):
+ # Hack so that typing.Generic.__class_getitem__
+ # treats typing_extensions.Protocol
+ # as equivalent to typing.Protocol
+ if abc.ABCMeta.__eq__(cls, other) is True:
+ return True
+ return cls is Protocol and other is typing.Protocol
+
+ # This has to be defined, or the abc-module cache
+ # complains about classes with this metaclass being unhashable,
+ # if we define only __eq__!
+ def __hash__(cls) -> int:
+ return type.__hash__(cls)
+
+ @classmethod
+ def _proto_hook(cls, other):
+ if not cls.__dict__.get('_is_protocol', False):
+ return NotImplemented
+
+ for attr in cls.__protocol_attrs__:
+ for base in other.__mro__:
+                # Check if the member appears in the class dictionary...
+ if attr in base.__dict__:
+ if base.__dict__[attr] is None:
+ return NotImplemented
+ break
+
+ # ...or in annotations, if it is a sub-protocol.
+ annotations = getattr(base, '__annotations__', {})
+ if (
+ isinstance(annotations, collections.abc.Mapping)
+ and attr in annotations
+ and is_protocol(other)
+ ):
+ break
+ else:
+ return NotImplemented
+ return True
+
+ class Protocol(typing.Generic, metaclass=_ProtocolMeta):
+ __doc__ = typing.Protocol.__doc__
+ __slots__ = ()
+ _is_protocol = True
+ _is_runtime_protocol = False
+
+ def __init_subclass__(cls, *args, **kwargs):
+ super().__init_subclass__(*args, **kwargs)
+
+ # Determine if this is a protocol or a concrete subclass.
+ if not cls.__dict__.get('_is_protocol', False):
+ cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+ # Set (or override) the protocol subclass hook.
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = _proto_hook
+
+ # Prohibit instantiation for protocol classes
+ if cls._is_protocol and cls.__init__ is Protocol.__init__:
+ cls.__init__ = _no_init
+
+
+if sys.version_info >= (3, 13):
+ runtime_checkable = typing.runtime_checkable
+else:
+ def runtime_checkable(cls):
+ """Mark a protocol class as a runtime protocol.
+
+ Such protocol can be used with isinstance() and issubclass().
+ Raise TypeError if applied to a non-protocol class.
+ This allows a simple-minded structural check very similar to
+        one-trick ponies in collections.abc such as Iterable.
+
+ For example::
+
+ @runtime_checkable
+ class Closable(Protocol):
+ def close(self): ...
+
+ assert isinstance(open('/some/file'), Closable)
+
+ Warning: this will check only the presence of the required methods,
+ not their type signatures!
+ """
+ if not issubclass(cls, typing.Generic) or not getattr(cls, '_is_protocol', False):
+ raise TypeError(f'@runtime_checkable can be only applied to protocol classes,'
+ f' got {cls!r}')
+ cls._is_runtime_protocol = True
+
+ # typing.Protocol classes on <=3.11 break if we execute this block,
+ # because typing.Protocol classes on <=3.11 don't have a
+ # `__protocol_attrs__` attribute, and this block relies on the
+ # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+
+ # break if we *don't* execute this block, because *they* assume that all
+ # protocol classes have a `__non_callable_proto_members__` attribute
+ # (which this block sets)
+ if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2):
+ # PEP 544 prohibits using issubclass()
+ # with protocols that have non-method members.
+ # See gh-113320 for why we compute this attribute here,
+ # rather than in `_ProtocolMeta.__init__`
+ cls.__non_callable_proto_members__ = set()
+ for attr in cls.__protocol_attrs__:
+ try:
+ is_callable = callable(getattr(cls, attr, None))
+ except Exception as e:
+ raise TypeError(
+ f"Failed to determine whether protocol member {attr!r} "
+ "is a method member"
+ ) from e
+ else:
+ if not is_callable:
+ cls.__non_callable_proto_members__.add(attr)
+
+ return cls
+
+
+# The "runtime" alias exists for backwards compatibility.
+runtime = runtime_checkable
+
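+# A minimal usage sketch for the Protocol/runtime_checkable pair defined (or
+# re-exported) above; the `HasClose` and `File` names are hypothetical:
+#
+#     from typing_extensions import Protocol, runtime_checkable
+#
+#     @runtime_checkable
+#     class HasClose(Protocol):
+#         def close(self) -> None: ...
+#
+#     class File:
+#         def close(self) -> None: ...
+#
+#     assert isinstance(File(), HasClose)        # structural check passes
+#     assert not isinstance(object(), HasClose)  # no close() attribute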
+
+# Our version of runtime-checkable protocols is faster on Python 3.8-3.11
+if sys.version_info >= (3, 12):
+ SupportsInt = typing.SupportsInt
+ SupportsFloat = typing.SupportsFloat
+ SupportsComplex = typing.SupportsComplex
+ SupportsBytes = typing.SupportsBytes
+ SupportsIndex = typing.SupportsIndex
+ SupportsAbs = typing.SupportsAbs
+ SupportsRound = typing.SupportsRound
+else:
+ @runtime_checkable
+ class SupportsInt(Protocol):
+ """An ABC with one abstract method __int__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __int__(self) -> int:
+ pass
+
+ @runtime_checkable
+ class SupportsFloat(Protocol):
+ """An ABC with one abstract method __float__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __float__(self) -> float:
+ pass
+
+ @runtime_checkable
+ class SupportsComplex(Protocol):
+ """An ABC with one abstract method __complex__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __complex__(self) -> complex:
+ pass
+
+ @runtime_checkable
+ class SupportsBytes(Protocol):
+ """An ABC with one abstract method __bytes__."""
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __bytes__(self) -> bytes:
+ pass
+
+ @runtime_checkable
+ class SupportsIndex(Protocol):
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __index__(self) -> int:
+ pass
+
+ @runtime_checkable
+ class SupportsAbs(Protocol[T_co]):
+ """
+ An ABC with one abstract method __abs__ that is covariant in its return type.
+ """
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __abs__(self) -> T_co:
+ pass
+
+ @runtime_checkable
+ class SupportsRound(Protocol[T_co]):
+ """
+ An ABC with one abstract method __round__ that is covariant in its return type.
+ """
+ __slots__ = ()
+
+ @abc.abstractmethod
+ def __round__(self, ndigits: int = 0) -> T_co:
+ pass
+
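+# The Supports* protocols above are runtime-checkable, so isinstance() does a
+# structural check for the single special method each one names. For example
+# (built-in types only, no extra assumptions):
+#
+#     assert isinstance(3, SupportsIndex)          # int defines __index__
+#     assert isinstance(1.5, SupportsRound)        # float defines __round__
+#     assert not isinstance("abc", SupportsFloat)  # str defines no __float__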
+
+def _ensure_subclassable(mro_entries):
+ def inner(func):
+ if sys.implementation.name == "pypy" and sys.version_info < (3, 9):
+ cls_dict = {
+ "__call__": staticmethod(func),
+ "__mro_entries__": staticmethod(mro_entries)
+ }
+ t = type(func.__name__, (), cls_dict)
+ return functools.update_wrapper(t(), func)
+ else:
+ func.__mro_entries__ = mro_entries
+ return func
+ return inner
+
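+# Rough sketch of what _ensure_subclassable() enables: the decorated factory
+# function can appear in a bases list, with __mro_entries__ supplying the real
+# base classes. The `make_base` name below is hypothetical:
+#
+#     @_ensure_subclassable(lambda bases: (object,))
+#     def make_base(name):
+#         return type(name, (), {})
+#
+#     class Child(make_base):  # __mro_entries__ substitutes `object` here
+#         pass
+#
+#     assert Child.__bases__ == (object,)
+#     assert Child.__orig_bases__ == (make_base,)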
+
+# Update this to something like >=3.13.0b1 if and when
+# PEP 728 is implemented in CPython
+_PEP_728_IMPLEMENTED = False
+
+if _PEP_728_IMPLEMENTED:
+ # The standard library TypedDict in Python 3.8 does not store runtime information
+ # about which (if any) keys are optional. See https://bugs.python.org/issue38834
+ # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
+ # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059
+ # The standard library TypedDict below Python 3.11 does not store runtime
+ # information about optional and required keys when using Required or NotRequired.
+ # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
+ # Aaaand on 3.12 we add __orig_bases__ to TypedDict
+ # to enable better runtime introspection.
+ # On 3.13 we deprecate some odd ways of creating TypedDicts.
+ # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier.
+ # PEP 728 (still pending) makes more changes.
+ TypedDict = typing.TypedDict
+ _TypedDictMeta = typing._TypedDictMeta
+ is_typeddict = typing.is_typeddict
+else:
+ # 3.10.0 and later
+ _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters
+
+ def _get_typeddict_qualifiers(annotation_type):
+ while True:
+ annotation_origin = get_origin(annotation_type)
+ if annotation_origin is Annotated:
+ annotation_args = get_args(annotation_type)
+ if annotation_args:
+ annotation_type = annotation_args[0]
+ else:
+ break
+ elif annotation_origin is Required:
+ yield Required
+ annotation_type, = get_args(annotation_type)
+ elif annotation_origin is NotRequired:
+ yield NotRequired
+ annotation_type, = get_args(annotation_type)
+ elif annotation_origin is ReadOnly:
+ yield ReadOnly
+ annotation_type, = get_args(annotation_type)
+ else:
+ break
+
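+    # A quick sketch of what the generator above yields for a few annotation
+    # shapes (illustrative values only; the behaviour follows the loop above):
+    #
+    #     set(_get_typeddict_qualifiers(Required[int]))              == {Required}
+    #     set(_get_typeddict_qualifiers(ReadOnly[NotRequired[str]])) == {ReadOnly, NotRequired}
+    #     set(_get_typeddict_qualifiers(int))                        == set()
+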
+ class _TypedDictMeta(type):
+ def __new__(cls, name, bases, ns, *, total=True, closed=False):
+ """Create new typed dict class object.
+
+ This method is called when TypedDict is subclassed,
+ or when TypedDict is instantiated. This way
+ TypedDict supports all three syntax forms described in its docstring.
+ Subclasses and instances of TypedDict return actual dictionaries.
+ """
+ for base in bases:
+ if type(base) is not _TypedDictMeta and base is not typing.Generic:
+ raise TypeError('cannot inherit from both a TypedDict type '
+ 'and a non-TypedDict base class')
+
+ if any(issubclass(b, typing.Generic) for b in bases):
+ generic_base = (typing.Generic,)
+ else:
+ generic_base = ()
+
+ # typing.py generally doesn't let you inherit from plain Generic, unless
+ # the name of the class happens to be "Protocol"
+ tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns)
+ tp_dict.__name__ = name
+ if tp_dict.__qualname__ == "Protocol":
+ tp_dict.__qualname__ = name
+
+ if not hasattr(tp_dict, '__orig_bases__'):
+ tp_dict.__orig_bases__ = bases
+
+ annotations = {}
+ if "__annotations__" in ns:
+ own_annotations = ns["__annotations__"]
+ elif "__annotate__" in ns:
+ # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+ own_annotations = ns["__annotate__"](1)
+ else:
+ own_annotations = {}
+ msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+ if _TAKES_MODULE:
+ own_annotations = {
+ n: typing._type_check(tp, msg, module=tp_dict.__module__)
+ for n, tp in own_annotations.items()
+ }
+ else:
+ own_annotations = {
+ n: typing._type_check(tp, msg)
+ for n, tp in own_annotations.items()
+ }
+ required_keys = set()
+ optional_keys = set()
+ readonly_keys = set()
+ mutable_keys = set()
+ extra_items_type = None
+
+ for base in bases:
+ base_dict = base.__dict__
+
+ annotations.update(base_dict.get('__annotations__', {}))
+ required_keys.update(base_dict.get('__required_keys__', ()))
+ optional_keys.update(base_dict.get('__optional_keys__', ()))
+ readonly_keys.update(base_dict.get('__readonly_keys__', ()))
+ mutable_keys.update(base_dict.get('__mutable_keys__', ()))
+ base_extra_items_type = base_dict.get('__extra_items__', None)
+ if base_extra_items_type is not None:
+ extra_items_type = base_extra_items_type
+
+ if closed and extra_items_type is None:
+ extra_items_type = Never
+ if closed and "__extra_items__" in own_annotations:
+ annotation_type = own_annotations.pop("__extra_items__")
+ qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+ if Required in qualifiers:
+ raise TypeError(
+ "Special key __extra_items__ does not support "
+ "Required"
+ )
+ if NotRequired in qualifiers:
+ raise TypeError(
+ "Special key __extra_items__ does not support "
+ "NotRequired"
+ )
+ extra_items_type = annotation_type
+
+ annotations.update(own_annotations)
+ for annotation_key, annotation_type in own_annotations.items():
+ qualifiers = set(_get_typeddict_qualifiers(annotation_type))
+
+ if Required in qualifiers:
+ required_keys.add(annotation_key)
+ elif NotRequired in qualifiers:
+ optional_keys.add(annotation_key)
+ elif total:
+ required_keys.add(annotation_key)
+ else:
+ optional_keys.add(annotation_key)
+ if ReadOnly in qualifiers:
+ mutable_keys.discard(annotation_key)
+ readonly_keys.add(annotation_key)
+ else:
+ mutable_keys.add(annotation_key)
+ readonly_keys.discard(annotation_key)
+
+ tp_dict.__annotations__ = annotations
+ tp_dict.__required_keys__ = frozenset(required_keys)
+ tp_dict.__optional_keys__ = frozenset(optional_keys)
+ tp_dict.__readonly_keys__ = frozenset(readonly_keys)
+ tp_dict.__mutable_keys__ = frozenset(mutable_keys)
+ if not hasattr(tp_dict, '__total__'):
+ tp_dict.__total__ = total
+ tp_dict.__closed__ = closed
+ tp_dict.__extra_items__ = extra_items_type
+ return tp_dict
+
+ __call__ = dict # static method
+
+ def __subclasscheck__(cls, other):
+ # Typed dicts are only for static structural subtyping.
+ raise TypeError('TypedDict does not support instance and class checks')
+
+ __instancecheck__ = __subclasscheck__
+
+ _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+
+ @_ensure_subclassable(lambda bases: (_TypedDict,))
+ def TypedDict(typename, fields=_marker, /, *, total=True, closed=False, **kwargs):
+ """A simple typed namespace. At runtime it is equivalent to a plain dict.
+
+ TypedDict creates a dictionary type such that a type checker will expect all
+ instances to have a certain set of keys, where each key is
+ associated with a value of a consistent type. This expectation
+ is not checked at runtime.
+
+ Usage::
+
+ class Point2D(TypedDict):
+ x: int
+ y: int
+ label: str
+
+ a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
+ b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
+
+ assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+ The type info can be accessed via the Point2D.__annotations__ dict, and
+ the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+ TypedDict supports an additional equivalent form::
+
+ Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+ By default, all keys must be present in a TypedDict. It is possible
+ to override this by specifying totality::
+
+ class Point2D(TypedDict, total=False):
+ x: int
+ y: int
+
+ This means that a Point2D TypedDict can have any of the keys omitted. A type
+ checker is only expected to support a literal False or True as the value of
+ the total argument. True is the default, and makes all items defined in the
+ class body be required.
+
+ The Required and NotRequired special forms can also be used to mark
+ individual keys as being required or not required::
+
+ class Point2D(TypedDict):
+ x: int # the "x" key must always be present (Required is the default)
+ y: NotRequired[int] # the "y" key can be omitted
+
+ See PEP 655 for more details on Required and NotRequired.
+ """
+ if fields is _marker or fields is None:
+ if fields is _marker:
+ deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+ else:
+ deprecated_thing = "Passing `None` as the 'fields' parameter"
+
+ example = f"`{typename} = TypedDict({typename!r}, {{}})`"
+ deprecation_msg = (
+ f"{deprecated_thing} is deprecated and will be disallowed in "
+ "Python 3.15. To create a TypedDict class with 0 fields "
+ "using the functional syntax, pass an empty dictionary, e.g. "
+ ) + example + "."
+ warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2)
+ if closed is not False and closed is not True:
+ kwargs["closed"] = closed
+ closed = False
+ fields = kwargs
+ elif kwargs:
+ raise TypeError("TypedDict takes either a dict or keyword arguments,"
+ " but not both")
+ if kwargs:
+ if sys.version_info >= (3, 13):
+ raise TypeError("TypedDict takes no keyword arguments")
+ warnings.warn(
+ "The kwargs-based syntax for TypedDict definitions is deprecated "
+ "in Python 3.11, will be removed in Python 3.13, and may not be "
+ "understood by third-party type checkers.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ ns = {'__annotations__': dict(fields)}
+ module = _caller()
+ if module is not None:
+ # Setting correct module is necessary to make typed dict classes pickleable.
+ ns['__module__'] = module
+
+ td = _TypedDictMeta(typename, (), ns, total=total, closed=closed)
+ td.__orig_bases__ = (TypedDict,)
+ return td
+
+ if hasattr(typing, "_TypedDictMeta"):
+ _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
+ else:
+ _TYPEDDICT_TYPES = (_TypedDictMeta,)
+
+ def is_typeddict(tp):
+ """Check if an annotation is a TypedDict class
+
+ For example::
+ class Film(TypedDict):
+ title: str
+ year: int
+
+ is_typeddict(Film) # => True
+ is_typeddict(Union[list, str]) # => False
+ """
+ # On 3.8, this would otherwise return True
+ if hasattr(typing, "TypedDict") and tp is typing.TypedDict:
+ return False
+ return isinstance(tp, _TYPEDDICT_TYPES)
+
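+# A small sketch of the runtime introspection data the TypedDict backport
+# records (the `Movie` class is hypothetical):
+#
+#     from typing_extensions import Required, TypedDict, is_typeddict
+#
+#     class Movie(TypedDict, total=False):
+#         title: Required[str]
+#         year: int
+#
+#     assert Movie.__required_keys__ == frozenset({'title'})
+#     assert Movie.__optional_keys__ == frozenset({'year'})
+#     assert is_typeddict(Movie)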
+
+if hasattr(typing, "assert_type"):
+ assert_type = typing.assert_type
+
+else:
+ def assert_type(val, typ, /):
+ """Assert (to the type checker) that the value is of the given type.
+
+ When the type checker encounters a call to assert_type(), it
+ emits an error if the value is not of the specified type::
+
+ def greet(name: str) -> None:
+ assert_type(name, str) # ok
+ assert_type(name, int) # type checker error
+
+ At runtime this returns the first argument unchanged and otherwise
+ does nothing.
+ """
+ return val
+
+
+if hasattr(typing, "ReadOnly"): # 3.13+
+ get_type_hints = typing.get_type_hints
+else:  # <=3.12
+ # replaces _strip_annotations()
+ def _strip_extras(t):
+ """Strips Annotated, Required and NotRequired from a given type."""
+ if isinstance(t, _AnnotatedAlias):
+ return _strip_extras(t.__origin__)
+ if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired, ReadOnly):
+ return _strip_extras(t.__args__[0])
+ if isinstance(t, typing._GenericAlias):
+ stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return t.copy_with(stripped_args)
+ if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias):
+ stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return _types.GenericAlias(t.__origin__, stripped_args)
+ if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType):
+ stripped_args = tuple(_strip_extras(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return functools.reduce(operator.or_, stripped_args)
+
+ return t
+
+ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+ """Return type hints for an object.
+
+ This is often the same as obj.__annotations__, but it handles
+ forward references encoded as string literals, adds Optional[t] if a
+ default value equal to None is set and recursively replaces all
+        'Annotated[T, ...]', 'Required[T]', 'NotRequired[T]' or 'ReadOnly[T]' with 'T'
+ (unless 'include_extras=True').
+
+ The argument may be a module, class, method, or function. The annotations
+ are returned as a dictionary. For classes, annotations include also
+ inherited members.
+
+ TypeError is raised if the argument is not of a type that can contain
+ annotations, and an empty dictionary is returned if no annotations are
+ present.
+
+ BEWARE -- the behavior of globalns and localns is counterintuitive
+ (unless you are familiar with how eval() and exec() work). The
+ search order is locals first, then globals.
+
+ - If no dict arguments are passed, an attempt is made to use the
+ globals from obj (or the respective module's globals for classes),
+ and these are also used as the locals. If the object does not appear
+ to have globals, an empty dictionary is used.
+
+ - If one dict argument is passed, it is used for both globals and
+ locals.
+
+ - If two dict arguments are passed, they specify globals and
+ locals, respectively.
+ """
+ if hasattr(typing, "Annotated"): # 3.9+
+ hint = typing.get_type_hints(
+ obj, globalns=globalns, localns=localns, include_extras=True
+ )
+ else: # 3.8
+ hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
+ if include_extras:
+ return hint
+ return {k: _strip_extras(t) for k, t in hint.items()}
+
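+# Brief sketch of how include_extras interacts with the stripping performed by
+# _strip_extras() (the function `f` is hypothetical):
+#
+#     from typing_extensions import Annotated, get_type_hints
+#
+#     def f(x: Annotated[int, "units"]) -> None: ...
+#
+#     assert get_type_hints(f)["x"] is int
+#     assert get_type_hints(f, include_extras=True)["x"] == Annotated[int, "units"]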
+
+# Python 3.9+ has PEP 593 (Annotated)
+if hasattr(typing, 'Annotated'):
+ Annotated = typing.Annotated
+ # Not exported and not a public API, but needed for get_origin() and get_args()
+ # to work.
+ _AnnotatedAlias = typing._AnnotatedAlias
+# 3.8
+else:
+ class _AnnotatedAlias(typing._GenericAlias, _root=True):
+ """Runtime representation of an annotated type.
+
+ At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+ with extra annotations. The alias behaves like a normal typing alias,
+ instantiating is the same as instantiating the underlying type, binding
+ it to types is also the same.
+ """
+ def __init__(self, origin, metadata):
+ if isinstance(origin, _AnnotatedAlias):
+ metadata = origin.__metadata__ + metadata
+ origin = origin.__origin__
+ super().__init__(origin, origin)
+ self.__metadata__ = metadata
+
+ def copy_with(self, params):
+ assert len(params) == 1
+ new_type = params[0]
+ return _AnnotatedAlias(new_type, self.__metadata__)
+
+ def __repr__(self):
+ return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
+ f"{', '.join(repr(a) for a in self.__metadata__)}]")
+
+ def __reduce__(self):
+ return operator.getitem, (
+ Annotated, (self.__origin__, *self.__metadata__)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, _AnnotatedAlias):
+ return NotImplemented
+ if self.__origin__ != other.__origin__:
+ return False
+ return self.__metadata__ == other.__metadata__
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__metadata__))
+
+ class Annotated:
+ """Add context specific metadata to a type.
+
+ Example: Annotated[int, runtime_check.Unsigned] indicates to the
+ hypothetical runtime_check module that this type is an unsigned int.
+ Every other consumer of this type can ignore this metadata and treat
+ this type as int.
+
+ The first argument to Annotated must be a valid type (and will be in
+ the __origin__ field), the remaining arguments are kept as a tuple in
+        the __metadata__ field.
+
+ Details:
+
+ - It's an error to call `Annotated` with less than two arguments.
+ - Nested Annotated are flattened::
+
+ Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+ - Instantiating an annotated type is equivalent to instantiating the
+ underlying type::
+
+ Annotated[C, Ann1](5) == C(5)
+
+ - Annotated can be used as a generic type alias::
+
+ Optimized = Annotated[T, runtime.Optimize()]
+ Optimized[int] == Annotated[int, runtime.Optimize()]
+
+ OptimizedList = Annotated[List[T], runtime.Optimize()]
+ OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwargs):
+ raise TypeError("Type Annotated cannot be instantiated.")
+
+ @typing._tp_cache
+ def __class_getitem__(cls, params):
+ if not isinstance(params, tuple) or len(params) < 2:
+ raise TypeError("Annotated[...] should be used "
+ "with at least two arguments (a type and an "
+ "annotation).")
+ allowed_special_forms = (ClassVar, Final)
+ if get_origin(params[0]) in allowed_special_forms:
+ origin = params[0]
+ else:
+ msg = "Annotated[t, ...]: t must be a type."
+ origin = typing._type_check(params[0], msg)
+ metadata = tuple(params[1:])
+ return _AnnotatedAlias(origin, metadata)
+
+ def __init_subclass__(cls, *args, **kwargs):
+ raise TypeError(
+ f"Cannot subclass {cls.__module__}.Annotated"
+ )
+
+# Python 3.8 has get_origin() and get_args() but those implementations aren't
+# Annotated-aware, so we can't use those. Python 3.9's versions don't support
+# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
+if sys.version_info[:2] >= (3, 10):
+ get_origin = typing.get_origin
+ get_args = typing.get_args
+# 3.8-3.9
+else:
+ try:
+ # 3.9+
+ from typing import _BaseGenericAlias
+ except ImportError:
+ _BaseGenericAlias = typing._GenericAlias
+ try:
+ # 3.9+
+ from typing import GenericAlias as _typing_GenericAlias
+ except ImportError:
+ _typing_GenericAlias = typing._GenericAlias
+
+ def get_origin(tp):
+ """Get the unsubscripted version of a type.
+
+ This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+ and Annotated. Return None for unsupported types. Examples::
+
+ get_origin(Literal[42]) is Literal
+ get_origin(int) is None
+ get_origin(ClassVar[int]) is ClassVar
+ get_origin(Generic) is Generic
+ get_origin(Generic[T]) is Generic
+ get_origin(Union[T, int]) is Union
+ get_origin(List[Tuple[T, T]][int]) == list
+ get_origin(P.args) is P
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return Annotated
+ if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
+ ParamSpecArgs, ParamSpecKwargs)):
+ return tp.__origin__
+ if tp is typing.Generic:
+ return typing.Generic
+ return None
+
+ def get_args(tp):
+ """Get type arguments with all substitutions performed.
+
+ For unions, basic simplifications used by Union constructor are performed.
+ Examples::
+ get_args(Dict[str, int]) == (str, int)
+ get_args(int) == ()
+ get_args(Union[int, Union[T, int], str][int]) == (int, str)
+ get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+ get_args(Callable[[], T][int]) == ([], int)
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return (tp.__origin__, *tp.__metadata__)
+ if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
+ if getattr(tp, "_special", False):
+ return ()
+ res = tp.__args__
+ if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
+ res = (list(res[:-1]), res[-1])
+ return res
+ return ()
+
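+# The Annotated-aware behaviour that motivates this backport, in brief
+# (standard typing constructs only):
+#
+#     assert get_origin(Annotated[int, "meta"]) is Annotated
+#     assert get_args(Annotated[int, "meta"]) == (int, "meta")
+#     assert get_origin(typing.List[int]) is list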
+
+# 3.10+
+if hasattr(typing, 'TypeAlias'):
+ TypeAlias = typing.TypeAlias
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ @_ExtensionsSpecialForm
+ def TypeAlias(self, parameters):
+ """Special marker indicating that an assignment should
+ be recognized as a proper type alias definition by type
+ checkers.
+
+ For example::
+
+ Predicate: TypeAlias = Callable[..., bool]
+
+ It's invalid when used anywhere except as in the example above.
+ """
+ raise TypeError(f"{self} is not subscriptable")
+# 3.8
+else:
+ TypeAlias = _ExtensionsSpecialForm(
+ 'TypeAlias',
+ doc="""Special marker indicating that an assignment should
+ be recognized as a proper type alias definition by type
+ checkers.
+
+ For example::
+
+ Predicate: TypeAlias = Callable[..., bool]
+
+ It's invalid when used anywhere except as in the example
+ above."""
+ )
+
+
+if hasattr(typing, "NoDefault"):
+ NoDefault = typing.NoDefault
+else:
+ class NoDefaultTypeMeta(type):
+ def __setattr__(cls, attr, value):
+ # TypeError is consistent with the behavior of NoneType
+ raise TypeError(
+ f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}"
+ )
+
+ class NoDefaultType(metaclass=NoDefaultTypeMeta):
+ """The type of the NoDefault singleton."""
+
+ __slots__ = ()
+
+ def __new__(cls):
+ return globals().get("NoDefault") or object.__new__(cls)
+
+ def __repr__(self):
+ return "typing_extensions.NoDefault"
+
+ def __reduce__(self):
+ return "NoDefault"
+
+ NoDefault = NoDefaultType()
+ del NoDefaultType, NoDefaultTypeMeta
+
+
+def _set_default(type_param, default):
+ type_param.has_default = lambda: default is not NoDefault
+ type_param.__default__ = default
+
+
+def _set_module(typevarlike):
+ # for pickling:
+ def_mod = _caller(depth=3)
+ if def_mod != 'typing_extensions':
+ typevarlike.__module__ = def_mod
+
+
+class _DefaultMixin:
+ """Mixin for TypeVarLike defaults."""
+
+ __slots__ = ()
+ __init__ = _set_default
+
+
+# Classes using this metaclass must provide a _backported_typevarlike ClassVar
+class _TypeVarLikeMeta(type):
+ def __instancecheck__(cls, __instance: Any) -> bool:
+ return isinstance(__instance, cls._backported_typevarlike)
+
+
+if _PEP_696_IMPLEMENTED:
+ from typing import TypeVar
+else:
+ # Add default and infer_variance parameters from PEP 696 and 695
+ class TypeVar(metaclass=_TypeVarLikeMeta):
+ """Type variable."""
+
+ _backported_typevarlike = typing.TypeVar
+
+ def __new__(cls, name, *constraints, bound=None,
+ covariant=False, contravariant=False,
+ default=NoDefault, infer_variance=False):
+ if hasattr(typing, "TypeAliasType"):
+ # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar
+ typevar = typing.TypeVar(name, *constraints, bound=bound,
+ covariant=covariant, contravariant=contravariant,
+ infer_variance=infer_variance)
+ else:
+ typevar = typing.TypeVar(name, *constraints, bound=bound,
+ covariant=covariant, contravariant=contravariant)
+ if infer_variance and (covariant or contravariant):
+ raise ValueError("Variance cannot be specified with infer_variance.")
+ typevar.__infer_variance__ = infer_variance
+
+ _set_default(typevar, default)
+ _set_module(typevar)
+
+ def _tvar_prepare_subst(alias, args):
+ if (
+ typevar.has_default()
+ and alias.__parameters__.index(typevar) == len(args)
+ ):
+ args += (typevar.__default__,)
+ return args
+
+ typevar.__typing_prepare_subst__ = _tvar_prepare_subst
+ return typevar
+
+ def __init_subclass__(cls) -> None:
+ raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type")
+
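+# A minimal sketch of the PEP 696 ``default=`` parameter added by the backport
+# above (the type-variable names are illustrative):
+#
+#     T = TypeVar("T", default=int)
+#     U = TypeVar("U")
+#
+#     assert T.has_default() and T.__default__ is int
+#     assert not U.has_default()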
+
+# Python 3.10+ has PEP 612
+if hasattr(typing, 'ParamSpecArgs'):
+ ParamSpecArgs = typing.ParamSpecArgs
+ ParamSpecKwargs = typing.ParamSpecKwargs
+# 3.8-3.9
+else:
+ class _Immutable:
+ """Mixin to indicate that object should not be copied."""
+ __slots__ = ()
+
+ def __copy__(self):
+ return self
+
+ def __deepcopy__(self, memo):
+ return self
+
+ class ParamSpecArgs(_Immutable):
+ """The args for a ParamSpec object.
+
+ Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
+
+ ParamSpecArgs objects have a reference back to their ParamSpec:
+
+ P.args.__origin__ is P
+
+ This type is meant for runtime introspection and has no special meaning to
+ static type checkers.
+ """
+ def __init__(self, origin):
+ self.__origin__ = origin
+
+ def __repr__(self):
+ return f"{self.__origin__.__name__}.args"
+
+ def __eq__(self, other):
+ if not isinstance(other, ParamSpecArgs):
+ return NotImplemented
+ return self.__origin__ == other.__origin__
+
+ class ParamSpecKwargs(_Immutable):
+ """The kwargs for a ParamSpec object.
+
+ Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
+
+ ParamSpecKwargs objects have a reference back to their ParamSpec:
+
+ P.kwargs.__origin__ is P
+
+ This type is meant for runtime introspection and has no special meaning to
+ static type checkers.
+ """
+ def __init__(self, origin):
+ self.__origin__ = origin
+
+ def __repr__(self):
+ return f"{self.__origin__.__name__}.kwargs"
+
+ def __eq__(self, other):
+ if not isinstance(other, ParamSpecKwargs):
+ return NotImplemented
+ return self.__origin__ == other.__origin__
+
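+# Sketch of the runtime introspection these objects provide (the ParamSpec
+# name ``P`` is illustrative; ParamSpec itself is defined just below):
+#
+#     P = ParamSpec("P")
+#
+#     assert P.args.__origin__ is P
+#     assert P.kwargs.__origin__ is P
+#     assert repr(P.args) == "P.args"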
+
+if _PEP_696_IMPLEMENTED:
+ from typing import ParamSpec
+
+# 3.10+
+elif hasattr(typing, 'ParamSpec'):
+
+ # Add default parameter - PEP 696
+ class ParamSpec(metaclass=_TypeVarLikeMeta):
+ """Parameter specification."""
+
+ _backported_typevarlike = typing.ParamSpec
+
+ def __new__(cls, name, *, bound=None,
+ covariant=False, contravariant=False,
+ infer_variance=False, default=NoDefault):
+ if hasattr(typing, "TypeAliasType"):
+ # PEP 695 implemented, can pass infer_variance to typing.TypeVar
+ paramspec = typing.ParamSpec(name, bound=bound,
+ covariant=covariant,
+ contravariant=contravariant,
+ infer_variance=infer_variance)
+ else:
+ paramspec = typing.ParamSpec(name, bound=bound,
+ covariant=covariant,
+ contravariant=contravariant)
+ paramspec.__infer_variance__ = infer_variance
+
+ _set_default(paramspec, default)
+ _set_module(paramspec)
+
+ def _paramspec_prepare_subst(alias, args):
+ params = alias.__parameters__
+ i = params.index(paramspec)
+ if i == len(args) and paramspec.has_default():
+ args = [*args, paramspec.__default__]
+ if i >= len(args):
+ raise TypeError(f"Too few arguments for {alias}")
+ # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
+ if len(params) == 1 and not typing._is_param_expr(args[0]):
+ assert i == 0
+ args = (args,)
+ # Convert lists to tuples to help other libraries cache the results.
+ elif isinstance(args[i], list):
+ args = (*args[:i], tuple(args[i]), *args[i + 1:])
+ return args
+
+ paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
+ return paramspec
+
+ def __init_subclass__(cls) -> None:
+ raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type")
+
+# 3.8-3.9
+else:
+
+ # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+ class ParamSpec(list, _DefaultMixin):
+ """Parameter specification variable.
+
+ Usage::
+
+ P = ParamSpec('P')
+
+ Parameter specification variables exist primarily for the benefit of static
+ type checkers. They are used to forward the parameter types of one
+ callable to another callable, a pattern commonly found in higher order
+ functions and decorators. They are only valid when used in ``Concatenate``,
+        or as the first argument to ``Callable``. In Python 3.10 and higher,
+ they are also supported in user-defined Generics at runtime.
+ See class Generic for more information on generic types. An
+ example for annotating a decorator::
+
+ T = TypeVar('T')
+ P = ParamSpec('P')
+
+ def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+ '''A type-safe decorator to add logging to a function.'''
+ def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+ logging.info(f'{f.__name__} was called')
+ return f(*args, **kwargs)
+ return inner
+
+ @add_logging
+ def add_two(x: float, y: float) -> float:
+ '''Add two numbers together.'''
+ return x + y
+
+ Parameter specification variables defined with covariant=True or
+ contravariant=True can be used to declare covariant or contravariant
+ generic types. These keyword arguments are valid, but their actual semantics
+ are yet to be decided. See PEP 612 for details.
+
+ Parameter specification variables can be introspected. e.g.:
+
+           P.__name__ == 'P'
+ P.__bound__ == None
+ P.__covariant__ == False
+ P.__contravariant__ == False
+
+ Note that only parameter specification variables defined in global scope can
+ be pickled.
+ """
+
+ # Trick Generic __parameters__.
+ __class__ = typing.TypeVar
+
+ @property
+ def args(self):
+ return ParamSpecArgs(self)
+
+ @property
+ def kwargs(self):
+ return ParamSpecKwargs(self)
+
+ def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
+ infer_variance=False, default=NoDefault):
+ list.__init__(self, [self])
+ self.__name__ = name
+ self.__covariant__ = bool(covariant)
+ self.__contravariant__ = bool(contravariant)
+ self.__infer_variance__ = bool(infer_variance)
+ if bound:
+ self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
+ else:
+ self.__bound__ = None
+ _DefaultMixin.__init__(self, default)
+
+ # for pickling:
+ def_mod = _caller()
+ if def_mod != 'typing_extensions':
+ self.__module__ = def_mod
+
+ def __repr__(self):
+ if self.__infer_variance__:
+ prefix = ''
+ elif self.__covariant__:
+ prefix = '+'
+ elif self.__contravariant__:
+ prefix = '-'
+ else:
+ prefix = '~'
+ return prefix + self.__name__
+
+ def __hash__(self):
+ return object.__hash__(self)
+
+ def __eq__(self, other):
+ return self is other
+
+ def __reduce__(self):
+ return self.__name__
+
+ # Hack to get typing._type_check to pass.
+ def __call__(self, *args, **kwargs):
+ pass
+
+
+# 3.8-3.9
+if not hasattr(typing, 'Concatenate'):
+ # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+ class _ConcatenateGenericAlias(list):
+
+ # Trick Generic into looking into this for __parameters__.
+ __class__ = typing._GenericAlias
+
+ # Flag in 3.8.
+ _special = False
+
+ def __init__(self, origin, args):
+ super().__init__(args)
+ self.__origin__ = origin
+ self.__args__ = args
+
+ def __repr__(self):
+ _type_repr = typing._type_repr
+ return (f'{_type_repr(self.__origin__)}'
+ f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__args__))
+
+ # Hack to get typing._type_check to pass in Generic.
+ def __call__(self, *args, **kwargs):
+ pass
+
+ @property
+ def __parameters__(self):
+ return tuple(
+ tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
+ )
+
+
+# 3.8-3.9
+@typing._tp_cache
+def _concatenate_getitem(self, parameters):
+ if parameters == ():
+ raise TypeError("Cannot take a Concatenate of no types.")
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ if not isinstance(parameters[-1], ParamSpec):
+ raise TypeError("The last parameter to Concatenate should be a "
+ "ParamSpec variable.")
+ msg = "Concatenate[arg, ...]: each arg must be a type."
+ parameters = tuple(typing._type_check(p, msg) for p in parameters)
+ return _ConcatenateGenericAlias(self, parameters)
+
+
+# 3.10+
+if hasattr(typing, 'Concatenate'):
+ Concatenate = typing.Concatenate
+ _ConcatenateGenericAlias = typing._ConcatenateGenericAlias
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ @_ExtensionsSpecialForm
+ def Concatenate(self, parameters):
+ """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+ higher order function which adds, removes or transforms parameters of a
+ callable.
+
+ For example::
+
+ Callable[Concatenate[int, P], int]
+
+ See PEP 612 for detailed information.
+ """
+ return _concatenate_getitem(self, parameters)
+# 3.8
+else:
+ class _ConcatenateForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ return _concatenate_getitem(self, parameters)
+
+ Concatenate = _ConcatenateForm(
+ 'Concatenate',
+ doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
+ higher order function which adds, removes or transforms parameters of a
+ callable.
+
+ For example::
+
+ Callable[Concatenate[int, P], int]
+
+ See PEP 612 for detailed information.
+ """)
+
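+# A short usage sketch for Concatenate together with ParamSpec (the alias name
+# is hypothetical):
+#
+#     P = ParamSpec("P")
+#
+#     # A callable that takes an int first, then whatever P captures:
+#     WithPrefix = typing.Callable[Concatenate[int, P], None]
+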
+# 3.10+
+if hasattr(typing, 'TypeGuard'):
+ TypeGuard = typing.TypeGuard
+# 3.9
+elif sys.version_info[:2] >= (3, 9):
+ @_ExtensionsSpecialForm
+ def TypeGuard(self, parameters):
+ """Special typing form used to annotate the return type of a user-defined
+ type guard function. ``TypeGuard`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeGuard[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeGuard`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the type inside ``TypeGuard``.
+
+ For example::
+
+ def is_str(val: Union[str, float]):
+ # "isinstance" type guard
+ if isinstance(val, str):
+ # Type of ``val`` is narrowed to ``str``
+ ...
+ else:
+ # Else, type of ``val`` is narrowed to ``float``.
+ ...
+
+ Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+ form of ``TypeA`` (it can even be a wider form) and this may lead to
+ type-unsafe results. The main reason is to allow for things like
+ narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+ a subtype of the former, since ``List`` is invariant. The responsibility of
+ writing type-safe type guards is left to the user.
+
+ ``TypeGuard`` also works with type variables. For more information, see
+ PEP 647 (User-Defined Type Guards).
+ """
+ item = typing._type_check(parameters, f'{self} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+ class _TypeGuardForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type')
+ return typing._GenericAlias(self, (item,))
+
+ TypeGuard = _TypeGuardForm(
+ 'TypeGuard',
+ doc="""Special typing form used to annotate the return type of a user-defined
+ type guard function. ``TypeGuard`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeGuard[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeGuard`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+ is the type inside ``TypeGuard``.
+
+ For example::
+
+ def is_str(val: Union[str, float]):
+ # "isinstance" type guard
+ if isinstance(val, str):
+ # Type of ``val`` is narrowed to ``str``
+ ...
+ else:
+ # Else, type of ``val`` is narrowed to ``float``.
+ ...
+
+ Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
+ form of ``TypeA`` (it can even be a wider form) and this may lead to
+ type-unsafe results. The main reason is to allow for things like
+ narrowing ``List[object]`` to ``List[str]`` even though the latter is not
+ a subtype of the former, since ``List`` is invariant. The responsibility of
+ writing type-safe type guards is left to the user.
+
+ ``TypeGuard`` also works with type variables. For more information, see
+ PEP 647 (User-Defined Type Guards).
+ """)
+
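+# A small self-contained sketch of a user-defined type guard using the form
+# defined above (the helper names are hypothetical):
+#
+#     from typing import List
+#
+#     def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
+#         """Return True if every element of the list is a str."""
+#         return all(isinstance(x, str) for x in val)
+#
+#     def upper_all(val: List[object]) -> None:
+#         if is_str_list(val):
+#             # A static checker narrows `val` to List[str] in this branch.
+#             print([x.upper() for x in val])
+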
+# 3.13+
+if hasattr(typing, 'TypeIs'):
+ TypeIs = typing.TypeIs
+# 3.9-3.12
+elif sys.version_info[:2] >= (3, 9):
+ @_ExtensionsSpecialForm
+ def TypeIs(self, parameters):
+ """Special typing form used to annotate the return type of a user-defined
+ type narrower function. ``TypeIs`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeIs[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeIs`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+           is the intersection of the type inside ``TypeIs`` and the argument's
+ previously known type.
+
+ For example::
+
+ def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+ return hasattr(val, '__await__')
+
+ def f(val: Union[int, Awaitable[int]]) -> int:
+ if is_awaitable(val):
+ assert_type(val, Awaitable[int])
+ else:
+ assert_type(val, int)
+
+ ``TypeIs`` also works with type variables. For more information, see
+ PEP 742 (Narrowing types with TypeIs).
+ """
+ item = typing._type_check(parameters, f'{self} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+# 3.8
+else:
+ class _TypeIsForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type')
+ return typing._GenericAlias(self, (item,))
+
+ TypeIs = _TypeIsForm(
+ 'TypeIs',
+ doc="""Special typing form used to annotate the return type of a user-defined
+ type narrower function. ``TypeIs`` only accepts a single type argument.
+ At runtime, functions marked this way should return a boolean.
+
+ ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static
+ type checkers to determine a more precise type of an expression within a
+ program's code flow. Usually type narrowing is done by analyzing
+ conditional code flow and applying the narrowing to a block of code. The
+ conditional expression here is sometimes referred to as a "type guard".
+
+ Sometimes it would be convenient to use a user-defined boolean function
+ as a type guard. Such a function should use ``TypeIs[...]`` as its
+ return type to alert static type checkers to this intention.
+
+ Using ``-> TypeIs`` tells the static type checker that for a given
+ function:
+
+ 1. The return value is a boolean.
+ 2. If the return value is ``True``, the type of its argument
+       is the intersection of the type inside ``TypeIs`` and the argument's
+ previously known type.
+
+ For example::
+
+ def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]:
+ return hasattr(val, '__await__')
+
+ def f(val: Union[int, Awaitable[int]]) -> int:
+ if is_awaitable(val):
+ assert_type(val, Awaitable[int])
+ else:
+ assert_type(val, int)
+
+ ``TypeIs`` also works with type variables. For more information, see
+ PEP 742 (Narrowing types with TypeIs).
+ """)
+
+
+# Vendored from CPython typing._SpecialForm
+class _SpecialForm(typing._Final, _root=True):
+ __slots__ = ('_name', '__doc__', '_getitem')
+
+ def __init__(self, getitem):
+ self._getitem = getitem
+ self._name = getitem.__name__
+ self.__doc__ = getitem.__doc__
+
+ def __getattr__(self, item):
+ if item in {'__name__', '__qualname__'}:
+ return self._name
+
+ raise AttributeError(item)
+
+ def __mro_entries__(self, bases):
+ raise TypeError(f"Cannot subclass {self!r}")
+
+ def __repr__(self):
+ return f'typing_extensions.{self._name}'
+
+ def __reduce__(self):
+ return self._name
+
+ def __call__(self, *args, **kwds):
+ raise TypeError(f"Cannot instantiate {self!r}")
+
+ def __or__(self, other):
+ return typing.Union[self, other]
+
+ def __ror__(self, other):
+ return typing.Union[other, self]
+
+ def __instancecheck__(self, obj):
+ raise TypeError(f"{self} cannot be used with isinstance()")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError(f"{self} cannot be used with issubclass()")
+
+ @typing._tp_cache
+ def __getitem__(self, parameters):
+ return self._getitem(self, parameters)
+
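+# In brief, the vendored _SpecialForm gives the forms defined below (when the
+# fallback definitions are used, i.e. on Pythons whose typing lacks them) the
+# following runtime behaviour:
+#
+#     assert repr(LiteralString) == "typing_extensions.LiteralString"
+#     LiteralString | None             # usable in unions via __or__/__ror__
+#     isinstance("x", LiteralString)   # raises TypeError
+#     class Sub(LiteralString): ...    # raises TypeError via __mro_entries__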
+
+if hasattr(typing, "LiteralString"): # 3.11+
+ LiteralString = typing.LiteralString
+else:
+ @_SpecialForm
+ def LiteralString(self, params):
+ """Represents an arbitrary literal string.
+
+ Example::
+
+ from typing_extensions import LiteralString
+
+ def query(sql: LiteralString) -> ...:
+ ...
+
+ query("SELECT * FROM table") # ok
+ query(f"SELECT * FROM {input()}") # not ok
+
+ See PEP 675 for details.
+
+ """
+ raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Self"): # 3.11+
+ Self = typing.Self
+else:
+ @_SpecialForm
+ def Self(self, params):
+ """Used to spell the type of "self" in classes.
+
+ Example::
+
+ from typing import Self
+
+ class ReturnsSelf:
+ def parse(self, data: bytes) -> Self:
+ ...
+ return self
+
+ """
+
+ raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, "Never"): # 3.11+
+ Never = typing.Never
+else:
+ @_SpecialForm
+ def Never(self, params):
+ """The bottom type, a type that has no members.
+
+ This can be used to define a function that should never be
+ called, or a function that never returns::
+
+ from typing_extensions import Never
+
+ def never_call_me(arg: Never) -> None:
+ pass
+
+ def int_or_str(arg: int | str) -> None:
+ never_call_me(arg) # type checker error
+ match arg:
+ case int():
+ print("It's an int")
+ case str():
+ print("It's a str")
+ case _:
+ never_call_me(arg) # ok, arg is of type Never
+
+ """
+
+ raise TypeError(f"{self} is not subscriptable")
+
+
+if hasattr(typing, 'Required'): # 3.11+
+ Required = typing.Required
+ NotRequired = typing.NotRequired
+elif sys.version_info[:2] >= (3, 9): # 3.9-3.10
+ @_ExtensionsSpecialForm
+ def Required(self, parameters):
+ """A special typing construct to mark a key of a total=False TypedDict
+ as required. For example:
+
+ class Movie(TypedDict, total=False):
+ title: Required[str]
+ year: int
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+
+ There is no runtime checking that a required key is actually provided
+ when instantiating a related TypedDict.
+ """
+ item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+ @_ExtensionsSpecialForm
+ def NotRequired(self, parameters):
+ """A special typing construct to mark a key of a TypedDict as
+ potentially missing. For example:
+
+ class Movie(TypedDict):
+ title: str
+ year: NotRequired[int]
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+ """
+ item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+else: # 3.8
+ class _RequiredForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+ Required = _RequiredForm(
+ 'Required',
+ doc="""A special typing construct to mark a key of a total=False TypedDict
+ as required. For example:
+
+ class Movie(TypedDict, total=False):
+ title: Required[str]
+ year: int
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+
+ There is no runtime checking that a required key is actually provided
+ when instantiating a related TypedDict.
+ """)
+ NotRequired = _RequiredForm(
+ 'NotRequired',
+ doc="""A special typing construct to mark a key of a TypedDict as
+ potentially missing. For example:
+
+ class Movie(TypedDict):
+ title: str
+ year: NotRequired[int]
+
+ m = Movie(
+ title='The Matrix', # typechecker error if key is omitted
+ year=1999,
+ )
+ """)
+
+
+if hasattr(typing, 'ReadOnly'):
+ ReadOnly = typing.ReadOnly
+elif sys.version_info[:2] >= (3, 9): # 3.9-3.12
+ @_ExtensionsSpecialForm
+ def ReadOnly(self, parameters):
+ """A special typing construct to mark an item of a TypedDict as read-only.
+
+ For example:
+
+ class Movie(TypedDict):
+ title: ReadOnly[str]
+ year: int
+
+ def mutate_movie(m: Movie) -> None:
+ m["year"] = 1992 # allowed
+ m["title"] = "The Matrix" # typechecker error
+
+ There is no runtime checking for this property.
+ """
+ item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+else: # 3.8
+ class _ReadOnlyForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type.')
+ return typing._GenericAlias(self, (item,))
+
+ ReadOnly = _ReadOnlyForm(
+ 'ReadOnly',
+ doc="""A special typing construct to mark a key of a TypedDict as read-only.
+
+ For example:
+
+ class Movie(TypedDict):
+ title: ReadOnly[str]
+ year: int
+
+ def mutate_movie(m: Movie) -> None:
+ m["year"] = 1992 # allowed
+ m["title"] = "The Matrix" # typechecker error
+
+    There is no runtime checking for this property.
+ """)
+
+
+_UNPACK_DOC = """\
+Type unpack operator.
+
+The type unpack operator takes the child types from some container type,
+such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For
+example:
+
+ # For some generic class `Foo`:
+ Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str]
+
+ Ts = TypeVarTuple('Ts')
+ # Specifies that `Bar` is generic in an arbitrary number of types.
+ # (Think of `Ts` as a tuple of an arbitrary number of individual
+ # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the
+ # `Generic[]`.)
+ class Bar(Generic[Unpack[Ts]]): ...
+ Bar[int] # Valid
+ Bar[int, str] # Also valid
+
+From Python 3.11, this can also be done using the `*` operator:
+
+ Foo[*tuple[int, str]]
+ class Bar(Generic[*Ts]): ...
+
+The operator can also be used along with a `TypedDict` to annotate
+`**kwargs` in a function signature. For instance:
+
+ class Movie(TypedDict):
+ name: str
+ year: int
+
+ # This function expects two keyword arguments - *name* of type `str` and
+ # *year* of type `int`.
+ def foo(**kwargs: Unpack[Movie]): ...
+
+Note that there is only some runtime checking of this operator. Not
+everything the runtime allows may be accepted by static type checkers.
+
+For more information, see PEP 646 and PEP 692.
+"""
+
+
+if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[]
+ Unpack = typing.Unpack
+
+ def _is_unpack(obj):
+ return get_origin(obj) is Unpack
+
+elif sys.version_info[:2] >= (3, 9):  # 3.9-3.11
+ class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True):
+ def __init__(self, getitem):
+ super().__init__(getitem)
+ self.__doc__ = _UNPACK_DOC
+
+ class _UnpackAlias(typing._GenericAlias, _root=True):
+ __class__ = typing.TypeVar
+
+ @property
+ def __typing_unpacked_tuple_args__(self):
+ assert self.__origin__ is Unpack
+ assert len(self.__args__) == 1
+ arg, = self.__args__
+ if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)):
+ if arg.__origin__ is not tuple:
+ raise TypeError("Unpack[...] must be used with a tuple type")
+ return arg.__args__
+ return None
+
+ @_UnpackSpecialForm
+ def Unpack(self, parameters):
+ item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
+ return _UnpackAlias(self, (item,))
+
+ def _is_unpack(obj):
+ return isinstance(obj, _UnpackAlias)
+
+else: # 3.8
+ class _UnpackAlias(typing._GenericAlias, _root=True):
+ __class__ = typing.TypeVar
+
+ class _UnpackForm(_ExtensionsSpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ item = typing._type_check(parameters,
+ f'{self._name} accepts only a single type.')
+ return _UnpackAlias(self, (item,))
+
+ Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC)
+
+ def _is_unpack(obj):
+ return isinstance(obj, _UnpackAlias)
+
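+# Brief sketch of the runtime behaviour shared by the Unpack fallbacks above
+# (standard typing constructs only):
+#
+#     unpacked = Unpack[typing.Tuple[int, str]]
+#
+#     assert _is_unpack(unpacked)
+#     assert get_origin(unpacked) is Unpack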
+
+if _PEP_696_IMPLEMENTED:
+ from typing import TypeVarTuple
+
+elif hasattr(typing, "TypeVarTuple"): # 3.11+
+
+ def _unpack_args(*args):
+ newargs = []
+ for arg in args:
+ subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+ if subargs is not None and not (subargs and subargs[-1] is ...):
+ newargs.extend(subargs)
+ else:
+ newargs.append(arg)
+ return newargs
+
+ # Add default parameter - PEP 696
+ class TypeVarTuple(metaclass=_TypeVarLikeMeta):
+ """Type variable tuple."""
+
+ _backported_typevarlike = typing.TypeVarTuple
+
+ def __new__(cls, name, *, default=NoDefault):
+ tvt = typing.TypeVarTuple(name)
+ _set_default(tvt, default)
+ _set_module(tvt)
+
+ def _typevartuple_prepare_subst(alias, args):
+ params = alias.__parameters__
+ typevartuple_index = params.index(tvt)
+ for param in params[typevartuple_index + 1:]:
+ if isinstance(param, TypeVarTuple):
+ raise TypeError(
+ f"More than one TypeVarTuple parameter in {alias}"
+ )
+
+ alen = len(args)
+ plen = len(params)
+ left = typevartuple_index
+ right = plen - typevartuple_index - 1
+ var_tuple_index = None
+ fillarg = None
+ for k, arg in enumerate(args):
+ if not isinstance(arg, type):
+ subargs = getattr(arg, '__typing_unpacked_tuple_args__', None)
+ if subargs and len(subargs) == 2 and subargs[-1] is ...:
+ if var_tuple_index is not None:
+ raise TypeError(
+ "More than one unpacked "
+ "arbitrary-length tuple argument"
+ )
+ var_tuple_index = k
+ fillarg = subargs[0]
+ if var_tuple_index is not None:
+ left = min(left, var_tuple_index)
+ right = min(right, alen - var_tuple_index - 1)
+ elif left + right > alen:
+ raise TypeError(f"Too few arguments for {alias};"
+ f" actual {alen}, expected at least {plen - 1}")
+ if left == alen - right and tvt.has_default():
+ replacement = _unpack_args(tvt.__default__)
+ else:
+ replacement = args[left: alen - right]
+
+ return (
+ *args[:left],
+ *([fillarg] * (typevartuple_index - left)),
+ replacement,
+ *([fillarg] * (plen - right - left - typevartuple_index - 1)),
+ *args[alen - right:],
+ )
+
+ tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst
+ return tvt
+
+ def __init_subclass__(self, *args, **kwds):
+ raise TypeError("Cannot subclass special typing classes")
+
+else: # <=3.10
+ class TypeVarTuple(_DefaultMixin):
+ """Type variable tuple.
+
+ Usage::
+
+ Ts = TypeVarTuple('Ts')
+
+ In the same way that a normal type variable is a stand-in for a single
+ type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
+ type such as ``Tuple[int, str]``.
+
+ Type variable tuples can be used in ``Generic`` declarations.
+ Consider the following example::
+
+ class Array(Generic[*Ts]): ...
+
+ The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
+ where ``T1`` and ``T2`` are type variables. To use these type variables
+ as type parameters of ``Array``, we must *unpack* the type variable tuple using
+ the star operator: ``*Ts``. The signature of ``Array`` then behaves
+ as if we had simply written ``class Array(Generic[T1, T2]): ...``.
+        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Ts]`` allows
+ us to parameterise the class with an *arbitrary* number of type parameters.
+
+ Type variable tuples can be used anywhere a normal ``TypeVar`` can.
+ This includes class definitions, as shown above, as well as function
+ signatures and variable annotations::
+
+ class Array(Generic[*Ts]):
+
+ def __init__(self, shape: Tuple[*Ts]):
+ self._shape: Tuple[*Ts] = shape
+
+ def get_shape(self) -> Tuple[*Ts]:
+ return self._shape
+
+ shape = (Height(480), Width(640))
+ x: Array[Height, Width] = Array(shape)
+ y = abs(x) # Inferred type is Array[Height, Width]
+ z = x + x # ... is Array[Height, Width]
+ x.get_shape() # ... is tuple[Height, Width]
+
+ """
+
+ # Trick Generic __parameters__.
+ __class__ = typing.TypeVar
+
+ def __iter__(self):
+ yield self.__unpacked__
+
+ def __init__(self, name, *, default=NoDefault):
+ self.__name__ = name
+ _DefaultMixin.__init__(self, default)
+
+ # for pickling:
+ def_mod = _caller()
+ if def_mod != 'typing_extensions':
+ self.__module__ = def_mod
+
+ self.__unpacked__ = Unpack[self]
+
+ def __repr__(self):
+ return self.__name__
+
+ def __hash__(self):
+ return object.__hash__(self)
+
+ def __eq__(self, other):
+ return self is other
+
+ def __reduce__(self):
+ return self.__name__
+
+ def __init_subclass__(self, *args, **kwds):
+ if '_root' not in kwds:
+ raise TypeError("Cannot subclass special typing classes")
+
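+# A minimal usage sketch of the PEP 696 ``default`` parameter added by these
+# backports (illustrative only; ``Ts`` and ``Box`` are hypothetical user code):
+#
+#     Ts = TypeVarTuple("Ts", default=Unpack[Tuple[str, int]])
+#
+#     class Box(Generic[Unpack[Ts]]): ...
+#
+#     # A type checker may treat an unparameterized ``Box`` as ``Box[str, int]``;
+#     # explicit arguments such as ``Box[bytes]`` override the default.
+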
+
+if hasattr(typing, "reveal_type"): # 3.11+
+ reveal_type = typing.reveal_type
+else: # <=3.10
+ def reveal_type(obj: T, /) -> T:
+ """Reveal the inferred type of a variable.
+
+ When a static type checker encounters a call to ``reveal_type()``,
+ it will emit the inferred type of the argument::
+
+ x: int = 1
+ reveal_type(x)
+
+ Running a static type checker (e.g., ``mypy``) on this example
+ will produce output similar to 'Revealed type is "builtins.int"'.
+
+ At runtime, the function prints the runtime type of the
+ argument and returns it unchanged.
+
+ """
+ print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr)
+ return obj
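+    # Runtime behavior sketch (illustrative):
+    #
+    #     x = reveal_type(1 + 1)   # prints "Runtime type is 'int'" to stderr
+    #     assert x == 2            # the argument is returned unchanged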
+
+
+if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+
+ _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH
+else: # <=3.10
+ _ASSERT_NEVER_REPR_MAX_LENGTH = 100
+
+
+if hasattr(typing, "assert_never"): # 3.11+
+ assert_never = typing.assert_never
+else: # <=3.10
+ def assert_never(arg: Never, /) -> Never:
+ """Assert to the type checker that a line of code is unreachable.
+
+ Example::
+
+ def int_or_str(arg: int | str) -> None:
+ match arg:
+ case int():
+ print("It's an int")
+ case str():
+ print("It's a str")
+ case _:
+ assert_never(arg)
+
+ If a type checker finds that a call to assert_never() is
+ reachable, it will emit an error.
+
+ At runtime, this throws an exception when called.
+
+ """
+ value = repr(arg)
+ if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH:
+ value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + '...'
+ raise AssertionError(f"Expected code to be unreachable, but got: {value}")
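+    # Runtime behavior sketch (illustrative):
+    #
+    #     assert_never("unexpected")
+    #     # AssertionError: Expected code to be unreachable, but got: 'unexpected'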
+
+
+if sys.version_info >= (3, 12): # 3.12+
+ # dataclass_transform exists in 3.11 but lacks the frozen_default parameter
+ dataclass_transform = typing.dataclass_transform
+else: # <=3.11
+ def dataclass_transform(
+ *,
+ eq_default: bool = True,
+ order_default: bool = False,
+ kw_only_default: bool = False,
+ frozen_default: bool = False,
+ field_specifiers: typing.Tuple[
+ typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
+ ...
+ ] = (),
+ **kwargs: typing.Any,
+ ) -> typing.Callable[[T], T]:
+ """Decorator that marks a function, class, or metaclass as providing
+ dataclass-like behavior.
+
+ Example:
+
+ from typing_extensions import dataclass_transform
+
+ _T = TypeVar("_T")
+
+ # Used on a decorator function
+ @dataclass_transform()
+ def create_model(cls: type[_T]) -> type[_T]:
+ ...
+ return cls
+
+ @create_model
+ class CustomerModel:
+ id: int
+ name: str
+
+ # Used on a base class
+ @dataclass_transform()
+ class ModelBase: ...
+
+ class CustomerModel(ModelBase):
+ id: int
+ name: str
+
+ # Used on a metaclass
+ @dataclass_transform()
+ class ModelMeta(type): ...
+
+ class ModelBase(metaclass=ModelMeta): ...
+
+ class CustomerModel(ModelBase):
+ id: int
+ name: str
+
+ Each of the ``CustomerModel`` classes defined in this example will now
+ behave similarly to a dataclass created with the ``@dataclasses.dataclass``
+ decorator. For example, the type checker will synthesize an ``__init__``
+ method.
+
+ The arguments to this decorator can be used to customize this behavior:
+ - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
+ True or False if it is omitted by the caller.
+ - ``order_default`` indicates whether the ``order`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+ - ``kw_only_default`` indicates whether the ``kw_only`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+ - ``frozen_default`` indicates whether the ``frozen`` parameter is
+ assumed to be True or False if it is omitted by the caller.
+ - ``field_specifiers`` specifies a static list of supported classes
+ or functions that describe fields, similar to ``dataclasses.field()``.
+
+ At runtime, this decorator records its arguments in the
+ ``__dataclass_transform__`` attribute on the decorated object.
+
+ See PEP 681 for details.
+
+ """
+ def decorator(cls_or_fn):
+ cls_or_fn.__dataclass_transform__ = {
+ "eq_default": eq_default,
+ "order_default": order_default,
+ "kw_only_default": kw_only_default,
+ "frozen_default": frozen_default,
+ "field_specifiers": field_specifiers,
+ "kwargs": kwargs,
+ }
+ return cls_or_fn
+ return decorator
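+    # Runtime introspection sketch (illustrative; ``create_model`` is the
+    # hypothetical decorator from the docstring above):
+    #
+    #     @dataclass_transform(kw_only_default=True)
+    #     def create_model(cls): ...
+    #
+    #     create_model.__dataclass_transform__["kw_only_default"]  # -> True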
+
+
+if hasattr(typing, "override"): # 3.12+
+ override = typing.override
+else: # <=3.11
+ _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])
+
+ def override(arg: _F, /) -> _F:
+ """Indicate that a method is intended to override a method in a base class.
+
+ Usage:
+
+ class Base:
+ def method(self) -> None:
+ pass
+
+ class Child(Base):
+ @override
+ def method(self) -> None:
+ super().method()
+
+ When this decorator is applied to a method, the type checker will
+ validate that it overrides a method with the same name on a base class.
+ This helps prevent bugs that may occur when a base class is changed
+ without an equivalent change to a child class.
+
+ There is no runtime checking of these properties. The decorator
+ sets the ``__override__`` attribute to ``True`` on the decorated object
+ to allow runtime introspection.
+
+ See PEP 698 for details.
+
+ """
+ try:
+ arg.__override__ = True
+ except (AttributeError, TypeError):
+ # Skip the attribute silently if it is not writable.
+ # AttributeError happens if the object has __slots__ or a
+ # read-only property, TypeError if it's a builtin class.
+ pass
+ return arg
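+    # Runtime introspection sketch (illustrative; ``Child`` is the hypothetical
+    # class from the docstring above):
+    #
+    #     Child.method.__override__  # -> True, set by this decorator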
+
+
+if hasattr(warnings, "deprecated"):
+ deprecated = warnings.deprecated
+else:
+ _T = typing.TypeVar("_T")
+
+ class deprecated:
+ """Indicate that a class, function or overload is deprecated.
+
+ When this decorator is applied to an object, the type checker
+ will generate a diagnostic on usage of the deprecated object.
+
+ Usage:
+
+ @deprecated("Use B instead")
+ class A:
+ pass
+
+ @deprecated("Use g instead")
+ def f():
+ pass
+
+ @overload
+ @deprecated("int support is deprecated")
+ def g(x: int) -> int: ...
+ @overload
+ def g(x: str) -> int: ...
+
+ The warning specified by *category* will be emitted at runtime
+ on use of deprecated objects. For functions, that happens on calls;
+ for classes, on instantiation and on creation of subclasses.
+ If the *category* is ``None``, no warning is emitted at runtime.
+ The *stacklevel* determines where the
+ warning is emitted. If it is ``1`` (the default), the warning
+ is emitted at the direct caller of the deprecated object; if it
+ is higher, it is emitted further up the stack.
+ Static type checker behavior is not affected by the *category*
+ and *stacklevel* arguments.
+
+ The deprecation message passed to the decorator is saved in the
+ ``__deprecated__`` attribute on the decorated object.
+ If applied to an overload, the decorator
+ must be after the ``@overload`` decorator for the attribute to
+ exist on the overload as returned by ``get_overloads()``.
+
+ See PEP 702 for details.
+
+ """
+ def __init__(
+ self,
+ message: str,
+ /,
+ *,
+ category: typing.Optional[typing.Type[Warning]] = DeprecationWarning,
+ stacklevel: int = 1,
+ ) -> None:
+ if not isinstance(message, str):
+ raise TypeError(
+ "Expected an object of type str for 'message', not "
+ f"{type(message).__name__!r}"
+ )
+ self.message = message
+ self.category = category
+ self.stacklevel = stacklevel
+
+ def __call__(self, arg: _T, /) -> _T:
+ # Make sure the inner functions created below don't
+ # retain a reference to self.
+ msg = self.message
+ category = self.category
+ stacklevel = self.stacklevel
+ if category is None:
+ arg.__deprecated__ = msg
+ return arg
+ elif isinstance(arg, type):
+ import functools
+ from types import MethodType
+
+ original_new = arg.__new__
+
+ @functools.wraps(original_new)
+ def __new__(cls, *args, **kwargs):
+ if cls is arg:
+ warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+ if original_new is not object.__new__:
+ return original_new(cls, *args, **kwargs)
+ # Mirrors a similar check in object.__new__.
+ elif cls.__init__ is object.__init__ and (args or kwargs):
+ raise TypeError(f"{cls.__name__}() takes no arguments")
+ else:
+ return original_new(cls)
+
+ arg.__new__ = staticmethod(__new__)
+
+ original_init_subclass = arg.__init_subclass__
+ # We need slightly different behavior if __init_subclass__
+ # is a bound method (likely if it was implemented in Python)
+ if isinstance(original_init_subclass, MethodType):
+ original_init_subclass = original_init_subclass.__func__
+
+ @functools.wraps(original_init_subclass)
+ def __init_subclass__(*args, **kwargs):
+ warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+ return original_init_subclass(*args, **kwargs)
+
+ arg.__init_subclass__ = classmethod(__init_subclass__)
+                # Otherwise, __init_subclass__ is likely a builtin, such as
+                # object's implementation of __init_subclass__.
+ else:
+ @functools.wraps(original_init_subclass)
+ def __init_subclass__(*args, **kwargs):
+ warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+ return original_init_subclass(*args, **kwargs)
+
+ arg.__init_subclass__ = __init_subclass__
+
+ arg.__deprecated__ = __new__.__deprecated__ = msg
+ __init_subclass__.__deprecated__ = msg
+ return arg
+ elif callable(arg):
+ import functools
+
+ @functools.wraps(arg)
+ def wrapper(*args, **kwargs):
+ warnings.warn(msg, category=category, stacklevel=stacklevel + 1)
+ return arg(*args, **kwargs)
+
+ arg.__deprecated__ = wrapper.__deprecated__ = msg
+ return wrapper
+ else:
+ raise TypeError(
+ "@deprecated decorator with non-None category must be applied to "
+ f"a class or callable, not {arg!r}"
+ )
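+    # Runtime behavior sketch (illustrative; ``f`` is hypothetical user code):
+    #
+    #     @deprecated("Use g instead", category=FutureWarning)
+    #     def f(): ...
+    #
+    #     f()               # emits FutureWarning("Use g instead") at the call site
+    #     f.__deprecated__  # -> "Use g instead"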
+
+
+# We have to do some monkey patching to deal with the dual nature of
+# Unpack/TypeVarTuple:
+# - We want Unpack to be a kind of TypeVar so it gets accepted in
+# Generic[Unpack[Ts]]
+# - We want it to *not* be treated as a TypeVar for the purposes of
+# counting generic parameters, so that when we subscript a generic,
+# the runtime doesn't try to substitute the Unpack with the subscripted type.
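+#
+# A minimal sketch of what this enables (mirroring the examples in _UNPACK_DOC
+# above; ``Ts`` and ``Bar`` are hypothetical user code):
+#
+#     Ts = TypeVarTuple("Ts")
+#
+#     class Bar(Generic[Unpack[Ts]]): ...
+#
+#     Bar[int]        # accepted: Unpack is "TypeVar-like" enough for Generic[]
+#     Bar[int, str]   # also accepted: it is not counted as a single parameter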
+if not hasattr(typing, "TypeVarTuple"):
+ def _check_generic(cls, parameters, elen=_marker):
+ """Check correct count for parameters of a generic cls (internal helper).
+
+ This gives a nice error message in case of count mismatch.
+ """
+ if not elen:
+ raise TypeError(f"{cls} is not a generic class")
+ if elen is _marker:
+ if not hasattr(cls, "__parameters__") or not cls.__parameters__:
+ raise TypeError(f"{cls} is not a generic class")
+ elen = len(cls.__parameters__)
+ alen = len(parameters)
+ if alen != elen:
+ expect_val = elen
+ if hasattr(cls, "__parameters__"):
+ parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+ num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
+ if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
+ return
+
+ # deal with TypeVarLike defaults
+ # required TypeVarLikes cannot appear after a defaulted one.
+ if alen < elen:
+ # since we validate TypeVarLike default in _collect_type_vars
+ # or _collect_parameters we can safely check parameters[alen]
+ if (
+ getattr(parameters[alen], '__default__', NoDefault)
+ is not NoDefault
+ ):
+ return
+
+ num_default_tv = sum(getattr(p, '__default__', NoDefault)
+ is not NoDefault for p in parameters)
+
+ elen -= num_default_tv
+
+ expect_val = f"at least {elen}"
+
+ things = "arguments" if sys.version_info >= (3, 10) else "parameters"
+ raise TypeError(f"Too {'many' if alen > elen else 'few'} {things}"
+ f" for {cls}; actual {alen}, expected {expect_val}")
+else:
+ # Python 3.11+
+
+ def _check_generic(cls, parameters, elen):
+ """Check correct count for parameters of a generic cls (internal helper).
+
+ This gives a nice error message in case of count mismatch.
+ """
+ if not elen:
+ raise TypeError(f"{cls} is not a generic class")
+ alen = len(parameters)
+ if alen != elen:
+ expect_val = elen
+ if hasattr(cls, "__parameters__"):
+ parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
+
+ # deal with TypeVarLike defaults
+ # required TypeVarLikes cannot appear after a defaulted one.
+ if alen < elen:
+ # since we validate TypeVarLike default in _collect_type_vars
+ # or _collect_parameters we can safely check parameters[alen]
+ if (
+ getattr(parameters[alen], '__default__', NoDefault)
+ is not NoDefault
+ ):
+ return
+
+ num_default_tv = sum(getattr(p, '__default__', NoDefault)
+ is not NoDefault for p in parameters)
+
+ elen -= num_default_tv
+
+ expect_val = f"at least {elen}"
+
+ raise TypeError(f"Too {'many' if alen > elen else 'few'} arguments"
+ f" for {cls}; actual {alen}, expected {expect_val}")
+
+if not _PEP_696_IMPLEMENTED:
+ typing._check_generic = _check_generic
+
+
+def _has_generic_or_protocol_as_origin() -> bool:
+ try:
+ frame = sys._getframe(2)
+ # - Catch AttributeError: not all Python implementations have sys._getframe()
+ # - Catch ValueError: maybe we're called from an unexpected module
+ # and the call stack isn't deep enough
+ except (AttributeError, ValueError):
+ return False # err on the side of leniency
+ else:
+ # If we somehow get invoked from outside typing.py,
+ # also err on the side of leniency
+ if frame.f_globals.get("__name__") != "typing":
+ return False
+ origin = frame.f_locals.get("origin")
+ # Cannot use "in" because origin may be an object with a buggy __eq__ that
+ # throws an error.
+ return origin is typing.Generic or origin is Protocol or origin is typing.Protocol
+
+
+_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)}
+
+
+def _is_unpacked_typevartuple(x) -> bool:
+ if get_origin(x) is not Unpack:
+ return False
+ args = get_args(x)
+ return (
+ bool(args)
+ and len(args) == 1
+ and type(args[0]) in _TYPEVARTUPLE_TYPES
+ )
+
+
+# In Python 3.11, _collect_type_vars was renamed to _collect_parameters.
+if hasattr(typing, '_collect_type_vars'):
+ def _collect_type_vars(types, typevar_types=None):
+ """Collect all type variable contained in types in order of
+ first appearance (lexicographic order). For example::
+
+ _collect_type_vars((T, List[S, T])) == (T, S)
+ """
+ if typevar_types is None:
+ typevar_types = typing.TypeVar
+ tvars = []
+
+ # A required TypeVarLike cannot appear after a TypeVarLike with a default
+ # if it was a direct call to `Generic[]` or `Protocol[]`
+ enforce_default_ordering = _has_generic_or_protocol_as_origin()
+ default_encountered = False
+
+ # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+ type_var_tuple_encountered = False
+
+ for t in types:
+ if _is_unpacked_typevartuple(t):
+ type_var_tuple_encountered = True
+ elif isinstance(t, typevar_types) and t not in tvars:
+ if enforce_default_ordering:
+ has_default = getattr(t, '__default__', NoDefault) is not NoDefault
+ if has_default:
+ if type_var_tuple_encountered:
+ raise TypeError('Type parameter with a default'
+ ' follows TypeVarTuple')
+ default_encountered = True
+ elif default_encountered:
+ raise TypeError(f'Type parameter {t!r} without a default'
+ ' follows type parameter with a default')
+
+ tvars.append(t)
+ if _should_collect_from_parameters(t):
+ tvars.extend([t for t in t.__parameters__ if t not in tvars])
+ return tuple(tvars)
+
+ typing._collect_type_vars = _collect_type_vars
+else:
+ def _collect_parameters(args):
+ """Collect all type variables and parameter specifications in args
+ in order of first appearance (lexicographic order).
+
+ For example::
+
+ assert _collect_parameters((T, Callable[P, T])) == (T, P)
+ """
+ parameters = []
+
+        # A required TypeVarLike cannot appear after a TypeVarLike with a default
+ # if it was a direct call to `Generic[]` or `Protocol[]`
+ enforce_default_ordering = _has_generic_or_protocol_as_origin()
+ default_encountered = False
+
+ # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple
+ type_var_tuple_encountered = False
+
+ for t in args:
+ if isinstance(t, type):
+                # We don't want the __parameters__ descriptor of a bare Python class.
+ pass
+ elif isinstance(t, tuple):
+ # `t` might be a tuple, when `ParamSpec` is substituted with
+ # `[T, int]`, or `[int, *Ts]`, etc.
+ for x in t:
+ for collected in _collect_parameters([x]):
+ if collected not in parameters:
+ parameters.append(collected)
+ elif hasattr(t, '__typing_subst__'):
+ if t not in parameters:
+ if enforce_default_ordering:
+ has_default = (
+ getattr(t, '__default__', NoDefault) is not NoDefault
+ )
+
+ if type_var_tuple_encountered and has_default:
+ raise TypeError('Type parameter with a default'
+ ' follows TypeVarTuple')
+
+ if has_default:
+ default_encountered = True
+ elif default_encountered:
+ raise TypeError(f'Type parameter {t!r} without a default'
+ ' follows type parameter with a default')
+
+ parameters.append(t)
+ else:
+ if _is_unpacked_typevartuple(t):
+ type_var_tuple_encountered = True
+ for x in getattr(t, '__parameters__', ()):
+ if x not in parameters:
+ parameters.append(x)
+
+ return tuple(parameters)
+
+ if not _PEP_696_IMPLEMENTED:
+ typing._collect_parameters = _collect_parameters
+
+# Backport typing.NamedTuple as it exists in Python 3.13.
+# In 3.11, support for defining generic `NamedTuple`s was added.
+# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
+# In 3.12, __orig_bases__ was added to call-based NamedTuples.
+# In 3.13, kwargs-based NamedTuples were deprecated.
+if sys.version_info >= (3, 13):
+ NamedTuple = typing.NamedTuple
+else:
+ def _make_nmtuple(name, types, module, defaults=()):
+ fields = [n for n, t in types]
+ annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
+ for n, t in types}
+ nm_tpl = collections.namedtuple(name, fields,
+ defaults=defaults, module=module)
+ nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
+ # The `_field_types` attribute was removed in 3.9;
+ # in earlier versions, it is the same as the `__annotations__` attribute
+ if sys.version_info < (3, 9):
+ nm_tpl._field_types = annotations
+ return nm_tpl
+
+ _prohibited_namedtuple_fields = typing._prohibited
+ _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})
+
+ class _NamedTupleMeta(type):
+ def __new__(cls, typename, bases, ns):
+ assert _NamedTuple in bases
+ for base in bases:
+ if base is not _NamedTuple and base is not typing.Generic:
+ raise TypeError(
+ 'can only inherit from a NamedTuple type and Generic')
+ bases = tuple(tuple if base is _NamedTuple else base for base in bases)
+ if "__annotations__" in ns:
+ types = ns["__annotations__"]
+ elif "__annotate__" in ns:
+ # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated
+ types = ns["__annotate__"](1)
+ else:
+ types = {}
+ default_names = []
+ for field_name in types:
+ if field_name in ns:
+ default_names.append(field_name)
+ elif default_names:
+ raise TypeError(f"Non-default namedtuple field {field_name} "
+ f"cannot follow default field"
+ f"{'s' if len(default_names) > 1 else ''} "
+ f"{', '.join(default_names)}")
+ nm_tpl = _make_nmtuple(
+ typename, types.items(),
+ defaults=[ns[n] for n in default_names],
+ module=ns['__module__']
+ )
+ nm_tpl.__bases__ = bases
+ if typing.Generic in bases:
+ if hasattr(typing, '_generic_class_getitem'): # 3.12+
+ nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem)
+ else:
+ class_getitem = typing.Generic.__class_getitem__.__func__
+ nm_tpl.__class_getitem__ = classmethod(class_getitem)
+ # update from user namespace without overriding special namedtuple attributes
+ for key, val in ns.items():
+ if key in _prohibited_namedtuple_fields:
+ raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
+ elif key not in _special_namedtuple_fields:
+ if key not in nm_tpl._fields:
+ setattr(nm_tpl, key, ns[key])
+ try:
+ set_name = type(val).__set_name__
+ except AttributeError:
+ pass
+ else:
+ try:
+ set_name(val, nm_tpl, key)
+ except BaseException as e:
+ msg = (
+ f"Error calling __set_name__ on {type(val).__name__!r} "
+ f"instance {key!r} in {typename!r}"
+ )
+ # BaseException.add_note() existed on py311,
+ # but the __set_name__ machinery didn't start
+ # using add_note() until py312.
+ # Making sure exceptions are raised in the same way
+ # as in "normal" classes seems most important here.
+ if sys.version_info >= (3, 12):
+ e.add_note(msg)
+ raise
+ else:
+ raise RuntimeError(msg) from e
+
+ if typing.Generic in bases:
+ nm_tpl.__init_subclass__()
+ return nm_tpl
+
+ _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})
+
+ def _namedtuple_mro_entries(bases):
+ assert NamedTuple in bases
+ return (_NamedTuple,)
+
+ @_ensure_subclassable(_namedtuple_mro_entries)
+ def NamedTuple(typename, fields=_marker, /, **kwargs):
+ """Typed version of namedtuple.
+
+ Usage::
+
+ class Employee(NamedTuple):
+ name: str
+ id: int
+
+ This is equivalent to::
+
+ Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+ The resulting class has an extra __annotations__ attribute, giving a
+ dict that maps field names to types. (The field names are also in
+ the _fields attribute, which is part of the namedtuple API.)
+ An alternative equivalent functional syntax is also accepted::
+
+ Employee = NamedTuple('Employee', [('name', str), ('id', int)])
+ """
+ if fields is _marker:
+ if kwargs:
+ deprecated_thing = "Creating NamedTuple classes using keyword arguments"
+ deprecation_msg = (
+ "{name} is deprecated and will be disallowed in Python {remove}. "
+ "Use the class-based or functional syntax instead."
+ )
+ else:
+ deprecated_thing = "Failing to pass a value for the 'fields' parameter"
+ example = f"`{typename} = NamedTuple({typename!r}, [])`"
+ deprecation_msg = (
+ "{name} is deprecated and will be disallowed in Python {remove}. "
+ "To create a NamedTuple class with 0 fields "
+ "using the functional syntax, "
+ "pass an empty list, e.g. "
+ ) + example + "."
+ elif fields is None:
+ if kwargs:
+ raise TypeError(
+ "Cannot pass `None` as the 'fields' parameter "
+ "and also specify fields using keyword arguments"
+ )
+ else:
+ deprecated_thing = "Passing `None` as the 'fields' parameter"
+ example = f"`{typename} = NamedTuple({typename!r}, [])`"
+ deprecation_msg = (
+ "{name} is deprecated and will be disallowed in Python {remove}. "
+ "To create a NamedTuple class with 0 fields "
+ "using the functional syntax, "
+ "pass an empty list, e.g. "
+ ) + example + "."
+ elif kwargs:
+ raise TypeError("Either list of fields or keywords"
+ " can be provided to NamedTuple, not both")
+ if fields is _marker or fields is None:
+ warnings.warn(
+ deprecation_msg.format(name=deprecated_thing, remove="3.15"),
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ fields = kwargs.items()
+ nt = _make_nmtuple(typename, fields, module=_caller())
+ nt.__orig_bases__ = (NamedTuple,)
+ return nt
+
+
+if hasattr(collections.abc, "Buffer"):
+ Buffer = collections.abc.Buffer
+else:
+ class Buffer(abc.ABC): # noqa: B024
+ """Base class for classes that implement the buffer protocol.
+
+ The buffer protocol allows Python objects to expose a low-level
+ memory buffer interface. Before Python 3.12, it is not possible
+ to implement the buffer protocol in pure Python code, or even
+ to check whether a class implements the buffer protocol. In
+ Python 3.12 and higher, the ``__buffer__`` method allows access
+ to the buffer protocol from Python code, and the
+ ``collections.abc.Buffer`` ABC allows checking whether a class
+ implements the buffer protocol.
+
+ To indicate support for the buffer protocol in earlier versions,
+ inherit from this ABC, either in a stub file or at runtime,
+ or use ABC registration. This ABC provides no methods, because
+        there are no Python-accessible methods shared by pre-3.12 buffer
+ classes. It is useful primarily for static checks.
+
+ """
+
+ # As a courtesy, register the most common stdlib buffer classes.
+ Buffer.register(memoryview)
+ Buffer.register(bytearray)
+ Buffer.register(bytes)
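+    # Usage sketch (illustrative; ``MyBuffer`` is hypothetical user code):
+    #
+    #     class MyBuffer(Buffer):          # or, at runtime: Buffer.register(MyBuffer)
+    #         ...
+    #
+    #     isinstance(bytearray(), Buffer)  # -> True, via the registrations above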
+
+
+# Backport of types.get_original_bases, available on 3.12+ in CPython
+if hasattr(_types, "get_original_bases"):
+ get_original_bases = _types.get_original_bases
+else:
+ def get_original_bases(cls, /):
+ """Return the class's "original" bases prior to modification by `__mro_entries__`.
+
+ Examples::
+
+ from typing import TypeVar, Generic
+ from typing_extensions import NamedTuple, TypedDict
+
+ T = TypeVar("T")
+ class Foo(Generic[T]): ...
+ class Bar(Foo[int], float): ...
+ class Baz(list[str]): ...
+ Eggs = NamedTuple("Eggs", [("a", int), ("b", str)])
+ Spam = TypedDict("Spam", {"a": int, "b": str})
+
+ assert get_original_bases(Bar) == (Foo[int], float)
+ assert get_original_bases(Baz) == (list[str],)
+ assert get_original_bases(Eggs) == (NamedTuple,)
+ assert get_original_bases(Spam) == (TypedDict,)
+ assert get_original_bases(int) == (object,)
+ """
+ try:
+ return cls.__dict__.get("__orig_bases__", cls.__bases__)
+ except AttributeError:
+ raise TypeError(
+ f'Expected an instance of type, not {type(cls).__name__!r}'
+ ) from None
+
+
+# NewType is a class on Python 3.10+, making it pickleable.
+# The error message for subclassing instances of NewType was improved in 3.11+.
+if sys.version_info >= (3, 11):
+ NewType = typing.NewType
+else:
+ class NewType:
+ """NewType creates simple unique types with almost zero
+ runtime overhead. NewType(name, tp) is considered a subtype of tp
+ by static type checkers. At runtime, NewType(name, tp) returns
+        a dummy callable that simply returns its argument. Usage::
+
+ UserId = NewType('UserId', int)
+ def name_by_id(user_id: UserId) -> str:
+ ...
+ UserId('user') # Fails type check
+ name_by_id(42) # Fails type check
+ name_by_id(UserId(42)) # OK
+ num = UserId(5) + 1 # type: int
+ """
+
+ def __call__(self, obj, /):
+ return obj
+
+ def __init__(self, name, tp):
+ self.__qualname__ = name
+ if '.' in name:
+ name = name.rpartition('.')[-1]
+ self.__name__ = name
+ self.__supertype__ = tp
+ def_mod = _caller()
+ if def_mod != 'typing_extensions':
+ self.__module__ = def_mod
+
+ def __mro_entries__(self, bases):
+ # We defined __mro_entries__ to get a better error message
+ # if a user attempts to subclass a NewType instance. bpo-46170
+ supercls_name = self.__name__
+
+ class Dummy:
+ def __init_subclass__(cls):
+ subcls_name = cls.__name__
+ raise TypeError(
+ f"Cannot subclass an instance of NewType. "
+ f"Perhaps you were looking for: "
+ f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`"
+ )
+
+ return (Dummy,)
+
+ def __repr__(self):
+ return f'{self.__module__}.{self.__qualname__}'
+
+ def __reduce__(self):
+ return self.__qualname__
+
+ if sys.version_info >= (3, 10):
+ # PEP 604 methods
+ # It doesn't make sense to have these methods on Python <3.10
+
+ def __or__(self, other):
+ return typing.Union[self, other]
+
+ def __ror__(self, other):
+ return typing.Union[other, self]
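+    # On Python 3.10+, instances also support PEP 604 unions at runtime
+    # (illustrative; ``UserId`` is hypothetical user code):
+    #
+    #     UserId = NewType('UserId', int)
+    #     MaybeUserId = UserId | None   # typing.Optional[UserId]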
+
+
+if hasattr(typing, "TypeAliasType"):
+ TypeAliasType = typing.TypeAliasType
+else:
+ def _is_unionable(obj):
+ """Corresponds to is_unionable() in unionobject.c in CPython."""
+ return obj is None or isinstance(obj, (
+ type,
+ _types.GenericAlias,
+ _types.UnionType,
+ TypeAliasType,
+ ))
+
+ class TypeAliasType:
+ """Create named, parameterized type aliases.
+
+ This provides a backport of the new `type` statement in Python 3.12:
+
+ type ListOrSet[T] = list[T] | set[T]
+
+ is equivalent to:
+
+ T = TypeVar("T")
+ ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
+
+ The name ListOrSet can then be used as an alias for the type it refers to.
+
+ The type_params argument should contain all the type parameters used
+ in the value of the type alias. If the alias is not generic, this
+ argument is omitted.
+
+ Static type checkers should only support type aliases declared using
+ TypeAliasType that follow these rules:
+
+ - The first argument (the name) must be a string literal.
+ - The TypeAliasType instance must be immediately assigned to a variable
+ of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid,
+ as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)').
+
+ """
+
+ def __init__(self, name: str, value, *, type_params=()):
+ if not isinstance(name, str):
+ raise TypeError("TypeAliasType name must be a string")
+ self.__value__ = value
+ self.__type_params__ = type_params
+
+ parameters = []
+ for type_param in type_params:
+ if isinstance(type_param, TypeVarTuple):
+ parameters.extend(type_param)
+ else:
+ parameters.append(type_param)
+ self.__parameters__ = tuple(parameters)
+ def_mod = _caller()
+ if def_mod != 'typing_extensions':
+ self.__module__ = def_mod
+            # Setting __name__ last marks initialization as complete: once it
+            # exists, __setattr__ rejects any further attribute modification.
+ self.__name__ = name
+
+ def __setattr__(self, name: str, value: object, /) -> None:
+ if hasattr(self, "__name__"):
+ self._raise_attribute_error(name)
+ super().__setattr__(name, value)
+
+ def __delattr__(self, name: str, /) -> Never:
+ self._raise_attribute_error(name)
+
+ def _raise_attribute_error(self, name: str) -> Never:
+ # Match the Python 3.12 error messages exactly
+ if name == "__name__":
+ raise AttributeError("readonly attribute")
+ elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}:
+ raise AttributeError(
+ f"attribute '{name}' of 'typing.TypeAliasType' objects "
+ "is not writable"
+ )
+ else:
+ raise AttributeError(
+ f"'typing.TypeAliasType' object has no attribute '{name}'"
+ )
+
+ def __repr__(self) -> str:
+ return self.__name__
+
+ def __getitem__(self, parameters):
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ parameters = [
+ typing._type_check(
+ item, f'Subscripting {self.__name__} requires a type.'
+ )
+ for item in parameters
+ ]
+ return typing._GenericAlias(self, tuple(parameters))
+
+ def __reduce__(self):
+ return self.__name__
+
+ def __init_subclass__(cls, *args, **kwargs):
+ raise TypeError(
+ "type 'typing_extensions.TypeAliasType' is not an acceptable base type"
+ )
+
+ # The presence of this method convinces typing._type_check
+ # that TypeAliasTypes are types.
+ def __call__(self):
+ raise TypeError("Type alias is not callable")
+
+ if sys.version_info >= (3, 10):
+ def __or__(self, right):
+ # For forward compatibility with 3.12, reject Unions
+ # that are not accepted by the built-in Union.
+ if not _is_unionable(right):
+ return NotImplemented
+ return typing.Union[self, right]
+
+ def __ror__(self, left):
+ if not _is_unionable(left):
+ return NotImplemented
+ return typing.Union[left, self]
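+    # Usage sketch (illustrative; mirrors the docstring above, so ``T`` and
+    # ``ListOrSet`` are hypothetical user code):
+    #
+    #     T = TypeVar("T")
+    #     ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,))
+    #
+    #     ListOrSet[int]        # subscription produces a parameterized alias
+    #     ListOrSet.__value__   # the aliased value, exactly as passed in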
+
+
+if hasattr(typing, "is_protocol"):
+ is_protocol = typing.is_protocol
+ get_protocol_members = typing.get_protocol_members
+else:
+ def is_protocol(tp: type, /) -> bool:
+ """Return True if the given type is a Protocol.
+
+ Example::
+
+ >>> from typing_extensions import Protocol, is_protocol
+ >>> class P(Protocol):
+ ... def a(self) -> str: ...
+ ... b: int
+ >>> is_protocol(P)
+ True
+ >>> is_protocol(int)
+ False
+ """
+ return (
+ isinstance(tp, type)
+ and getattr(tp, '_is_protocol', False)
+ and tp is not Protocol
+ and tp is not typing.Protocol
+ )
+
+ def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]:
+ """Return the set of members defined in a Protocol.
+
+ Example::
+
+ >>> from typing_extensions import Protocol, get_protocol_members
+ >>> class P(Protocol):
+ ... def a(self) -> str: ...
+ ... b: int
+ >>> get_protocol_members(P)
+ frozenset({'a', 'b'})
+
+ Raise a TypeError for arguments that are not Protocols.
+ """
+ if not is_protocol(tp):
+ raise TypeError(f'{tp!r} is not a Protocol')
+ if hasattr(tp, '__protocol_attrs__'):
+ return frozenset(tp.__protocol_attrs__)
+ return frozenset(_get_protocol_attrs(tp))
+
+
+if hasattr(typing, "Doc"):
+ Doc = typing.Doc
+else:
+ class Doc:
+ """Define the documentation of a type annotation using ``Annotated``, to be
+ used in class attributes, function and method parameters, return values,
+ and variables.
+
+ The value should be a positional-only string literal to allow static tools
+ like editors and documentation generators to use it.
+
+ This complements docstrings.
+
+ The string value passed is available in the attribute ``documentation``.
+
+ Example::
+
+ >>> from typing_extensions import Annotated, Doc
+ >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ...
+ """
+ def __init__(self, documentation: str, /) -> None:
+ self.documentation = documentation
+
+ def __repr__(self) -> str:
+ return f"Doc({self.documentation!r})"
+
+ def __hash__(self) -> int:
+ return hash(self.documentation)
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, Doc):
+ return NotImplemented
+ return self.documentation == other.documentation
+
+
+_CapsuleType = getattr(_types, "CapsuleType", None)
+
+if _CapsuleType is None:
+ try:
+ import _socket
+ except ImportError:
+ pass
+ else:
+ _CAPI = getattr(_socket, "CAPI", None)
+ if _CAPI is not None:
+ _CapsuleType = type(_CAPI)
+
+if _CapsuleType is not None:
+ CapsuleType = _CapsuleType
+ __all__.append("CapsuleType")
+
+
+# Aliases for items that have always been in typing.
+# Explicitly assign these (rather than using `from typing import *` at the top),
+# so that we get a CI error if one of these is deleted from typing.py
+# in a future version of Python
+AbstractSet = typing.AbstractSet
+AnyStr = typing.AnyStr
+BinaryIO = typing.BinaryIO
+Callable = typing.Callable
+Collection = typing.Collection
+Container = typing.Container
+Dict = typing.Dict
+ForwardRef = typing.ForwardRef
+FrozenSet = typing.FrozenSet
+Generic = typing.Generic
+Hashable = typing.Hashable
+IO = typing.IO
+ItemsView = typing.ItemsView
+Iterable = typing.Iterable
+Iterator = typing.Iterator
+KeysView = typing.KeysView
+List = typing.List
+Mapping = typing.Mapping
+MappingView = typing.MappingView
+Match = typing.Match
+MutableMapping = typing.MutableMapping
+MutableSequence = typing.MutableSequence
+MutableSet = typing.MutableSet
+Optional = typing.Optional
+Pattern = typing.Pattern
+Reversible = typing.Reversible
+Sequence = typing.Sequence
+Set = typing.Set
+Sized = typing.Sized
+TextIO = typing.TextIO
+Tuple = typing.Tuple
+Union = typing.Union
+ValuesView = typing.ValuesView
+cast = typing.cast
+no_type_check = typing.no_type_check
+no_type_check_decorator = typing.no_type_check_decorator
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000..7242d3b
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1 @@
+ruff==0.4.5
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..5be7adb
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,7 @@
+[tox]
+isolated_build = True
+envlist = py38, py39, py310, py311, py312, py313
+
+[testenv]
+changedir = src
+commands = python -m unittest discover